MemoryDependenceAnalysis.cpp revision 4012fdda13710d21b415a79475adc2bbb6628527
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis",
                                                false, true);

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
  : FunctionPass(&ID), PredCache(0) {
}
MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// releaseMemory - Clean up memory in between runs.
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}
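
// Illustrative usage sketch, not part of the original file: this is roughly
// how a client pass might consume the lazy, caching interface implemented
// below.  'MyClientPass' and the loop over loads are hypothetical; the memdep
// calls follow the public interface of this analysis.
//
//   bool MyClientPass::runOnFunction(Function &F) {
//     MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//     for each LoadInst *LI in F:
//       MemDepResult Dep = MD.getDependency(LI);
//       if (Dep.isDef())          // LI reads exactly what Dep.getInst() wrote.
//         ...
//       else if (Dep.isClobber()) // Dep.getInst() may write the location.
//         ...
//       else if (Dep.isNonLocal()) // Nothing in LI's block; query non-local.
//         ...
//     return false;
//   }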

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy*, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy *Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy*, 4> >::iterator
  InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;  // Silence the unused-variable warning in NDEBUG builds.
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure.
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      if (AA->getModRefInfo(CS, InstCS) == AliasAnalysis::NoModRef)
        continue;

      // FIXME: If this is a ref/ref result, we should ignore it!
      //  X = strlen(P);
      //  Y = strlen(Q);
      //  Z = strlen(P);  // Z = X

      // If they interfere, we generally return clobber.  However, if they are
      // calls to the same read-only function, we return Def.
      if (!AA->onlyReadsMemory(CS) || CS.getCalledFunction() == 0 ||
          CS.getCalledFunction() != InstCS.getCalledFunction())
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Values depend on loads if the pointers are must aliased.  This means
    // that a load depends on another must aliased load from the same value.
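    //
    // Illustrative example, assumed IR (not from this file): when scanning
    // for a load of %Q past an earlier "%a = load i32* %P", the query
    // returns Def(%a) if %P and %Q must-alias, skips %a if they no-alias,
    // and (because isLoad is true) also skips %a if they merely may-alias.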
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // May-alias loads don't depend on each other without a dependence.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;
      // Stores depend on may- and must-aliased loads; loads depend on
      // must-aliased loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    // FIXME: If this is a load, we should ignore readonly calls!
    if (AA->getModRefInfo(Inst, MemPtr, MemSize) == AliasAnalysis::NoModRef)
      continue;

    // Otherwise, there is a dependence.
    return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}
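
// Worked example for the routine above, using assumed IR (not from this
// file): scanning backwards from "%v = load i32* %P" past
// "store i32 4, i32* %P" yields Def(store), because the store must-aliases
// the queried location.  An intervening call that may write %P would
// instead yield Clobber(call) via the mod/ref check.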

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, it is
    // a clobber, otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getClobber(QueryInst);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = SI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
    }
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = LI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(LI->getType());
    }
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    LocalCache = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                           QueryParent);
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.  Use ~0ULL so
    // the size is consistent on 32-bit hosts (MemSize is a uint64_t).
    MemSize = ~0ULL;
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemPtr)
    LocalCache = getPointerDependencyFrom(MemPtr, MemSize,
                                          isa<LoadInst>(QueryInst),
                                          ScanPos, QueryParent);

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
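
// Illustrative sketch of a non-local query, using an assumed diamond CFG
// (not from this file): blocks B and C both branch to D, and A branches to
// B and C.  For a load in D whose local dependency is NonLocal,
// getNonLocalDependency (below) might return entries such as:
//   { B: NonLocal, C: Def(store in C), A: Clobber(call in A) }
// A NonLocal entry means the block is transparent to the value, so its
// predecessors were scanned in turn.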

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalDependency(Instruction *QueryInst) {
  // FIXME: Make this only be for callsites in the future.
  assert(isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst) ||
         isa<LoadInst>(QueryInst) || isa<StoreInst>(QueryInst));
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryInst];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted, etc.
  /// In the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Visited checked first, vector in sorted order.
  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && (&*Entry)[-1].first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, QueryInst);
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    Value *MemPtr = 0;
    uint64_t MemSize = 0;

    if (ScanPos == DirtyBB->begin()) {
      // No dependence found.  If this is the entry block of the function, it
      // is a clobber, otherwise it is non-local.
      if (DirtyBB != &DirtyBB->getParent()->getEntryBlock())
        Dep = MemDepResult::getNonLocal();
      else
        Dep = MemDepResult::getClobber(ScanPos);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
      // If this is a volatile store, don't mess around with it.  Just return
      // the previous instruction as a clobber.
      if (SI->isVolatile())
        Dep = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
      else {
        MemPtr = SI->getPointerOperand();
        MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
      }
    } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
      // If this is a volatile load, don't mess around with it.  Just return
      // the previous instruction as a clobber.
      if (LI->isVolatile())
        Dep = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
      else {
        MemPtr = LI->getPointerOperand();
        MemSize = TD->getTypeStoreSize(LI->getType());
      }
    } else {
      assert(isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst));
      Dep = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                      DirtyBB);
    }

    if (MemPtr)
      Dep = getPointerDependencyFrom(MemPtr, MemSize, isa<LoadInst>(QueryInst),
                                     ScanPos, DirtyBB);

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
    } else {

      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
    }
  }

  return Cache;
}

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(Value *Pointer, bool isLoad, BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepEntry> &Result) {
  assert(isa<PointerType>(Pointer->getType()) &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  // We know that the pointer value is live into FromBB; find the def/clobbers
  // from predecessors.
  const Type *EltTy = cast<PointerType>(Pointer->getType())->getElementType();
  uint64_t PointeeSize = TD->getTypeStoreSize(EltTy);

  // While we have blocks to analyze, get their values.
  SmallPtrSet<BasicBlock*, 64> Visited;

  for (BasicBlock **PI = PredCache->GetPreds(FromBB); *PI; ++PI) {
    // TODO: PHI TRANSLATE.
    getNonLocalPointerDepInternal(Pointer, PointeeSize, isLoad, *PI,
                                  Result, Visited);
  }
}
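
// Hypothetical usage sketch for the routine above (client code assumed, not
// from this file): a GVN-style client could gather the non-local def/clobber
// set for a load LI like so:
//
//   SmallVector<MemoryDependenceAnalysis::NonLocalDepEntry, 16> Deps;
//   MD.getNonLocalPointerDependency(LI->getPointerOperand(), /*isLoad=*/true,
//                                   LI->getParent(), Deps);
//   // Each entry pairs a BasicBlock* with the Def or Clobber found there.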

void MemoryDependenceAnalysis::
getNonLocalPointerDepInternal(Value *Pointer, uint64_t PointeeSize,
                              bool isLoad, BasicBlock *StartBB,
                              SmallVectorImpl<NonLocalDepEntry> &Result,
                              SmallPtrSet<BasicBlock*, 64> &Visited) {
  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer, isLoad);

  std::pair<BasicBlock*, NonLocalDepInfo> &CacheInfo =
    NonLocalPointerDeps[CacheKey];
  NonLocalDepInfo *Cache = &CacheInfo.second;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo.first == StartBB) {
    for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
         I != E; ++I)
      if (!I->second.isNonLocal())
        Result.push_back(*I);
    ++NumCacheCompleteNonLocalPtr;
    return;
  }

  // Otherwise, either this is a new block, a block with an invalid cache
  // pointer, or one that we're about to invalidate by putting more info into
  // it than its valid cache info.  If the cache is empty, the result will be
  // valid cache info for StartBB; otherwise it isn't.
  CacheInfo.first = Cache->empty() ? StartBB : 0;

  SmallVector<BasicBlock*, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted.  Previously cached
  // entries will all be sorted.  The entries we add we only sort on demand (we
  // don't insert every element into its sorted position).  We know that we
  // won't get any reuse from currently inserted values, because we don't
  // revisit blocks after we insert info for them.
  unsigned NumSortedEntries = Cache->size();

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // Analyze the dependency of *Pointer in FromBB.  See if we already have
    // been here.
    if (!Visited.insert(BB))
      continue;

    // Get the dependency info for Pointer in BB.  If we have cached
    // information, we will use it, otherwise we compute it.

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                       std::make_pair(BB, MemDepResult()));
    if (Entry != Cache->begin() && (&*Entry)[-1].first == BB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache->begin()+NumSortedEntries && Entry->first == BB)
      ExistingResult = &Entry->second;

    // If we have a cached entry, and it is non-dirty, use it as the value for
    // this dependency.
    MemDepResult Dep;
    if (ExistingResult && !ExistingResult->isDirty()) {
      Dep = *ExistingResult;
      ++NumCacheNonLocalPtr;
    } else {
      // Otherwise, we have to scan for the value.  If we have a dirty cache
      // entry, start scanning from its position, otherwise we scan from the
      // end of the block.
      BasicBlock::iterator ScanPos = BB->end();
      if (ExistingResult && ExistingResult->getInst()) {
        assert(ExistingResult->getInst()->getParent() == BB &&
               "Instruction invalidated?");
        ++NumCacheDirtyNonLocalPtr;
        ScanPos = ExistingResult->getInst();

        // Eliminating the dirty entry from 'Cache', so update the reverse
        // info.
        RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos,
                             CacheKey.getOpaqueValue());
      } else {
        ++NumUncacheNonLocalPtr;
      }

      // Scan the block for the dependency.
      Dep = getPointerDependencyFrom(Pointer, PointeeSize, isLoad, ScanPos, BB);

      // If we had a dirty entry for the block, update it.  Otherwise, just add
      // a new entry.
      if (ExistingResult)
        *ExistingResult = Dep;
      else
        Cache->push_back(std::make_pair(BB, Dep));

      // If the block has a dependency (i.e. it isn't completely transparent to
      // the value), remember the reverse association because we just added it
      // to Cache!
      if (!Dep.isNonLocal()) {
        // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
        // update MemDep when we remove instructions.
        Instruction *Inst = Dep.getInst();
        assert(Inst && "Didn't depend on anything?");
        ReverseNonLocalPtrDeps[Inst].insert(CacheKey.getOpaqueValue());
      }
    }

    // If we got a Def or Clobber, add this to the list of results.
    if (!Dep.isNonLocal()) {
      Result.push_back(NonLocalDepEntry(BB, Dep));
      continue;
    }

    // Otherwise, we have to process all the predecessors of this block to scan
    // them as well.
    for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
      // TODO: PHI TRANSLATE.
      Worklist.push_back(*PI);
    }
  }

  // If we computed new values, re-sort Cache.
  if (NumSortedEntries != Cache->size())
    std::sort(Cache->begin(), Cache->end());
}

/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
void MemoryDependenceAnalysis::
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It =
    NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end()) return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.second;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].second.getInst();
    if (Target == 0) continue;  // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].first && Target != P.getPointer());

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P.getOpaqueValue());
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}
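
// Illustrative example for removeInstruction below, using assumed
// instructions %a and %b: if LocalDeps[%b] currently points at %a and %a is
// removed, the entry is replaced with a dirty marker at the instruction
// immediately after %a.  A later getDependency(%b) then resumes its scan
// from that point rather than rescanning the whole block.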

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  If the instruction has non-pointer type, then it can't be a pointer
  // base.

  // Remove it from both the load info and the store info.  The instruction
  // can't be in either of these maps if it is non-pointer.
  if (isa<PointerType>(RemInst->getType())) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other values,
  // we need to replace its entry with a dirty version of the instruction after
  // it.  If RemInst is a terminator, we use a null dirty value.
  //
  // Using a dirty version of the instruction after RemInst saves having to scan
  // the entire block to get to this point.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDirtyVal's
      // instruction.
      assert(NewDirtyVal.getInst() && "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
    ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallPtrSet<void*, 4> &Set = ReversePtrDepIt->second;
    SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;

    for (SmallPtrSet<void*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      ValueIsLoadPair P;
      P.setFromOpaqueValue(*I);
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].second;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].first = 0;

      // Update any entries for RemInst to use the instruction after it.
      for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
           DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
        .insert(ReversePtrDepsToAdd.back().second.getOpaqueValue());
      ReversePtrDepsToAdd.pop_back();
    }
  }


  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (CachedNonLocalPointerInfo::const_iterator I = NonLocalPointerDeps.begin(),
       E = NonLocalPointerDeps.end(); I != E; ++I) {
    assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
    const NonLocalDepInfo &Val = I->second.second;
    for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
         II != E; ++II)
      assert(II->second.getInst() != D && "Inst occurs as NLPD value");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseNonLocalPtrDepTy::const_iterator
       I = ReverseNonLocalPtrDeps.begin(),
       E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in rev NLPD map");

    for (SmallPtrSet<void*, 4>::const_iterator II = I->second.begin(),
         E = I->second.end(); II != E; ++II)
      assert(*II != ValueIsLoadPair(D, false).getOpaqueValue() &&
             *II != ValueIsLoadPair(D, true).getOpaqueValue() &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }

}