MemoryDependenceAnalysis.cpp revision d8dd934d16d1190881d45b065daec4a1ba82133f
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  return false;
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure.
      PointerSize = ~0UL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      if (AA->getModRefInfo(CS, InstCS) == AliasAnalysis::NoModRef)
        continue;

      // FIXME: If this is a ref/ref result, we should ignore it!
      //  X = strlen(P);
      //  Y = strlen(Q);
      //  Z = strlen(P);  // Z = X

      // If they interfere, we generally return clobber.  However, if they are
      // calls to the same read-only functions we return Def.
      if (!AA->onlyReadsMemory(CS) || CS.getCalledFunction() == 0 ||
          CS.getCalledFunction() != InstCS.getCalledFunction())
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.
  return MemDepResult::getNonLocal();
}
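
// Added commentary (not part of this revision): the read-only-call case above
// is what allows a client to CSE repeated calls to the same readonly
// function, matching the strlen example in the FIXME.  For IR like
//
//   %X = call i32 @strlen(i8* %P)   ; readonly
//   %Z = call i32 @strlen(i8* %P)   ; query
//
// the two call sites interfere (a non-NoModRef result), but both only read
// memory and name the same callee, so the query on %Z yields Def(%X) instead
// of a clobber, and %Z can be replaced by %X.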

/// getDependencyFrom - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::
getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
                  BasicBlock *BB) {
  // The first instruction in a block is always non-local.
  if (ScanIt == BB->begin())
    return MemDepResult::getNonLocal();

  // Get the pointer value for which dependence will be determined.
  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return
    // the previous instruction as a clobber.
    if (SI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = SI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return
    // the previous instruction as a clobber.
    if (LI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = LI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(LI->getType());
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    assert(0 && "Should use getCallSiteDependencyFrom!");
    return getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanIt, BB);
  } else {
    // Otherwise, this is a vaarg or non-memory instruction, just return a
    // clobber dependency on the previous inst.
    return MemDepResult::getClobber(--ScanIt);
  }

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Values depend on loads if the pointers are must aliased.  This means
    // that a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // May-alias loads don't depend on each other without a dependence.
      if (isa<LoadInst>(QueryInst) && R == AliasAnalysis::MayAlias)
        continue;
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is
    // to the allocation, return Def.  This means that there is no dependence
    // and the access can be optimized based on that.  For example, a load
    // could turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    if (AA->getModRefInfo(Inst, MemPtr, MemSize) == AliasAnalysis::NoModRef)
      continue;

    // Otherwise, there is a dependence.
    return MemDepResult::getClobber(Inst);
  }

  // If we found nothing, return the non-local flag.
  return MemDepResult::getNonLocal();
}
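
// Added commentary (not part of this revision): the Def/Clobber split in the
// store case above is what clients key on.  On IR like
//
//   store i32 4, i32* %P
//   %A = load i32* %P        ; MustAlias store => Def: %A is provably 4
//
// versus
//
//   store i32 4, i32* %Q     ; %Q only may-aliases %P
//   %A = load i32* %P        ; MayAlias store => Clobber: no forwarding
//
// a Def result means the instruction provably produces (or consumes) the
// queried memory, while a Clobber only says the scan could not safely look
// past it.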

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at
  // that instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    SmallPtrSet<Instruction*, 4> &InstMap = ReverseLocalDeps[Inst];
    InstMap.erase(QueryInst);
    if (InstMap.empty())
      ReverseLocalDeps.erase(Inst);
  }

  // Do the scan.
  if (!isa<CallInst>(QueryInst) && !isa<InvokeInst>(QueryInst))
    LocalCache = getDependencyFrom(QueryInst, ScanPos, QueryInst->getParent());
  else
    LocalCache = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                           QueryInst->getParent());

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
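
// Added commentary (not part of this revision): a hypothetical client pass
// would use the cached local query roughly like this ('LI' stands in for a
// load being optimized):
//
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Res = MD.getDependency(LI);
//   if (Res.isDef()) {
//     // Instruction that must define the queried memory; e.g. forward a
//     // stored value to the load.
//   } else if (Res.isNonLocal()) {
//     // Nothing in LI's own block; fall back to getNonLocalDependency below.
//   }
//
// Repeated queries on LI hit the LocalDeps cache until removeInstruction
// marks the entry dirty.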

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalDependency(Instruction *QueryInst) {
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryInst];

  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc.
  /// In the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return
    // it with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Visited checked first, vector in sorted order.
  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block
    // in the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && (&*Entry)[-1].first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the
      // block is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't
    // have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;

        // We're removing QueryInst's use of Inst.
        SmallPtrSet<Instruction*, 4> &InstMap = ReverseNonLocalDeps[Inst];
        InstMap.erase(QueryInst);
        if (InstMap.empty()) ReverseNonLocalDeps.erase(Inst);
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;
    if (!isa<CallInst>(QueryInst) && !isa<InvokeInst>(QueryInst))
      Dep = getDependencyFrom(QueryInst, ScanPos, DirtyBB);
    else
      Dep = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                      DirtyBB);

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
    } else {
      // If the block *is* completely transparent to the load, we need to
      // check the predecessors of this block.  Add them to our worklist.
      DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
    }
  }

  return Cache;
}
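
// Added commentary (not part of this revision): a sketch of consuming the
// result vector, assuming 'MD' is the analysis and 'QI' is an instruction
// whose local query already returned NonLocal:
//
//   const MemoryDependenceAnalysis::NonLocalDepInfo &Deps =
//     MD.getNonLocalDependency(QI);
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
//     BasicBlock *BB = Deps[i].first;    // block this entry describes
//     MemDepResult R = Deps[i].second;   // dep within BB, or NonLocal if BB
//   }                                    // is transparent to the access
//
// Note that the returned reference points into NonLocalDeps, so it may be
// invalidated by later queries that grow the map or by removeInstruction.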

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        ReverseNonLocalDeps[Inst].erase(RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove
  // it.
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is
    // gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're
  // removing.
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty
    // flag set so it will rescan.  This saves having to scan the entire
    // block to get to this point.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = MemDepResult::getDirty(NewDepInst);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        Instruction *NextI = 0;
        if (!RemInst->isTerminator()) {
          NextI = next(BasicBlock::iterator(RemInst));
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
        }
        DI->second = MemDepResult::getDirty(NextI);
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating
    // 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}
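
// Added commentary (not part of this revision): the dirty-entry scheme above
// avoids rescanning whole blocks.  If a cached result pointed at RemInst,
// the entry becomes Dirty(instruction after RemInst), so the next query
// resumes its backwards scan from that point rather than from the query
// instruction, e.g.:
//
//   %v = load i32* %P     ; cached dep: Def(store S)
//   ; ...S is deleted...  ; cache for %v now holds Dirty(next(S))
//   getDependency(%v)     ; rescans starting at next(S), not at %v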

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }
}