MemoryDependenceAnalysis.cpp revision 106c6ca7b0d1ffc28c832edd6f697bf4de55dbcd
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");
char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  return false;
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      PointerSize = ~0UL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      if (AA->getModRefInfo(CS, InstCS) == AliasAnalysis::NoModRef)
        continue;

      // FIXME: If this is a ref/ref result, we should ignore it!
      //  X = strlen(P);
      //  Y = strlen(Q);
      //  Z = strlen(P);  // Z = X

      // If they interfere, we generally return clobber.  However, if they are
      // calls to the same read-only functions we return Def.
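      // Illustration (hedged; assumed IR, not from this revision): for
      //   %x = call i32 @strlen(i8* %P)
      //   %z = call i32 @strlen(i8* %P)
      // the second strlen gets a Def on the first (same read-only callee),
      // so a client may CSE it; an unknown or different callee that
      // interferes yields a conservative Clobber instead.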
      if (!AA->onlyReadsMemory(CS) || CS.getCalledFunction() == 0 ||
          CS.getCalledFunction() != InstCS.getCalledFunction())
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.
  return MemDepResult::getNonLocal();
}

/// getDependencyFrom - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::
getDependencyFrom(Instruction *QueryInst, BasicBlock::iterator ScanIt,
                  BasicBlock *BB) {
  // The first instruction in a block is always non-local.
  if (ScanIt == BB->begin())
    return MemDepResult::getNonLocal();

  // Get the pointer value for which dependence will be determined
  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = SI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      return MemDepResult::getClobber(--ScanIt);

    MemPtr = LI->getPointerOperand();
    MemSize = TD->getTypeStoreSize(LI->getType());
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0UL;
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    return getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanIt, BB);
  } else {
    // Otherwise, this is a vaarg or non-memory instruction, just return a
    // clobber dependency on the previous inst.
    return MemDepResult::getClobber(--ScanIt);
  }

  // Walk backwards through the basic block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Values depend on loads if the pointers are must aliased.  This means that
    // a load depends on another must aliased load from the same value.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // May-alias loads don't depend on each other without a dependence.
      if (isa<LoadInst>(QueryInst) && R == AliasAnalysis::MayAlias)
        continue;
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
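      // (Restating the case analysis below: NoAlias means the store is
      // irrelevant and scanning continues; MayAlias means the store might
      // overwrite the queried location, so it is a conservative Clobber;
      // MustAlias means it definitely produces the value, a Def.)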
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    if (AA->getModRefInfo(Inst, MemPtr, MemSize) == AliasAnalysis::NoModRef)
      continue;

    // Otherwise, there is a dependence.
    return MemDepResult::getClobber(Inst);
  }

  // If we found nothing, return the non-local flag.
  return MemDepResult::getNonLocal();
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    SmallPtrSet<Instruction*, 4> &InstMap = ReverseLocalDeps[Inst];
    InstMap.erase(QueryInst);
    if (InstMap.empty())
      ReverseLocalDeps.erase(Inst);
  }

  // Do the scan.
  LocalCache = getDependencyFrom(QueryInst, ScanPos, QueryInst->getParent());

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalDependency(Instruction *QueryInst) {
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryInst];

  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
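    // (CacheP.second is the per-query dirty flag paired with the
    // block->result vector in PerInstNLInfo; removeInstruction() sets it
    // below when a cached dependency is deleted.)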
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Visited checked first, vector in sorted order.
  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && (&*Entry)[-1].first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't have
    // to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;

        // We're removing QueryInst's use of Inst.
        SmallPtrSet<Instruction*, 4> &InstMap = ReverseNonLocalDeps[Inst];
        InstMap.erase(QueryInst);
        if (InstMap.empty()) ReverseNonLocalDeps.erase(Inst);
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep = getDependencyFrom(QueryInst, ScanPos, DirtyBB);

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
    } else {

      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
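      // (Combined with the Visited set above, this amounts to a reverse-CFG
      // walk that stops expanding at the first block containing a local
      // dependency for QueryInst.)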
      DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
    }
  }

  return Cache;
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        ReverseNonLocalDeps[Inst].erase(RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty flag
    // set so it will rescan.  This saves having to scan the entire block to get
    // to this point.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = MemDepResult::getDirty(NewDepInst);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
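    // (Inserting into ReverseLocalDeps while iterating could rehash the map
    // and invalidate 'ReverseDeps', which points into it, hence the two-phase
    // update via ReverseDepsToAdd.)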
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        Instruction *NextI = 0;
        if (!RemInst->isTerminator()) {
          NextI = next(BasicBlock::iterator(RemInst));
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
        }
        DI->second = MemDepResult::getDirty(NextI);
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }
}
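//===----------------------------------------------------------------------===//
// Illustrative client usage (a sketch, not part of this revision).  Assuming
// the pass-manager API of this era, a transform pass would query memdep
// roughly as follows, where 'LI' is a hypothetical LoadInst* being optimized:
//
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Dep = MD.getDependency(LI);
//   if (Dep.isNonLocal()) {
//     // Walk the per-predecessor-block results.
//     const MemoryDependenceAnalysis::NonLocalDepInfo &Deps =
//       MD.getNonLocalDependency(LI);
//   } else if (Instruction *DepInst = Dep.getInst()) {
//     // A Def must-aliases LI's pointer (forwarding/CSE candidate); a
//     // Clobber only may interfere.
//   }
//
//   // Before deleting an instruction, keep the caches coherent:
//   //   MD.removeInstruction(LI);  LI->eraseFromParent();
//===----------------------------------------------------------------------===//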