MemoryDependenceAnalysis.cpp revision 00a6d1448d27b5140b911caf2eca9585abdae5c8
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by Owen Anderson and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Support/CFG.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/Statistic.h"

#define DEBUG_TYPE "memdep"

using namespace llvm;

STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");

char MemoryDependenceAnalysis::ID = 0;

// Sentinel values stored in the dependence caches.  They only need to be
// distinct from every real Instruction*; the code below compares against
// them and uses them as map keys but never dereferences them.
Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis");

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
44/// 45void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const { 46 AU.setPreservesAll(); 47 AU.addRequiredTransitive<AliasAnalysis>(); 48 AU.addRequiredTransitive<TargetData>(); 49} 50 51/// getCallSiteDependency - Private helper for finding the local dependencies 52/// of a call site. 53Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C, 54 Instruction* start, 55 BasicBlock* block) { 56 57 AliasAnalysis& AA = getAnalysis<AliasAnalysis>(); 58 TargetData& TD = getAnalysis<TargetData>(); 59 BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin(); 60 BasicBlock::iterator QI = C.getInstruction(); 61 62 // If the starting point was specifiy, use it 63 if (start) { 64 QI = start; 65 blockBegin = start->getParent()->end(); 66 // If the starting point wasn't specified, but the block was, use it 67 } else if (!start && block) { 68 QI = block->end(); 69 blockBegin = block->end(); 70 } 71 72 // Walk backwards through the block, looking for dependencies 73 while (QI != blockBegin) { 74 --QI; 75 76 // If this inst is a memory op, get the pointer it accessed 77 Value* pointer = 0; 78 uint64_t pointerSize = 0; 79 if (StoreInst* S = dyn_cast<StoreInst>(QI)) { 80 pointer = S->getPointerOperand(); 81 pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType()); 82 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) { 83 pointer = AI; 84 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize())) 85 pointerSize = C->getZExtValue() * \ 86 TD.getABITypeSize(AI->getAllocatedType()); 87 else 88 pointerSize = ~0UL; 89 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) { 90 pointer = V->getOperand(0); 91 pointerSize = TD.getTypeStoreSize(V->getType()); 92 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) { 93 pointer = F->getPointerOperand(); 94 95 // FreeInsts erase the entire structure 96 pointerSize = ~0UL; 97 } else if (CallSite::get(QI).getInstruction() != 0) { 98 AliasAnalysis::ModRefBehavior result = 99 
AA.getModRefBehavior(cast<CallInst>(QI)->getCalledFunction(), 100 CallSite::get(QI)); 101 if (result != AliasAnalysis::DoesNotAccessMemory && 102 result != AliasAnalysis::OnlyReadsMemory) { 103 if (!start && !block) { 104 depGraphLocal.insert(std::make_pair(C.getInstruction(), 105 std::make_pair(QI, true))); 106 reverseDep[QI].insert(C.getInstruction()); 107 } 108 return QI; 109 } else { 110 continue; 111 } 112 } else 113 continue; 114 115 if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) { 116 if (!start && !block) { 117 depGraphLocal.insert(std::make_pair(C.getInstruction(), 118 std::make_pair(QI, true))); 119 reverseDep[QI].insert(C.getInstruction()); 120 } 121 return QI; 122 } 123 } 124 125 // No dependence found 126 depGraphLocal.insert(std::make_pair(C.getInstruction(), 127 std::make_pair(NonLocal, true))); 128 reverseDep[NonLocal].insert(C.getInstruction()); 129 return NonLocal; 130} 131 132/// nonLocalHelper - Private helper used to calculate non-local dependencies 133/// by doing DFS on the predecessors of a block to find its dependencies 134void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query, 135 BasicBlock* block, 136 DenseMap<BasicBlock*, Value*>& resp) { 137 // Set of blocks that we've already visited in our DFS 138 SmallPtrSet<BasicBlock*, 4> visited; 139 // If we're updating a dirtied cache entry, we don't need to reprocess 140 // already computed entries. 
141 for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), 142 E = resp.end(); I != E; ++I) 143 if (I->second != Dirty) 144 visited.insert(I->first); 145 146 // Current stack of the DFS 147 SmallVector<BasicBlock*, 4> stack; 148 stack.push_back(block); 149 150 // Do a basic DFS 151 while (!stack.empty()) { 152 BasicBlock* BB = stack.back(); 153 154 // If we've already visited this block, no need to revist 155 if (visited.count(BB)) { 156 stack.pop_back(); 157 continue; 158 } 159 160 // If we find a new block with a local dependency for query, 161 // then we insert the new dependency and backtrack. 162 if (BB != block) { 163 visited.insert(BB); 164 165 Instruction* localDep = getDependency(query, 0, BB); 166 if (localDep != NonLocal) { 167 resp.insert(std::make_pair(BB, localDep)); 168 stack.pop_back(); 169 170 continue; 171 } 172 // If we re-encounter the starting block, we still need to search it 173 // because there might be a dependency in the starting block AFTER 174 // the position of the query. This is necessary to get loops right. 175 } else if (BB == block && stack.size() > 1) { 176 visited.insert(BB); 177 178 Instruction* localDep = getDependency(query, 0, BB); 179 if (localDep != query) 180 resp.insert(std::make_pair(BB, localDep)); 181 182 stack.pop_back(); 183 184 continue; 185 } 186 187 // If we didn't find anything, recurse on the precessors of this block 188 bool predOnStack = false; 189 bool inserted = false; 190 for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB); 191 PI != PE; ++PI) 192 if (!visited.count(*PI)) { 193 stack.push_back(*PI); 194 inserted = true; 195 } else 196 predOnStack = true; 197 198 // If we inserted a new predecessor, then we'll come back to this block 199 if (inserted) 200 continue; 201 // If we didn't insert because we have no predecessors, then this 202 // query has no dependency at all. 
203 else if (!inserted && !predOnStack) { 204 resp.insert(std::make_pair(BB, None)); 205 // If we didn't insert because our predecessors are already on the stack, 206 // then we might still have a dependency, but it will be discovered during 207 // backtracking. 208 } else if (!inserted && predOnStack){ 209 resp.insert(std::make_pair(BB, NonLocal)); 210 } 211 212 stack.pop_back(); 213 } 214} 215 216/// getNonLocalDependency - Fills the passed-in map with the non-local 217/// dependencies of the queries. The map will contain NonLocal for 218/// blocks between the query and its dependencies. 219void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query, 220 DenseMap<BasicBlock*, Value*>& resp) { 221 if (depGraphNonLocal.count(query)) { 222 DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query]; 223 NumCacheNonlocal++; 224 225 SmallVector<BasicBlock*, 4> dirtied; 226 for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(), 227 E = cached.end(); I != E; ++I) 228 if (I->second == Dirty) 229 dirtied.push_back(I->first); 230 231 for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(), 232 E = dirtied.end(); I != E; ++I) { 233 Instruction* localDep = getDependency(query, 0, *I); 234 if (localDep != NonLocal) 235 cached[*I] = localDep; 236 else { 237 cached.erase(*I); 238 nonLocalHelper(query, *I, cached); 239 } 240 } 241 242 resp = cached; 243 244 return; 245 } else 246 NumUncacheNonlocal++; 247 248 // If not, go ahead and search for non-local deps. 249 nonLocalHelper(query, query->getParent(), resp); 250 251 // Update the non-local dependency cache 252 for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end(); 253 I != E; ++I) { 254 depGraphNonLocal[query].insert(*I); 255 reverseDepNonLocal[I->second].insert(query); 256 } 257} 258 259/// getDependency - Return the instruction on which a memory operation 260/// depends. 
The local paramter indicates if the query should only 261/// evaluate dependencies within the same basic block. 262Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query, 263 Instruction* start, 264 BasicBlock* block) { 265 // Start looking for dependencies with the queried inst 266 BasicBlock::iterator QI = query; 267 268 // Check for a cached result 269 std::pair<Instruction*, bool> cachedResult = depGraphLocal[query]; 270 // If we have a _confirmed_ cached entry, return it 271 if (cachedResult.second) 272 return cachedResult.first; 273 else if (cachedResult.first && cachedResult.first != NonLocal) 274 // If we have an unconfirmed cached entry, we can start our search from there 275 QI = cachedResult.first; 276 277 if (start) 278 QI = start; 279 else if (!start && block) 280 QI = block->end(); 281 282 AliasAnalysis& AA = getAnalysis<AliasAnalysis>(); 283 TargetData& TD = getAnalysis<TargetData>(); 284 285 // Get the pointer value for which dependence will be determined 286 Value* dependee = 0; 287 uint64_t dependeeSize = 0; 288 bool queryIsVolatile = false; 289 if (StoreInst* S = dyn_cast<StoreInst>(query)) { 290 dependee = S->getPointerOperand(); 291 dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType()); 292 queryIsVolatile = S->isVolatile(); 293 } else if (LoadInst* L = dyn_cast<LoadInst>(query)) { 294 dependee = L->getPointerOperand(); 295 dependeeSize = TD.getTypeStoreSize(L->getType()); 296 queryIsVolatile = L->isVolatile(); 297 } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) { 298 dependee = V->getOperand(0); 299 dependeeSize = TD.getTypeStoreSize(V->getType()); 300 } else if (FreeInst* F = dyn_cast<FreeInst>(query)) { 301 dependee = F->getPointerOperand(); 302 303 // FreeInsts erase the entire structure, not just a field 304 dependeeSize = ~0UL; 305 } else if (CallSite::get(query).getInstruction() != 0) 306 return getCallSiteDependency(CallSite::get(query), start, block); 307 else if (isa<AllocationInst>(query)) 308 return 
None; 309 else 310 return None; 311 312 BasicBlock::iterator blockBegin = block ? block->begin() 313 : query->getParent()->begin(); 314 315 // Walk backwards through the basic block, looking for dependencies 316 while (QI != blockBegin) { 317 --QI; 318 319 // If this inst is a memory op, get the pointer it accessed 320 Value* pointer = 0; 321 uint64_t pointerSize = 0; 322 if (StoreInst* S = dyn_cast<StoreInst>(QI)) { 323 // All volatile loads/stores depend on each other 324 if (queryIsVolatile && S->isVolatile()) { 325 if (!start && !block) { 326 depGraphLocal.insert(std::make_pair(query, std::make_pair(S, true))); 327 reverseDep[S].insert(query); 328 } 329 330 return S; 331 } 332 333 pointer = S->getPointerOperand(); 334 pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType()); 335 } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) { 336 // All volatile loads/stores depend on each other 337 if (queryIsVolatile && L->isVolatile()) { 338 if (!start && !block) { 339 depGraphLocal.insert(std::make_pair(query, std::make_pair(L, true))); 340 reverseDep[L].insert(query); 341 } 342 343 return L; 344 } 345 346 pointer = L->getPointerOperand(); 347 pointerSize = TD.getTypeStoreSize(L->getType()); 348 } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) { 349 pointer = AI; 350 if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize())) 351 pointerSize = C->getZExtValue() * \ 352 TD.getABITypeSize(AI->getAllocatedType()); 353 else 354 pointerSize = ~0UL; 355 } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) { 356 pointer = V->getOperand(0); 357 pointerSize = TD.getTypeStoreSize(V->getType()); 358 } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) { 359 pointer = F->getPointerOperand(); 360 361 // FreeInsts erase the entire structure 362 pointerSize = ~0UL; 363 } else if (CallSite::get(QI).getInstruction() != 0) { 364 // Call insts need special handling. 
Check if they can modify our pointer 365 AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI), 366 dependee, dependeeSize); 367 368 if (MR != AliasAnalysis::NoModRef) { 369 // Loads don't depend on read-only calls 370 if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref) 371 continue; 372 373 if (!start && !block) { 374 depGraphLocal.insert(std::make_pair(query, 375 std::make_pair(QI, true))); 376 reverseDep[QI].insert(query); 377 } 378 379 return QI; 380 } else { 381 continue; 382 } 383 } 384 385 // If we found a pointer, check if it could be the same as our pointer 386 if (pointer) { 387 AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize, 388 dependee, dependeeSize); 389 390 if (R != AliasAnalysis::NoAlias) { 391 // May-alias loads don't depend on each other 392 if (isa<LoadInst>(query) && isa<LoadInst>(QI) && 393 R == AliasAnalysis::MayAlias) 394 continue; 395 396 if (!start && !block) { 397 depGraphLocal.insert(std::make_pair(query, 398 std::make_pair(QI, true))); 399 reverseDep[QI].insert(query); 400 } 401 402 return QI; 403 } 404 } 405 } 406 407 // If we found nothing, return the non-local flag 408 if (!start && !block) { 409 depGraphLocal.insert(std::make_pair(query, 410 std::make_pair(NonLocal, true))); 411 reverseDep[NonLocal].insert(query); 412 } 413 414 return NonLocal; 415} 416 417/// removeInstruction - Remove an instruction from the dependence analysis, 418/// updating the dependence of instructions that previously depended on it. 419/// This method attempts to keep the cache coherent using the reverse map. 
void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;

  depMapType::iterator depGraphEntry = depGraphLocal.find(rem);

  if (depGraphEntry != depGraphLocal.end()) {
    if (depGraphEntry->second.first != NonLocal &&
        depGraphEntry->second.second) {
      // If we have dep info for rem, set them to it
      // Dependers are redirected to the instruction *after* rem's own
      // dependency, so a later getDependency walk restarts just past it.
      BasicBlock::iterator RI = depGraphEntry->second.first;
      RI++;
      newDep = RI;
    } else if (depGraphEntry->second.first == NonLocal &&
               depGraphEntry->second.second ) {
      // If we have a confirmed non-local flag, use it
      newDep = NonLocal;
    } else {
      // Otherwise, use the immediate successor of rem
      // NOTE: This is because, when getDependence is called, it will first
      // check the immediate predecessor of what is in the cache.
      BasicBlock::iterator RI = rem;
      RI++;
      newDep = RI;
    }

    SmallPtrSet<Instruction*, 4>& set = reverseDep[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I) {
      // Insert the new dependencies
      // Mark it as unconfirmed as long as it is not the non-local flag
      // NOTE(review): newDep is never a null pointer here (it is either the
      // NonLocal sentinel or a valid iterator), so '!newDep' is always
      // false and every rewritten entry — including NonLocal — is stored
      // unconfirmed, contradicting the comment above.  Confirm whether the
      // intent was '(newDep == NonLocal)'.
      depGraphLocal[*I] = std::make_pair(newDep, !newDep);
    }

    // rem no longer has dependers of its own.
    reverseDep.erase(rem);
  }

  if (reverseDepNonLocal.count(rem)) {
    // Any cached non-local result that named rem as a dependency is now
    // stale: mark those per-block entries Dirty so getNonLocalDependency
    // recomputes them lazily on the next query.
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[rem];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == rem)
          DI->second = Dirty;

    reverseDepNonLocal.erase(rem);
  }

  // Let alias analysis drop any state it keeps for the removed value.
  getAnalysis<AliasAnalysis>().deleteValue(rem);
}