MemoryDependenceAnalysis.cpp revision 5f589dc77f459d169495ab1e0e03de3740a3b290
1//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation  --*- C++ -*-===//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements an analysis that determines, for a given memory
11// operation, what preceding memory operations it depends on.  It builds on
12// alias analysis information, and tries to provide a lazy, caching interface to
13// a common kind of alias information query.
14//
15//===----------------------------------------------------------------------===//
16
17#define DEBUG_TYPE "memdep"
18#include "llvm/Analysis/MemoryDependenceAnalysis.h"
19#include "llvm/Constants.h"
20#include "llvm/Instructions.h"
21#include "llvm/Function.h"
22#include "llvm/Analysis/AliasAnalysis.h"
23#include "llvm/Support/CFG.h"
24#include "llvm/Support/CommandLine.h"
25#include "llvm/Support/Debug.h"
26#include "llvm/Target/TargetData.h"
27#include "llvm/ADT/Statistic.h"
28
29using namespace llvm;
30
31// Control the calculation of non-local dependencies by only examining the
32// predecessors if the basic block has less than X amount (50 by default).
33static cl::opt<int>
34PredLimit("nonlocaldep-threshold", cl::Hidden, cl::init(50),
35          cl::desc("Control the calculation of non-local"
36                   "dependencies (default = 50)"));
37
38STATISTIC(NumCacheNonlocal, "Number of cached non-local responses");
39STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");
40
41char MemoryDependenceAnalysis::ID = 0;
42
43Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
44Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
45Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;
46
47// Register this pass...
48static RegisterPass<MemoryDependenceAnalysis> X("memdep",
49                                     "Memory Dependence Analysis", false, true);
50
51/// verifyRemoved - Verify that the specified instruction does not occur
52/// in our internal data structures.
53void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
54  for (depMapType::const_iterator I = depGraphLocal.begin(),
55       E = depGraphLocal.end(); I != E; ++I) {
56    assert(I->first != D && "Inst occurs in data structures");
57    assert(I->second.first != D && "Inst occurs in data structures");
58  }
59
60  for (nonLocalDepMapType::const_iterator I = depGraphNonLocal.begin(),
61       E = depGraphNonLocal.end(); I != E; ++I) {
62    assert(I->first != D && "Inst occurs in data structures");
63    for (DenseMap<BasicBlock*, Value*>::iterator II = I->second.begin(),
64         EE = I->second.end(); II  != EE; ++II)
65      assert(II->second != D && "Inst occurs in data structures");
66  }
67
68  for (reverseDepMapType::const_iterator I = reverseDep.begin(),
69       E = reverseDep.end(); I != E; ++I)
70    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
71         EE = I->second.end(); II != EE; ++II)
72      assert(*II != D && "Inst occurs in data structures");
73
74  for (reverseDepMapType::const_iterator I = reverseDepNonLocal.begin(),
75       E = reverseDepNonLocal.end();
76       I != E; ++I)
77    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
78         EE = I->second.end(); II != EE; ++II)
79      assert(*II != D && "Inst occurs in data structures");
80}
81
82/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
83///
84void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
85  AU.setPreservesAll();
86  AU.addRequiredTransitive<AliasAnalysis>();
87  AU.addRequiredTransitive<TargetData>();
88}
89
90/// getCallSiteDependency - Private helper for finding the local dependencies
91/// of a call site.
92Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
93                                                             Instruction* start,
94                                                            BasicBlock* block) {
95
96  std::pair<Instruction*, bool>& cachedResult =
97                                              depGraphLocal[C.getInstruction()];
98  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
99  TargetData& TD = getAnalysis<TargetData>();
100  BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
101  BasicBlock::iterator QI = C.getInstruction();
102
103  // If the starting point was specified, use it
104  if (start) {
105    QI = start;
106    blockBegin = start->getParent()->begin();
107  // If the starting point wasn't specified, but the block was, use it
108  } else if (!start && block) {
109    QI = block->end();
110    blockBegin = block->begin();
111  }
112
113  // Walk backwards through the block, looking for dependencies
114  while (QI != blockBegin) {
115    --QI;
116
117    // If this inst is a memory op, get the pointer it accessed
118    Value* pointer = 0;
119    uint64_t pointerSize = 0;
120    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
121      pointer = S->getPointerOperand();
122      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
123    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
124      pointer = AI;
125      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
126        pointerSize = C->getZExtValue() *
127                      TD.getABITypeSize(AI->getAllocatedType());
128      else
129        pointerSize = ~0UL;
130    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
131      pointer = V->getOperand(0);
132      pointerSize = TD.getTypeStoreSize(V->getType());
133    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
134      pointer = F->getPointerOperand();
135
136      // FreeInsts erase the entire structure
137      pointerSize = ~0UL;
138    } else if (CallSite::get(QI).getInstruction() != 0) {
139      AliasAnalysis::ModRefBehavior result =
140                   AA.getModRefBehavior(CallSite::get(QI));
141      if (result != AliasAnalysis::DoesNotAccessMemory) {
142        if (!start && !block) {
143          cachedResult.first = QI;
144          cachedResult.second = true;
145          reverseDep[QI].insert(C.getInstruction());
146        }
147        return QI;
148      } else {
149        continue;
150      }
151    } else
152      continue;
153
154    if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
155      if (!start && !block) {
156        cachedResult.first = QI;
157        cachedResult.second = true;
158        reverseDep[QI].insert(C.getInstruction());
159      }
160      return QI;
161    }
162  }
163
164  // No dependence found
165  cachedResult.first = NonLocal;
166  cachedResult.second = true;
167  reverseDep[NonLocal].insert(C.getInstruction());
168  return NonLocal;
169}
170
171/// nonLocalHelper - Private helper used to calculate non-local dependencies
172/// by doing DFS on the predecessors of a block to find its dependencies
/// nonLocalHelper - Private helper used to calculate non-local dependencies
/// by doing DFS on the predecessors of a block to find its dependencies.
/// On return, 'resp' maps each examined block to either a real depending
/// instruction found in it, None (a path with no dependency at all), or
/// NonLocal (the dependency lies further back and is found on backtracking).
void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
                                              BasicBlock* block,
                                         DenseMap<BasicBlock*, Value*>& resp) {
  // Set of blocks that we've already visited in our DFS
  SmallPtrSet<BasicBlock*, 4> visited;
  // If we're updating a dirtied cache entry, we don't need to reprocess
  // already computed entries: pre-mark every non-Dirty block as visited.
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
       E = resp.end(); I != E; ++I)
    if (I->second != Dirty)
      visited.insert(I->first);

  // Current stack of the DFS, seeded with the predecessors of the block
  // containing the query.
  SmallVector<BasicBlock*, 4> stack;
  for (pred_iterator PI = pred_begin(block), PE = pred_end(block);
       PI != PE; ++PI)
    stack.push_back(*PI);

  // Do a basic DFS
  while (!stack.empty()) {
    BasicBlock* BB = stack.back();

    // If we've already visited this block, no need to revisit
    if (visited.count(BB)) {
      stack.pop_back();
      continue;
    }

    // If we find a new block with a local dependency for query,
    // then we insert the new dependency and backtrack.
    if (BB != block) {
      visited.insert(BB);

      // Scoped query: search only within BB (start == 0, block == BB).
      Instruction* localDep = getDependency(query, 0, BB);
      if (localDep != NonLocal) {
        resp.insert(std::make_pair(BB, localDep));
        stack.pop_back();

        continue;
      }
    // If we re-encounter the starting block, we still need to search it
    // because there might be a dependency in the starting block AFTER
    // the position of the query.  This is necessary to get loops right.
    } else if (BB == block) {
      visited.insert(BB);

      Instruction* localDep = getDependency(query, 0, BB);
      // Only record a real answer; finding the query itself means nothing
      // else in the block depends before/after it on this path.
      if (localDep != query)
        resp.insert(std::make_pair(BB, localDep));

      stack.pop_back();

      continue;
    }

    // If we didn't find anything, recurse on the predecessors of this block.
    // Only do this for blocks with a small number of predecessors
    // (bounded by the -nonlocaldep-threshold option).
    bool predOnStack = false;
    bool inserted = false;
    if (std::distance(pred_begin(BB), pred_end(BB)) <= PredLimit) {
      for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB);
           PI != PE; ++PI)
        if (!visited.count(*PI)) {
          stack.push_back(*PI);
          inserted = true;
        } else
          predOnStack = true;
    }

    // If we inserted a new predecessor, then we'll come back to this block
    if (inserted)
      continue;
    // If we didn't insert because we have no predecessors, then this
    // query has no dependency at all.
    else if (!inserted && !predOnStack) {
      resp.insert(std::make_pair(BB, None));
    // If we didn't insert because our predecessors are already on the stack,
    // then we might still have a dependency, but it will be discovered during
    // backtracking.
    } else if (!inserted && predOnStack){
      resp.insert(std::make_pair(BB, NonLocal));
    }

    stack.pop_back();
  }
}
259
260/// getNonLocalDependency - Fills the passed-in map with the non-local
261/// dependencies of the queries.  The map will contain NonLocal for
262/// blocks between the query and its dependencies.
/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the queries.  The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
                                         DenseMap<BasicBlock*, Value*>& resp) {
  if (depGraphNonLocal.count(query)) {
    // Cache hit.  Entries may have been marked Dirty by removeInstruction/
    // dropInstruction; recompute just those before returning.
    DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];
    NumCacheNonlocal++;

    // Collect the dirty blocks first so we don't mutate 'cached' while
    // iterating over it.
    SmallVector<BasicBlock*, 4> dirtied;
    for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
         E = cached.end(); I != E; ++I)
      if (I->second == Dirty)
        dirtied.push_back(I->first);

    for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
         E = dirtied.end(); I != E; ++I) {
      // Try to refresh the entry with a purely local scan of that block.
      Instruction* localDep = getDependency(query, 0, *I);
      if (localDep != NonLocal)
        cached[*I] = localDep;
      else {
        // No local answer any more: drop the stale entry and redo the DFS
        // from this block, reusing the still-valid entries in 'cached'.
        cached.erase(*I);
        nonLocalHelper(query, *I, cached);
      }
    }

    resp = cached;

    // Update the reverse non-local dependency cache
    for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
         I != E; ++I)
      reverseDepNonLocal[I->second].insert(query);

    return;
  } else
    NumUncacheNonlocal++;

  // If not, go ahead and search for non-local deps.
  nonLocalHelper(query, query->getParent(), resp);

  // Update the non-local dependency cache
  for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
       I != E; ++I) {
    depGraphNonLocal[query].insert(*I);
    reverseDepNonLocal[I->second].insert(query);
  }
}
307
308/// getDependency - Return the instruction on which a memory operation
309/// depends.  The local parameter indicates if the query should only
310/// evaluate dependencies within the same basic block.
/// getDependency - Return the instruction on which the memory operation
/// 'query' depends, or one of the sentinels: NonLocal (nothing in the
/// scanned block affects the query) or None (the query has no memory
/// dependency, e.g. it is an allocation or a non-memory instruction).
/// If 'start' is non-null, the backwards scan begins just before 'start';
/// otherwise if 'block' is non-null, all of 'block' is scanned.  Results
/// are cached only for default queries (start == 0 and block == 0).
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
                                                     Instruction* start,
                                                     BasicBlock* block) {
  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;

  // Check for a cached result.  NOTE: operator[] creates the slot if absent;
  // it is only written below for default (unscoped) queries.
  std::pair<Instruction*, bool>& cachedResult = depGraphLocal[query];
  // If we have a _confirmed_ cached entry, return it
  if (!block && !start) {
    if (cachedResult.second)
      return cachedResult.first;
    else if (cachedResult.first && cachedResult.first != NonLocal)
      // If we have an unconfirmed cached entry, we can start our search
      // from there instead of rescanning the whole block.
      QI = cachedResult.first;
  }

  // A scoped query overrides the cached starting point.
  if (start)
    QI = start;
  else if (!start && block)
    QI = block->end();

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value* dependee = 0;
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeStoreSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeStoreSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
    dependeeSize = ~0UL;
  } else if (CallSite::get(query).getInstruction() != 0)
    // Calls are handled by a dedicated helper.
    return getCallSiteDependency(CallSite::get(query), start, block);
  else if (isa<AllocationInst>(query))
    // Allocations depend on nothing.
    return None;
  else
    // NOTE(review): this branch makes the isa<AllocationInst> test above
    // redundant (both return None); kept for clarity of intent.
    return None;

  BasicBlock::iterator blockBegin = block ? block->begin()
                                          : query->getParent()->begin();

  // Walk backwards through the basic block, looking for dependencies
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = S;
          cachedResult.second = true;
          reverseDep[S].insert(query);
        }

        return S;
      }

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start && !block) {
          cachedResult.first = L;
          cachedResult.second = true;
          reverseDep[L].insert(query);
        }

        return L;
      }

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeStoreSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getABITypeSize(AI->getAllocatedType());
      else
        // Variable-sized allocation: conservatively unbounded.
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeStoreSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling. Check if they can modify our pointer
      AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
                                                      dependee, dependeeSize);

      if (MR != AliasAnalysis::NoModRef) {
        // Loads don't depend on read-only calls
        if (isa<LoadInst>(query) && MR == AliasAnalysis::Ref)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }

        return QI;
      } else {
        continue;
      }
    }

    // If we found a pointer, check if it could be the same as our pointer
    if (pointer) {
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        // May-alias loads don't depend on each other
        if (isa<LoadInst>(query) && isa<LoadInst>(QI) &&
            R == AliasAnalysis::MayAlias)
          continue;

        if (!start && !block) {
          cachedResult.first = QI;
          cachedResult.second = true;
          reverseDep[QI].insert(query);
        }

        return QI;
      }
    }
  }

  // If we found nothing, return the non-local flag; cache it only for
  // default queries so a scoped scan can't pollute the cache.
  if (!start && !block) {
    cachedResult.first = NonLocal;
    cachedResult.second = true;
    reverseDep[NonLocal].insert(query);
  }

  return NonLocal;
}
469
470/// dropInstruction - Remove an instruction from the analysis, making
471/// absolutely conservative assumptions when updating the cache.  This is
472/// useful, for example when an instruction is changed rather than removed.
/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache.  This is
/// useful, for example when an instruction is changed rather than removed.
void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
  // Unlink 'drop' from the reverse map of whatever it locally depended on.
  depMapType::iterator depGraphEntry = depGraphLocal.find(drop);
  if (depGraphEntry != depGraphLocal.end())
    reverseDep[depGraphEntry->second.first].erase(drop);

  // Drop dependency information for things that depended on this instr
  SmallPtrSet<Instruction*, 4>& set = reverseDep[drop];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I)
    depGraphLocal.erase(*I);

  depGraphLocal.erase(drop);
  reverseDep.erase(drop);

  // Remove 'drop' from the reverse non-local map for every per-block answer
  // it had cached.  (None is a sentinel with no reverse entry to clean up.)
  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(drop);

  // Any query whose cached non-local answer mentioned 'drop' gets that entry
  // marked Dirty, to be recomputed lazily by getNonLocalDependency().
  if (reverseDepNonLocal.count(drop)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[drop];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == drop)
          DI->second = Dirty;
  }

  // Finally, forget 'drop' entirely.
  reverseDepNonLocal.erase(drop);
  nonLocalDepMapType::iterator I = depGraphNonLocal.find(drop);
  if (I != depGraphNonLocal.end())
    depGraphNonLocal.erase(I);
}
509
510/// removeInstruction - Remove an instruction from the dependence analysis,
511/// updating the dependence of instructions that previously depended on it.
512/// This method attempts to keep the cache coherent using the reverse map.
/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;

  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.  (None is a sentinel with no reverse entry.)
  for (DenseMap<BasicBlock*, Value*>::iterator DI =
       depGraphNonLocal[RemInst].begin(), DE = depGraphNonLocal[RemInst].end();
       DI != DE; ++DI)
    if (DI->second != None)
      reverseDepNonLocal[DI->second].erase(RemInst);

  // If we have a cached local dependence query for this instruction, remove it.
  depMapType::iterator depGraphEntry = depGraphLocal.find(RemInst);
  if (depGraphEntry != depGraphLocal.end()) {
    Instruction *DepInst = depGraphEntry->second.first;
    bool IsConfirmed = depGraphEntry->second.second;

    reverseDep[DepInst].erase(RemInst);

    if (DepInst != NonLocal && DepInst != None && IsConfirmed) {
      // If we have dep info for rem, set them to it: dependents can resume
      // their backwards search from just after RemInst's own dependency.
      BasicBlock::iterator RI = DepInst;
      RI++;

      // If RI is rem, then we use rem's immediate successor.
      if (RI == (BasicBlock::iterator)RemInst) RI++;

      newDep = RI;
    } else if ((DepInst == NonLocal || DepInst == None) && IsConfirmed) {
      // If we have a confirmed non-local flag, use it
      newDep = DepInst;
    } else {
      // Otherwise, use the immediate successor of rem
      // NOTE: This is because, when getDependence is called, it will first
      // check the immediate predecessor of what is in the cache.
      BasicBlock::iterator RI = RemInst;
      RI++;
      newDep = RI;
    }
    depGraphLocal.erase(RemInst);
  } else {
    // Otherwise, use the immediate successor of rem
    // NOTE: This is because, when getDependence is called, it will first
    // check the immediate predecessor of what is in the cache.
    BasicBlock::iterator RI = RemInst;
    RI++;
    newDep = RI;
  }

  // Repoint everything that depended on RemInst at the replacement dep.
  SmallPtrSet<Instruction*, 4>& set = reverseDep[RemInst];
  for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
       I != E; ++I) {
    // Insert the new dependencies.  The entry is confirmed only when newDep
    // is one of the sentinel flags; a real instruction is an unconfirmed
    // starting point for the next search.
    depGraphLocal[*I] = std::make_pair(newDep, (newDep == NonLocal ||
                                                newDep == None));
  }

  reverseDep.erase(RemInst);

  // Mark Dirty every cached non-local answer that named RemInst, so it is
  // recomputed lazily on the next getNonLocalDependency() call.
  if (reverseDepNonLocal.count(RemInst)) {
    SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[RemInst];
    for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
         I != E; ++I)
      for (DenseMap<BasicBlock*, Value*>::iterator DI =
           depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
           DI != DE; ++DI)
        if (DI->second == RemInst)
          DI->second = Dirty;

  }

  reverseDepNonLocal.erase(RemInst);
  depGraphNonLocal.erase(RemInst);

  // Let alias analysis forget the value too.
  getAnalysis<AliasAnalysis>().deleteValue(RemInst);

  DEBUG(verifyRemoved(RemInst));
}
593