MemoryDependenceAnalysis.cpp revision 6fbc1969e94cb00c82ab84e1dfe243e7388d3b1b
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation  --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
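//
// A sketch of typical client usage, from inside a pass that has required this
// analysis (SomeLoad is a placeholder name, not something from this file):
//
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Res = MD.getDependency(SomeLoad);
//   if (Res.isDef()) {
//     // SomeLoad reads exactly the value Res.getInst() produced/stored.
//   } else if (Res.isClobber()) {
//     // Res.getInst() may write the queried location; scanning stopped here.
//   } else if (Res.isNonLocal()) {
//     // No dependence within SomeLoad's block; use the non-local queries.
//   }
//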

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
: FunctionPass(&ID), PredCache(0) {
}
MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// Clean up memory in between runs.
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
  InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!"); (void)Found;
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}

/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
                          BasicBlock::iterator ScanIt, BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure.
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      // Debug intrinsics don't cause dependences.
      if (isa<DbgInfoIntrinsic>(Inst)) continue;
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      switch (AA->getModRefInfo(CS, InstCS)) {
      case AliasAnalysis::NoModRef:
        // If the two calls don't interact (e.g. InstCS is readnone) keep
        // scanning.
        continue;
      case AliasAnalysis::Ref:
        // If the two calls read the same memory locations and CS is a readonly
        // function, then we have two cases: 1) the calls may not interfere with
        // each other at all, or 2) the calls may produce the same value.  In
        // case #1 we want to keep scanning; in case #2 we want to return Inst
        // as a Def dependence.  This allows us to CSE in cases like:
        //   X = strlen(P);
        //   memchr(...);
        //   Y = strlen(P);  // Y = X
        if (isReadOnlyCall) {
          if (CS.getCalledFunction() != 0 &&
              CS.getCalledFunction() == InstCS.getCalledFunction())
            return MemDepResult::getDef(Inst);
          // Ignore unrelated read/read call dependences.
          continue;
        }
        // FALL THROUGH
      default:
        return MemDepResult::getClobber(Inst);
      }
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases
/// with read-only operations.
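///
/// For example (an illustrative sketch, not code from this file), given:
///   store i32 4, i32* %P
///   %B = load i32* %Q       ; %Q may-aliases %P
///   %A = load i32* %P
/// a query for %A's location returns a Def on the store when the pointers
/// must-alias, a Clobber when they only may-alias, and (because this is a
/// load query) scans straight past the may-aliased load %B.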
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Debug intrinsics don't cause dependences.
    if (isa<DbgInfoIntrinsic>(Inst)) continue;

    // Values depend on loads if the pointers are must-aliased.  This means
    // that a load depends on another must-aliased load from the same pointer.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // A load query doesn't depend on a load that merely may-alias it; loads
      // don't modify memory, so only a must-alias yields a (CSE-able) Def.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;
      // Stores depend on may- and must-aliased loads; loads depend only on
      // must-aliased loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // If alias analysis can tell that this store is guaranteed to not modify
      // the query pointer, ignore it.  Use getModRefInfo to handle cases where
      // the query pointer points to constant memory etc.
      if (AA->getModRefInfo(SI, MemPtr, MemSize) == AliasAnalysis::NoModRef)
        continue;

      // Ok, this store might clobber the query pointer.  Check to see if it is
      // a must alias: in this case, we want to return this as a def.
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
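    // For instance (an illustrative sketch):
    //   %A = alloca i32
    //   %V = load i32* %A   ; Def on the alloca: nothing wrote to %A yet, so
    //                       ; %V can be folded to undef.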
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    switch (AA->getModRefInfo(Inst, MemPtr, MemSize)) {
    case AliasAnalysis::NoModRef:
      // If the call has no effect on the queried pointer, just ignore it.
      continue;
    case AliasAnalysis::Ref:
      // If the call is known to never store to the pointer, and if this is a
      // load query, we can safely ignore it (scan past it).
      if (isLoad)
        continue;
      // FALL THROUGH.
    default:
      // Otherwise, there is a potential dependence.  Return a clobber.
      return MemDepResult::getClobber(Inst);
    }
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, it is a
    // clobber, otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getClobber(QueryInst);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = SI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
    }
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = LI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(LI->getType());
    }
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    CallSite QueryCS = CallSite::get(QueryInst);
    bool isReadOnly = AA->onlyReadsMemory(QueryCS);
    LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
                                           QueryParent);
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0ULL;
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemPtr)
    LocalCache = getPointerDependencyFrom(MemPtr, MemSize,
                                          isa<LoadInst>(QueryInst),
                                          ScanPos, QueryParent);

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}

#ifndef NDEBUG
/// AssertSorted - This method is used when -debug is specified to verify that
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1) Count = Cache.size();
  if (Count == 0) return;

  for (unsigned i = 1; i != unsigned(Count); ++i)
    assert(Cache[i-1] <= Cache[i] && "Cache isn't sorted!");
}
#endif

/// getNonLocalCallDependency - Perform a full dependency query for the
/// specified call, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks that the value is live across.
///
/// This method assumes the instruction returns a "NonLocal" dependency
/// within its own block.
///
/// This returns a reference to an internal data structure that may be
/// invalidated on the next non-local query or when an instruction is
/// removed.  Clients must copy this data if they want it around longer than
/// that.
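///
/// For example (illustrative): if the queried call sits in a block whose
/// lone predecessor P ends in a call that may write memory the query reads,
/// the returned info holds one entry mapping P to a Clobber on that call;
/// blocks that are fully transparent to the query appear as NonLocal entries.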
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
  assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
         "getNonLocalCallDependency should only be used on calls with "
         "non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      ++NumCacheNonLocal;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
    for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
      DirtyBlocks.push_back(*PI);
    ++NumUncacheNonLocal;
  }

  // isReadonlyCall - If this is a read-only call, we can be more aggressive.
  bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);

  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();
  DEBUG(AssertSorted(Cache));

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    DEBUG(AssertSorted(Cache, NumSortedEntries));
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
    if (Entry != Cache.begin() && prior(Entry)->first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry refers to an instruction, start scanning from that
    // instruction so we don't have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
                             QueryCS.getInstruction());
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    if (ScanPos != DirtyBB->begin()) {
      Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall, ScanPos,
                                      DirtyBB);
    } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
      // No dependence found.  If this is the entry block of the function, it is
      // a clobber, otherwise it is non-local.
      Dep = MemDepResult::getNonLocal();
    } else {
      Dep = MemDepResult::getClobber(ScanPos);
    }

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
    } else {
      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
        DirtyBlocks.push_back(*PI);
    }
  }

  return Cache;
}

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
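/// For example (an illustrative sketch), with predecessors P1 and P2 of the
/// query block:
///   P1:  store i32 1, i32* %P    ; contributes an entry (P1, Def(store))
///   P2:  ; no access to %P       ; the walk continues into P2's preds
/// Result accumulates one def/clobber entry per block where the walk stops.
///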
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(Value *Pointer, bool isLoad, BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepEntry> &Result) {
  assert(isa<PointerType>(Pointer->getType()) &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  // We know that the pointer value is live into FromBB; find the def/clobbers
  // from its predecessors.
  const Type *EltTy = cast<PointerType>(Pointer->getType())->getElementType();
  uint64_t PointeeSize = TD->getTypeStoreSize(EltTy);

  // This is the set of blocks we've inspected, and the pointer we consider in
  // each block.  Because of critical edges, we currently bail out if querying
  // a block with multiple different pointers.  This can happen during PHI
  // translation.
  DenseMap<BasicBlock*, Value*> Visited;
  if (!getNonLocalPointerDepFromBB(Pointer, PointeeSize, isLoad, FromBB,
                                   Result, Visited, true))
    return;
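
  // The walk failed (e.g. due to a PHI translation failure); conservatively
  // report a single clobber at the start of the query block.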
  Result.clear();
  Result.push_back(std::make_pair(FromBB,
                                  MemDepResult::getClobber(FromBB->begin())));
}

/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
/// Pointer/PointeeSize using either cached information in Cache or by doing a
/// lookup (which may use dirty cache info if available).  If we do a lookup,
/// add the result to the cache.
MemDepResult MemoryDependenceAnalysis::
GetNonLocalInfoForBlock(Value *Pointer, uint64_t PointeeSize,
                        bool isLoad, BasicBlock *BB,
                        NonLocalDepInfo *Cache, unsigned NumSortedEntries) {

  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry =
    std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                     std::make_pair(BB, MemDepResult()));
  if (Entry != Cache->begin() && prior(Entry)->first == BB)
    --Entry;

  MemDepResult *ExistingResult = 0;
  if (Entry != Cache->begin()+NumSortedEntries && Entry->first == BB)
    ExistingResult = &Entry->second;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->isDirty()) {
    ++NumCacheNonLocalPtr;
    return *ExistingResult;
  }

  // Otherwise, we have to scan for the value.  If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getInst()) {
    assert(ExistingResult->getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getInst();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Pointer, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Pointer, PointeeSize, isLoad,
                                              ScanPos, BB);

  // If we had a dirty entry for the block, update it.  Otherwise, just add
  // a new entry.
  if (ExistingResult)
    *ExistingResult = Dep;
  else
    Cache->push_back(std::make_pair(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it
  // to Cache!
  if (Dep.isNonLocal())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Pointer, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}

/// getNonLocalPointerDepFromBB - Perform a dependency query based on
/// pointer/pointeesize starting at the end of StartBB.  Add any clobber/def
/// results to the results vector and keep track of which blocks are visited in
/// 'Visited'.
///
/// This has special behavior for the first block query (when SkipFirstBlock
/// is true).  In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns false on success, or true to indicate that it could
/// not compute dependence information for some reason.  This should be treated
/// as a clobber dependence on the first instruction in the predecessor block.
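///
/// PHI translation example (an illustrative sketch): when the queried pointer
/// is defined by a PHI in the block being left, e.g.
///   BB:  %P = phi i8* [ %X, %pred1 ], [ %Y, %pred2 ]
/// the walk continues into %pred1 as a query on %X and into %pred2 as a query
/// on %Y.  If a predecessor was already visited with a different pointer, the
/// walk gives up and the failure is treated as a conservative clobber.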
bool MemoryDependenceAnalysis::
getNonLocalPointerDepFromBB(Value *Pointer, uint64_t PointeeSize,
                            bool isLoad, BasicBlock *StartBB,
                            SmallVectorImpl<NonLocalDepEntry> &Result,
                            DenseMap<BasicBlock*, Value*> &Visited,
                            bool SkipFirstBlock) {

  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer, isLoad);

  std::pair<BBSkipFirstBlockPair, NonLocalDepInfo> *CacheInfo =
    &NonLocalPointerDeps[CacheKey];
  NonLocalDepInfo *Cache = &CacheInfo->second;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->first == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // If we have a fully cached result for this query, we can just return the
    // cached results and populate the visited set.  However, we have to verify
    // that we don't already have conflicting results for these blocks.  Check
    // to ensure that if a block in the results set is in the visited set, it
    // was visited with the same pointer query.
    if (!Visited.empty()) {
      for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
           I != E; ++I) {
        DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->first);
        if (VI == Visited.end() || VI->second == Pointer) continue;

        // We have a pointer mismatch in a block.  Just return clobber, saying
        // that something was clobbered in this result.  We could also do a
        // non-fully cached query, but there is little point in doing this.
        return true;
      }
    }

    for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
         I != E; ++I) {
      Visited.insert(std::make_pair(I->first, Pointer));
      if (!I->second.isNonLocal())
        Result.push_back(*I);
    }
    ++NumCacheCompleteNonLocalPtr;
    return false;
  }

  // Otherwise, this is either a new block, a block with an invalid cache
  // entry, or a block whose cache entry we're about to invalidate by putting
  // more info into it than its valid cache info holds.  If the cache is
  // empty, the result of this query will become valid cache info; otherwise
  // it won't be.
  if (Cache->empty())
    CacheInfo->first = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else
    CacheInfo->first = BBSkipFirstBlockPair();

  SmallVector<BasicBlock*, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted.  Previously cached
  // entries will all be sorted.  The entries we add we only sort on demand (we
  // don't insert every element into its sorted position).  We know that we
  // won't get any reuse from currently inserted values, because we don't
  // revisit blocks after we insert info for them.
  unsigned NumSortedEntries = Cache->size();
  DEBUG(AssertSorted(*Cache));

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in BB.  See if we have already
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB.  If we have cached
      // information, we will use it, otherwise we compute it.
      DEBUG(AssertSorted(*Cache, NumSortedEntries));
      MemDepResult Dep = GetNonLocalInfoForBlock(Pointer, PointeeSize, isLoad,
                                                 BB, Cache, NumSortedEntries);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        Result.push_back(NonLocalDepEntry(BB, Dep));
        continue;
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to do
    // phi translation to change it into a value live in the predecessor block.
    // If phi translation fails, then we can't continue dependence analysis.
    Instruction *PtrInst = dyn_cast<Instruction>(Pointer);
    bool NeedsPHITranslation = PtrInst && PtrInst->getParent() == BB;

    // If no PHI translation is needed, just add all the predecessors of this
    // block to scan them as well.
    if (!NeedsPHITranslation) {
      SkipFirstBlock = false;
      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(*PI, Pointer));
        if (InsertRes.second) {
          // First time we've looked at *PI.
          Worklist.push_back(*PI);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer, then we have a phi translation failure and we have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer)
          goto PredTranslationFailure;
      }
      continue;
    }

    // If we do need to do phi translation, then there are a bunch of different
    // cases, because we have to find a Value* live in the predecessor block. We
    // know that PtrInst is defined in this block at least.

    // We may have added values to the cache list before this PHI translation.
    // If so, we haven't done anything to ensure that the cache remains sorted.
    // Sort it now (if needed) so that recursive invocations of
    // getNonLocalPointerDepFromBB and other routines that could reuse the cache
    // value will only see properly sorted cache arrays.
    if (Cache && NumSortedEntries != Cache->size()) {
      std::sort(Cache->begin(), Cache->end());
      NumSortedEntries = Cache->size();
    }

    // If this is directly a PHI node, just use the incoming values for each
    // pred as the phi translated version.
    if (PHINode *PtrPHI = dyn_cast<PHINode>(PtrInst)) {
      Cache = 0;

      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        BasicBlock *Pred = *PI;
        Value *PredPtr = PtrPHI->getIncomingValueForBlock(Pred);

        // Check to see if we have already visited this pred block with another
        // pointer.  If so, we can't do this lookup.  This failure can occur
        // with PHI translation when a critical edge exists and the PHI node in
        // the successor translates to a pointer value different than the
        // pointer the block was first analyzed with.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(Pred, PredPtr));

        if (!InsertRes.second) {
          // If the predecessor was visited with PredPtr, then we already did
          // the analysis and can ignore it.
          if (InsertRes.first->second == PredPtr)
            continue;

          // Otherwise, the block was previously analyzed with a different
          // pointer.  We can't represent the result of this case, so we just
          // treat this as a phi translation failure.
          goto PredTranslationFailure;
        }

        // FIXME: it is entirely possible that PHI translating will end up with
        // the same value.  Consider PHI translating something like:
        // X = phi [x, bb1], [y, bb2].  PHI translating for bb1 doesn't *need*
        // to recurse here, pedantically speaking.

        // If we have a problem phi translating, fall through to the code below
        // to handle the failure condition.
        if (getNonLocalPointerDepFromBB(PredPtr, PointeeSize, isLoad, Pred,
                                        Result, Visited))
          goto PredTranslationFailure;
      }

      // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();

      // Since we did phi translation, the "Cache" set won't contain all of the
      // results for the query.  This is ok (we can still use it to accelerate
      // specific block queries) but we can't do the fastpath "return all
      // results from the set".  Clear out the indicator for this.
      CacheInfo->first = BBSkipFirstBlockPair();
      SkipFirstBlock = false;
      continue;
    }

    // TODO: BITCAST, GEP.

    //   cerr << "MEMDEP: Could not PHI translate: " << *Pointer;
    //   if (isa<BitCastInst>(PtrInst) || isa<GetElementPtrInst>(PtrInst))
    //     cerr << "OP:\t\t\t\t" << *PtrInst->getOperand(0);
  PredTranslationFailure:

    if (Cache == 0) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->second;
      NumSortedEntries = Cache->size();
    }

    // Since we did phi translation, the "Cache" set won't contain all of the
    // results for the query.  This is ok (we can still use it to accelerate
    // specific block queries) but we can't do the fastpath "return all
    // results from the set".  Clear out the indicator for this.
    CacheInfo->first = BBSkipFirstBlockPair();

    // If *nothing* works, mark the pointer as being clobbered by the first
    // instruction in this block.
    //
    // If this is the magic first block, return this as a clobber of the whole
    // incoming value.  Since we can't phi translate to one of the predecessors,
    // we have to bail out.
    if (SkipFirstBlock)
      return true;

    for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
      assert(I != Cache->rend() && "Didn't find current block??");
      if (I->first != BB)
        continue;

      assert(I->second.isNonLocal() &&
             "Should only be here with transparent block");
      I->second = MemDepResult::getClobber(BB->begin());
      ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey);
      Result.push_back(*I);
      break;
    }
  }

  // Okay, we're done now.  If we added new values to the cache, re-sort it.
  switch (Cache->size()-NumSortedEntries) {
  case 0:
    // Done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    NonLocalDepEntry Val = Cache->back();
    Cache->pop_back();
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache->begin(), Cache->end()-1, Val);
    Cache->insert(Entry, Val);
    // FALL THROUGH.
  }
  case 1:
    // One new entry, just insert the new value at the appropriate position.
    if (Cache->size() != 1) {
      NonLocalDepEntry Val = Cache->back();
      Cache->pop_back();
      NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache->begin(), Cache->end(), Val);
      Cache->insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache->begin(), Cache->end());
  }
  DEBUG(AssertSorted(*Cache));
  return false;
}

/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
void MemoryDependenceAnalysis::
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It =
    NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end()) return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.second;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].second.getInst();
    if (Target == 0) continue;  // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].first);

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}

/// invalidateCachedPointerInfo - This method is used to invalidate cached
/// information about the specified pointer, because it may be too
/// conservative in memdep.  This is an optional call that can be used when
/// the client detects an equivalence between the pointer and some other
/// value and replaces the other value with ptr.  This can make Ptr available
/// in places that the cached info does not necessarily describe.
void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
  // If Ptr isn't really a pointer, just ignore it.
  if (!isa<PointerType>(Ptr->getType())) return;
  // Flush store info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
  // Flush load info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
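/// For example (a sketch of the mechanism): if some instruction I had a cached
/// dependence on RemInst, I's entry is replaced with a "dirty" entry pointing
/// at the instruction after RemInst, so a later query for I can resume its
/// scan there rather than rescanning the whole block.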
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  If the instruction has non-pointer type, then it can't be a pointer
  // base.

  // Remove it from both the load info and the store info.  The instruction
  // can't be in either of these maps if it is non-pointer.
  if (isa<PointerType>(RemInst->getType())) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other values,
  // we need to replace its entry with a dirty version of the instruction after
  // it.  If RemInst is a terminator, we use a null dirty value.
  //
  // Using a dirty version of the instruction after RemInst saves having to scan
  // the entire block to get to this point.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on NewDirtyVal's
      // instruction.
      assert(NewDirtyVal.getInst() && "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
    ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallPtrSet<ValueIsLoadPair, 4> &Set = ReversePtrDepIt->second;
    SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;

    for (SmallPtrSet<ValueIsLoadPair, 4>::iterator I = Set.begin(),
         E = Set.end(); I != E; ++I) {
      ValueIsLoadPair P = *I;
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].second;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].first = BBSkipFirstBlockPair();

      // Update any entries for RemInst to use the instruction after it.
      for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
           DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->second = NewDirtyVal;

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo.  Changing the dirty entry to its
      // subsequent value may invalidate the sortedness.
      std::sort(NLPDI.begin(), NLPDI.end());
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
        .insert(ReversePtrDepsToAdd.back().second);
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (CachedNonLocalPointerInfo::const_iterator I =
         NonLocalPointerDeps.begin(), E = NonLocalPointerDeps.end();
       I != E; ++I) {
    assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
    const NonLocalDepInfo &Val = I->second.second;
    for (NonLocalDepInfo::const_iterator II = Val.begin(), EE = Val.end();
         II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs as NLPD value");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseNonLocalPtrDepTy::const_iterator
       I = ReverseNonLocalPtrDeps.begin(),
       E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in rev NLPD map");

    for (SmallPtrSet<ValueIsLoadPair, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != ValueIsLoadPair(D, false) &&
             *II != ValueIsLoadPair(D, true) &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}