MemoryDependenceAnalysis.cpp revision e79be944c8ced0a0cb80ede8cb9f97e4fdc6778f
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation  --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Target/TargetData.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");
char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                     "Memory Dependence Analysis", false, true);

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  TD = &getAnalysis<TargetData>();
  return false;
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
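///
/// For example (illustrative only): a preceding call to the same read-only
/// function, e.g. strlen(P) seen again later as strlen(P), produces a Def
/// result, while any other potentially interfering call produces a Clobber.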
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, BasicBlock::iterator ScanIt,
                          BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed
    Value *Pointer = 0;
    uint64_t PointerSize = 0;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Pointer = S->getPointerOperand();
      PointerSize = TD->getTypeStoreSize(S->getOperand(0)->getType());
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Pointer = V->getOperand(0);
      PointerSize = TD->getTypeStoreSize(V->getType());
    } else if (FreeInst *F = dyn_cast<FreeInst>(Inst)) {
      Pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      PointerSize = ~0ULL;
    } else if (isa<CallInst>(Inst) || isa<InvokeInst>(Inst)) {
      CallSite InstCS = CallSite::get(Inst);
      // If these two calls do not interfere, look past it.
      if (AA->getModRefInfo(CS, InstCS) == AliasAnalysis::NoModRef)
        continue;

      // FIXME: If this is a ref/ref result, we should ignore it!
      //  X = strlen(P);
      //  Y = strlen(Q);
      //  Z = strlen(P);  // Z = X

      // If they interfere, we generally return clobber.  However, if they are
      // calls to the same read-only function, we return Def.
      if (!AA->onlyReadsMemory(CS) || CS.getCalledFunction() == 0 ||
          CS.getCalledFunction() != InstCS.getCalledFunction())
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Pointer, PointerSize) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.
  return MemDepResult::getNonLocal();
}

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.
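///
/// For example (illustrative only): a must-aliased store to the queried
/// location yields a Def of that store, a may-aliased store yields a Clobber,
/// and walking off the top of the block yields NonLocal.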
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(Value *MemPtr, uint64_t MemSize, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {
  // If the scan starts at the beginning of the block, any dependence is
  // non-local.
  if (ScanIt == BB->begin())
    return MemDepResult::getNonLocal();

  // Walk backwards through the basic block, looking for dependencies
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // Values depend on loads if the pointers are must-aliased.  This means
    // that a load depends on another must-aliased load from the same pointer.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(LI->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // For load queries, a load that only may-alias the queried pointer is
      // not treated as a dependence; only a must-alias gives us a Def.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(Pointer, PointerSize, MemPtr, MemSize);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    if (AllocationInst *AI = dyn_cast<AllocationInst>(Inst)) {
      Value *AccessPtr = MemPtr->getUnderlyingObject();

      if (AccessPtr == AI ||
          AA->alias(AI, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(AI);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    // FIXME: If this is a load, we should ignore readonly calls!
    if (AA->getModRefInfo(Inst, MemPtr, MemSize) == AliasAnalysis::NoModRef)
      continue;

    // Otherwise, there is a dependence.
    return MemDepResult::getClobber(Inst);
  }

  // If we found nothing, return the non-local flag.
  return MemDepResult::getNonLocal();
}

/// getDependency - Return the instruction on which a memory operation
/// depends.
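///
/// A typical client query might look like this (illustrative only; 'MD' is
/// assumed to be the MemoryDependenceAnalysis instance for the function):
///   MemDepResult Res = MD.getDependency(LoadI);
///   if (Instruction *DepInst = Res.getInst()) {
///     // LoadI depends on DepInst within its own block.
///   } else if (Res.isNonLocal()) {
///     // Any dependency lies in a predecessor block; use getNonLocalDependency.
///   }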
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    SmallPtrSet<Instruction*, 4> &InstMap = ReverseLocalDeps[Inst];
    InstMap.erase(QueryInst);
    if (InstMap.empty())
      ReverseLocalDeps.erase(Inst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  Value *MemPtr = 0;
  uint64_t MemSize = 0;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // First instruction in the block -> non local.
    LocalCache = MemDepResult::getNonLocal();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = SI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
    }
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else {
      MemPtr = LI->getPointerOperand();
      MemSize = TD->getTypeStoreSize(LI->getType());
    }
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    LocalCache = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                           QueryParent);
  } else if (FreeInst *FI = dyn_cast<FreeInst>(QueryInst)) {
    MemPtr = FI->getPointerOperand();
    // FreeInsts erase the entire structure, not just a field.
    MemSize = ~0ULL;
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemPtr)
    LocalCache = getPointerDependencyFrom(MemPtr, MemSize,
                                          isa<LoadInst>(QueryInst),
                                          ScanPos, QueryParent);

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}

/// getNonLocalDependency - Perform a full dependency query for the
/// specified instruction, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for every block that the value is live across.
///
/// This method assumes the instruction returns a "nonlocal" dependency
/// within its own block.
///
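/// A caller typically walks the returned block/result pairs, e.g.
/// (illustrative only):
///   const NonLocalDepInfo &Deps = MD.getNonLocalDependency(QueryInst);
///   for (NonLocalDepInfo::const_iterator I = Deps.begin(), E = Deps.end();
///        I != E; ++I) {
///     BasicBlock *BB = I->first;    // Block the result applies to.
///     MemDepResult Res = I->second; // Dependence of QueryInst in that block.
///   }
///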
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalDependency(Instruction *QueryInst) {
  assert(isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst) ||
         isa<LoadInst>(QueryInst) || isa<StoreInst>(QueryInst));
  assert(getDependency(QueryInst).isNonLocal() &&
     "getNonLocalDependency should only be used on insts with non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryInst];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      NumCacheNonLocal++;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->second.isDirty())
        DirtyBlocks.push_back(I->first);

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryInst->getParent();
    DirtyBlocks.append(pred_begin(QueryBB), pred_end(QueryBB));
    NumUncacheNonLocal++;
  }

  // Visited is checked first so each block is processed at most once; the
  // sorted prefix of the cache vector supports the binary searches below.
  SmallPtrSet<BasicBlock*, 64> Visited;

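  // Only the first NumSortedEntries entries of Cache are sorted (and thus
  // binary searchable); entries appended during the walk below stay unsorted.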
  unsigned NumSortedEntries = Cache.size();

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       std::make_pair(DirtyBB, MemDepResult()));
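    // upper_bound may stop just past an entry for DirtyBB; if the entry
    // before it is for DirtyBB, step back so the check below can find it.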
    if (Entry != Cache.begin() && (&*Entry)[-1].first == DirtyBB)
      --Entry;

    MemDepResult *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->first == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->second.isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &Entry->second;
    }

    // If the dirty entry refers to an instruction, start scanning from it so
    // we don't have to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getInst()) {
        ScanPos = Inst;

        // We're removing QueryInst's use of Inst.
        SmallPtrSet<Instruction*, 4> &InstMap = ReverseNonLocalDeps[Inst];
        InstMap.erase(QueryInst);
        if (InstMap.empty()) ReverseNonLocalDeps.erase(Inst);
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    Value *MemPtr = 0;
    uint64_t MemSize = 0;

    if (BasicBlock::iterator(QueryInst) == DirtyBB->begin()) {
      // First instruction in the block -> non local.
      Dep = MemDepResult::getNonLocal();
    } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
      // If this is a volatile store, don't mess around with it.  Just return the
      // previous instruction as a clobber.
      if (SI->isVolatile())
        Dep = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
      else {
        MemPtr = SI->getPointerOperand();
        MemSize = TD->getTypeStoreSize(SI->getOperand(0)->getType());
      }
    } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
      // If this is a volatile load, don't mess around with it.  Just return the
      // previous instruction as a clobber.
      if (LI->isVolatile())
        Dep = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
      else {
        MemPtr = LI->getPointerOperand();
        MemSize = TD->getTypeStoreSize(LI->getType());
      }
    } else {
      assert(isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst));
      Dep = getCallSiteDependencyFrom(CallSite::get(QueryInst), ScanPos,
                                      DirtyBB);
    }

    if (MemPtr)
      Dep = getPointerDependencyFrom(MemPtr, MemSize, isa<LoadInst>(QueryInst),
                                     ScanPos, DirtyBB);

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      *ExistingResult = Dep;
    else
      Cache.push_back(std::make_pair(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryInst);
    } else {
      // If the block *is* completely transparent to the value, we need to
      // check the predecessors of this block.  Add them to our worklist.
      DirtyBlocks.append(pred_begin(DirtyBB), pred_end(DirtyBB));
    }
  }

  return Cache;
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
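///
/// A transformation that deletes an instruction should notify this analysis
/// before erasing it, e.g. (illustrative only):
///   MD.removeInstruction(DeadInst);
///   DeadInst->eraseFromParent();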
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->second.getInst())
        ReverseNonLocalDeps[Inst].erase(RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst()) {
      SmallPtrSet<Instruction*, 4> &RLD = ReverseLocalDeps[Inst];
      RLD.erase(RemInst);
      if (RLD.empty())
        ReverseLocalDeps.erase(Inst);
    }

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    // Anything that was locally dependent on RemInst is now going to be
    // dependent on the instruction after RemInst.  It will have the dirty flag
    // set so it will rescan.  This saves having to scan the entire block to get
    // to this point.
    Instruction *NewDepInst = next(BasicBlock::iterator(RemInst));

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = MemDepResult::getDirty(NewDepInst);

      // Make sure to remember that new things depend on NewDepInst.
      ReverseDepsToAdd.push_back(std::make_pair(NewDepInst,
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->second.getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        Instruction *NextI = 0;
        if (!RemInst->isTerminator()) {
          NextI = next(BasicBlock::iterator(RemInst));
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
        }
        DI->second = MemDepResult::getDirty(NextI);
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->second.getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }
}