MemoryDependenceAnalysis.cpp revision 733c54da1e5432d9d64f88ea960121fa7a16076a
//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation  --*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface to
// a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
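
// Example of the interface this file provides (an illustrative sketch, not
// part of the implementation; 'MDA' and 'LI' are hypothetical values in a
// client pass that requires this analysis):
//
//   MemoryDependenceAnalysis &MDA = getAnalysis<MemoryDependenceAnalysis>();
//   MemDepResult Dep = MDA.getDependency(LI);
//   if (Instruction *DepInst = Dep.getInst()) {
//     // DepInst is the def or clobber that LI depends on within its block.
//   }
//
// Results are computed lazily and cached, so repeated queries are cheap until
// the client mutates the IR and invalidates the affected entries.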

#define DEBUG_TYPE "memdep"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Function.h"
#include "llvm/LLVMContext.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/PredIteratorCache.h"
#include "llvm/Support/Debug.h"
using namespace llvm;

STATISTIC(NumCacheNonLocal, "Number of fully cached non-local responses");
STATISTIC(NumCacheDirtyNonLocal, "Number of dirty cached non-local responses");
STATISTIC(NumUncacheNonLocal, "Number of uncached non-local responses");

STATISTIC(NumCacheNonLocalPtr,
          "Number of fully cached non-local ptr responses");
STATISTIC(NumCacheDirtyNonLocalPtr,
          "Number of cached, but dirty, non-local ptr responses");
STATISTIC(NumUncacheNonLocalPtr,
          "Number of uncached non-local ptr responses");
STATISTIC(NumCacheCompleteNonLocalPtr,
          "Number of block queries that were completely cached");

char MemoryDependenceAnalysis::ID = 0;

// Register this pass...
INITIALIZE_PASS_BEGIN(MemoryDependenceAnalysis, "memdep",
                      "Memory Dependence Analysis", false, true)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(MemoryDependenceAnalysis, "memdep",
                    "Memory Dependence Analysis", false, true)

MemoryDependenceAnalysis::MemoryDependenceAnalysis()
  : FunctionPass(ID), PredCache(0) {
  initializeMemoryDependenceAnalysisPass(*PassRegistry::getPassRegistry());
}
MemoryDependenceAnalysis::~MemoryDependenceAnalysis() {
}

/// Clean up memory in between runs.
void MemoryDependenceAnalysis::releaseMemory() {
  LocalDeps.clear();
  NonLocalDeps.clear();
  NonLocalPointerDeps.clear();
  ReverseLocalDeps.clear();
  ReverseNonLocalDeps.clear();
  ReverseNonLocalPtrDeps.clear();
  PredCache->clear();
}


/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
}

bool MemoryDependenceAnalysis::runOnFunction(Function &) {
  AA = &getAnalysis<AliasAnalysis>();
  if (PredCache == 0)
    PredCache.reset(new PredIteratorCache());
  return false;
}

/// RemoveFromReverseMap - This is a helper function that removes Val from
/// 'Inst's set in ReverseMap.  If the set becomes empty, remove Inst's entry.
template <typename KeyTy>
static void RemoveFromReverseMap(DenseMap<Instruction*,
                                 SmallPtrSet<KeyTy, 4> > &ReverseMap,
                                 Instruction *Inst, KeyTy Val) {
  typename DenseMap<Instruction*, SmallPtrSet<KeyTy, 4> >::iterator
  InstIt = ReverseMap.find(Inst);
  assert(InstIt != ReverseMap.end() && "Reverse map out of sync?");
  bool Found = InstIt->second.erase(Val);
  assert(Found && "Invalid reverse map!");
  (void)Found;  // Silence unused-variable warnings when asserts are disabled.
  if (InstIt->second.empty())
    ReverseMap.erase(InstIt);
}


/// getCallSiteDependencyFrom - Private helper for finding the local
/// dependencies of a call site.
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependencyFrom(CallSite CS, bool isReadOnlyCall,
                          BasicBlock::iterator ScanIt, BasicBlock *BB) {
  // Walk backwards through the block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If this inst is a memory op, get the pointer it accessed.
    AliasAnalysis::Location Loc;
    if (StoreInst *S = dyn_cast<StoreInst>(Inst)) {
      Loc = AliasAnalysis::Location(S->getPointerOperand(),
                                    AA->getTypeStoreSize(S->getValueOperand()
                                                           ->getType()),
                                    S->getMetadata(LLVMContext::MD_tbaa));
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
      Loc = AliasAnalysis::Location(V->getPointerOperand(),
                                    AA->getTypeStoreSize(V->getType()),
                                    V->getMetadata(LLVMContext::MD_tbaa));
    } else if (const CallInst *CI = isFreeCall(Inst)) {
      // Calls to free() erase the entire structure.
      Loc = AliasAnalysis::Location(CI->getArgOperand(0));
    } else if (CallSite InstCS = cast<Value>(Inst)) {
      // Debug intrinsics don't cause dependences.
      if (isa<DbgInfoIntrinsic>(Inst)) continue;
      // If these two calls do not interfere, look past it.
      switch (AA->getModRefInfo(CS, InstCS)) {
      case AliasAnalysis::NoModRef:
        // If the two calls are the same, return InstCS as a Def, so that
        // CS can be found redundant and eliminated.
        if (isReadOnlyCall && InstCS.onlyReadsMemory() &&
            CS.getInstruction()->isIdenticalToWhenDefined(Inst))
          return MemDepResult::getDef(Inst);

        // Otherwise if the two calls don't interact (e.g. InstCS is readnone)
        // keep scanning.
        continue;
      default:
        return MemDepResult::getClobber(Inst);
      }
    } else {
      // Non-memory instruction.
      continue;
    }

    if (AA->getModRefInfo(CS, Loc) != AliasAnalysis::NoModRef)
      return MemDepResult::getClobber(Inst);
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}
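
// Illustrative sketch of consuming the Def result produced above (this helper
// is private; real clients reach it through getDependency).  'MDA' and 'Call'
// are hypothetical values in a GVN-style client:
//
//   MemDepResult Dep = MDA.getDependency(Call);
//   if (Dep.isDef())
//     if (CallInst *Earlier = dyn_cast<CallInst>(Dep.getInst())) {
//       // 'Earlier' is an identical, non-interfering read-only call; the
//       // client may replace Call's uses with Earlier's result.
//     }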

/// getPointerDependencyFrom - Return the instruction on which a memory
/// location depends.  If isLoad is true, this routine ignores may-aliases with
/// read-only operations.  If isLoad is false, this routine ignores may-aliases
/// with reads from read-only locations.
MemDepResult MemoryDependenceAnalysis::
getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
                         BasicBlock::iterator ScanIt, BasicBlock *BB) {

  Value *InvariantTag = 0;

  // Walk backwards through the basic block, looking for dependencies.
  while (ScanIt != BB->begin()) {
    Instruction *Inst = --ScanIt;

    // If we're in an invariant region, no dependencies can be found before
    // we pass an invariant-begin marker.
    if (InvariantTag == Inst) {
      InvariantTag = 0;
      continue;
    }

    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst)) {
      // Debug intrinsics don't (and can't) cause dependences.
      if (isa<DbgInfoIntrinsic>(II)) continue;

      // If we pass an invariant-end marker, then we've just entered an
      // invariant region and can start ignoring dependencies.
      if (II->getIntrinsicID() == Intrinsic::invariant_end) {
        // FIXME: This only considers queries directly on the invariant-tagged
        // pointer, not on query pointers that are indexed off of them.  It'd
        // be nice to handle that at some point.
        AliasAnalysis::AliasResult R =
          AA->alias(AliasAnalysis::Location(II->getArgOperand(2)), MemLoc);
        if (R == AliasAnalysis::MustAlias)
          InvariantTag = II->getArgOperand(0);

        continue;
      }

      // If we reach a lifetime begin or end marker, then the query ends here
      // because the value is undefined.
      if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
        // FIXME: This only considers queries directly on the invariant-tagged
        // pointer, not on query pointers that are indexed off of them.  It'd
        // be nice to handle that at some point.
        AliasAnalysis::AliasResult R =
          AA->alias(AliasAnalysis::Location(II->getArgOperand(1)), MemLoc);
        if (R == AliasAnalysis::MustAlias)
          return MemDepResult::getDef(II);
        continue;
      }
    }

    // If we're querying on a load and we're in an invariant region, we're done
    // at this point. Nothing a load depends on can live in an invariant region.
    //
    // FIXME: this will prevent us from returning load/load must-aliases, so GVN
    // won't remove redundant loads.
    if (isLoad && InvariantTag) continue;

    // Values depend on loads if the pointers are must-aliased.  This means
    // that a load depends on another load only when their pointers must-alias.
    if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
      Value *Pointer = LI->getPointerOperand();
      uint64_t PointerSize = AA->getTypeStoreSize(LI->getType());
      MDNode *TBAATag = LI->getMetadata(LLVMContext::MD_tbaa);
      AliasAnalysis::Location LoadLoc(Pointer, PointerSize, TBAATag);

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R = AA->alias(LoadLoc, MemLoc);
      if (R == AliasAnalysis::NoAlias)
        continue;

      // A load doesn't depend on another load that merely may-aliases it;
      // keep scanning.
      if (isLoad && R == AliasAnalysis::MayAlias)
        continue;

      // Stores don't alias loads from read-only memory.
      if (!isLoad && AA->pointsToConstantMemory(LoadLoc))
        continue;

      // Stores depend on may- and must-aliased loads; loads depend on
      // must-aliased loads.
      return MemDepResult::getDef(Inst);
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
      // There can't be stores to the value we care about inside an
      // invariant region.
      if (InvariantTag) continue;

      // If alias analysis can tell that this store is guaranteed to not modify
      // the query pointer, ignore it.  Use getModRefInfo to handle cases where
      // the query pointer points to constant memory etc.
      if (AA->getModRefInfo(SI, MemLoc) == AliasAnalysis::NoModRef)
        continue;

      // Ok, this store might clobber the query pointer.  Check to see if it is
      // a must alias: in this case, we want to return this as a def.
      Value *Pointer = SI->getPointerOperand();
      uint64_t PointerSize = AA->getTypeStoreSize(SI->getOperand(0)->getType());
      MDNode *TBAATag = SI->getMetadata(LLVMContext::MD_tbaa);

      // If we found a pointer, check if it could be the same as our pointer.
      AliasAnalysis::AliasResult R =
        AA->alias(AliasAnalysis::Location(Pointer, PointerSize, TBAATag),
                  MemLoc);

      if (R == AliasAnalysis::NoAlias)
        continue;
      if (R == AliasAnalysis::MayAlias)
        return MemDepResult::getClobber(Inst);
      return MemDepResult::getDef(Inst);
    }

    // If this is an allocation, and if we know that the accessed pointer is to
    // the allocation, return Def.  This means that there is no dependence and
    // the access can be optimized based on that.  For example, a load could
    // turn into undef.
    // Note: Only determine this to be a malloc if Inst is the malloc call, not
    // a subsequent bitcast of the malloc call result.  There can be stores to
    // the malloced memory between the malloc call and its bitcast uses, and we
    // need to continue scanning until the malloc call.
    if (isa<AllocaInst>(Inst) ||
        (isa<CallInst>(Inst) && extractMallocCall(Inst))) {
      const Value *AccessPtr = MemLoc.Ptr->getUnderlyingObject();

      if (AccessPtr == Inst ||
          AA->alias(Inst, 1, AccessPtr, 1) == AliasAnalysis::MustAlias)
        return MemDepResult::getDef(Inst);
      continue;
    }

    // See if this instruction (e.g. a call or vaarg) mod/ref's the pointer.
    switch (AA->getModRefInfo(Inst, MemLoc)) {
    case AliasAnalysis::NoModRef:
      // If the call has no effect on the queried pointer, just ignore it.
      continue;
    case AliasAnalysis::Mod:
      // If we're in an invariant region, we can ignore calls that ONLY
      // modify the pointer.
      if (InvariantTag) continue;
      return MemDepResult::getClobber(Inst);
    case AliasAnalysis::Ref:
      // If the call is known to never store to the pointer, and if this is a
      // load query, we can safely ignore it (scan past it).
      if (isLoad)
        continue;
      // FALL THROUGH: a store query can't be moved across a read of the
      // location, so this is a potential dependence.
    default:
      // Otherwise, there is a potential dependence.  Return a clobber.
      return MemDepResult::getClobber(Inst);
    }
  }

  // No dependence found.  If this is the entry block of the function, it is a
  // clobber, otherwise it is non-local.
  if (BB != &BB->getParent()->getEntryBlock())
    return MemDepResult::getNonLocal();
  return MemDepResult::getClobber(ScanIt);
}
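
// Illustrative use of the routine above (a sketch only; 'MDA' and 'LI' are
// hypothetical client values, and most clients call getDependency instead):
//
//   AliasAnalysis::Location Loc(LI->getPointerOperand(),
//                               AA->getTypeStoreSize(LI->getType()),
//                               LI->getMetadata(LLVMContext::MD_tbaa));
//   MemDepResult Dep =
//     MDA.getPointerDependencyFrom(Loc, /*isLoad=*/true, LI, LI->getParent());
//   // Dep.isDef():      a must-aliased store or load producing the value.
//   // Dep.isClobber():  a may-aliased write (or other barrier) was hit.
//   // Dep.isNonLocal(): the block is transparent; look at predecessors.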

/// getDependency - Return the instruction on which a memory operation
/// depends.
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *QueryInst) {
  Instruction *ScanPos = QueryInst;

  // Check for a cached result.
  MemDepResult &LocalCache = LocalDeps[QueryInst];

  // If the cached entry is non-dirty, just return it.  Note that this depends
  // on MemDepResult's default constructing to 'dirty'.
  if (!LocalCache.isDirty())
    return LocalCache;

  // Otherwise, if we have a dirty entry, we know we can start the scan at that
  // instruction, which may save us some work.
  if (Instruction *Inst = LocalCache.getInst()) {
    ScanPos = Inst;

    RemoveFromReverseMap(ReverseLocalDeps, Inst, QueryInst);
  }

  BasicBlock *QueryParent = QueryInst->getParent();

  AliasAnalysis::Location MemLoc;

  // Do the scan.
  if (BasicBlock::iterator(QueryInst) == QueryParent->begin()) {
    // No dependence found.  If this is the entry block of the function, it is a
    // clobber, otherwise it is non-local.
    if (QueryParent != &QueryParent->getParent()->getEntryBlock())
      LocalCache = MemDepResult::getNonLocal();
    else
      LocalCache = MemDepResult::getClobber(QueryInst);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(QueryInst)) {
    // If this is a volatile store, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (SI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else
      MemLoc = AliasAnalysis::Location(SI->getPointerOperand(),
                                       AA->getTypeStoreSize(SI->getOperand(0)
                                                              ->getType()),
                                       SI->getMetadata(LLVMContext::MD_tbaa));
  } else if (LoadInst *LI = dyn_cast<LoadInst>(QueryInst)) {
    // If this is a volatile load, don't mess around with it.  Just return the
    // previous instruction as a clobber.
    if (LI->isVolatile())
      LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
    else
      MemLoc = AliasAnalysis::Location(LI->getPointerOperand(),
                                       AA->getTypeStoreSize(LI->getType()),
                                       LI->getMetadata(LLVMContext::MD_tbaa));
  } else if (const CallInst *CI = isFreeCall(QueryInst)) {
    // Calls to free() erase the entire structure, not just a field.
    MemLoc = AliasAnalysis::Location(CI->getArgOperand(0));
  } else if (isa<CallInst>(QueryInst) || isa<InvokeInst>(QueryInst)) {
    int IntrinsicID = 0;  // Intrinsic IDs start at 1.
    IntrinsicInst *II = dyn_cast<IntrinsicInst>(QueryInst);
    if (II)
      IntrinsicID = II->getIntrinsicID();

    switch (IntrinsicID) {
    case Intrinsic::lifetime_start:
    case Intrinsic::lifetime_end:
    case Intrinsic::invariant_start:
      MemLoc = AliasAnalysis::Location(II->getArgOperand(1),
                                       cast<ConstantInt>(II->getArgOperand(0))
                                         ->getZExtValue(),
                                       II->getMetadata(LLVMContext::MD_tbaa));
      break;
    case Intrinsic::invariant_end:
      MemLoc = AliasAnalysis::Location(II->getArgOperand(2),
                                       cast<ConstantInt>(II->getArgOperand(1))
                                         ->getZExtValue(),
                                       II->getMetadata(LLVMContext::MD_tbaa));
      break;
    default:
      CallSite QueryCS(QueryInst);
      bool isReadOnly = AA->onlyReadsMemory(QueryCS);
      LocalCache = getCallSiteDependencyFrom(QueryCS, isReadOnly, ScanPos,
                                             QueryParent);
      break;
    }
  } else {
    // Non-memory instruction.
    LocalCache = MemDepResult::getClobber(--BasicBlock::iterator(ScanPos));
  }

  // If we need to do a pointer scan, make it happen.
  if (MemLoc.Ptr) {
    bool isLoad = !QueryInst->mayWriteToMemory();
    if (IntrinsicInst *II = dyn_cast<MemoryUseIntrinsic>(QueryInst)) {
      isLoad |= II->getIntrinsicID() == Intrinsic::lifetime_end;
    }
    LocalCache = getPointerDependencyFrom(MemLoc, isLoad, ScanPos,
                                          QueryParent);
  }

  // Remember the result!
  if (Instruction *I = LocalCache.getInst())
    ReverseLocalDeps[I].insert(QueryInst);

  return LocalCache;
}
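
// Example of consuming getDependency's result (a sketch; 'MDA' and 'LI' are
// hypothetical values in a GVN-style client):
//
//   MemDepResult Dep = MDA.getDependency(LI);
//   if (Dep.isDef())
//     if (StoreInst *SI = dyn_cast<StoreInst>(Dep.getInst())) {
//       // LI reads exactly the value SI stored; the client may forward
//       // SI->getValueOperand() to LI's uses.
//     }
//   if (Dep.isNonLocal()) {
//     // Nothing in LI's block; use the non-local query interfaces below.
//   }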

#ifndef NDEBUG
/// AssertSorted - This method is used when -debug is specified to verify that
/// cache arrays are properly kept sorted.
static void AssertSorted(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         int Count = -1) {
  if (Count == -1) Count = Cache.size();
  if (Count == 0) return;

  for (unsigned i = 1; i != unsigned(Count); ++i)
    assert(!(Cache[i] < Cache[i-1]) && "Cache isn't sorted!");
}
#endif

/// getNonLocalCallDependency - Perform a full dependency query for the
/// specified call, returning the set of blocks that the value is
/// potentially live across.  The returned set of results will include a
/// "NonLocal" result for all blocks where the value is live across.
///
/// This method assumes the instruction returns a "NonLocal" dependency
/// within its own block.
///
/// This returns a reference to an internal data structure that may be
/// invalidated on the next non-local query or when an instruction is
/// removed.  Clients must copy this data if they want it around longer than
/// that.
const MemoryDependenceAnalysis::NonLocalDepInfo &
MemoryDependenceAnalysis::getNonLocalCallDependency(CallSite QueryCS) {
  assert(getDependency(QueryCS.getInstruction()).isNonLocal() &&
         "getNonLocalCallDependency should only be used on calls with "
         "non-local deps!");
  PerInstNLInfo &CacheP = NonLocalDeps[QueryCS.getInstruction()];
  NonLocalDepInfo &Cache = CacheP.first;

  /// DirtyBlocks - This is the set of blocks that need to be recomputed.  In
  /// the cached case, this can happen due to instructions being deleted etc. In
  /// the uncached case, this starts out as the set of predecessors we care
  /// about.
  SmallVector<BasicBlock*, 32> DirtyBlocks;

  if (!Cache.empty()) {
    // Okay, we have a cache entry.  If we know it is not dirty, just return it
    // with no computation.
    if (!CacheP.second) {
      ++NumCacheNonLocal;
      return Cache;
    }

    // If we already have a partially computed set of results, scan them to
    // determine what is dirty, seeding our initial DirtyBlocks worklist.
    for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
         I != E; ++I)
      if (I->getResult().isDirty())
        DirtyBlocks.push_back(I->getBB());

    // Sort the cache so that we can do fast binary search lookups below.
    std::sort(Cache.begin(), Cache.end());

    ++NumCacheDirtyNonLocal;
    //cerr << "CACHED CASE: " << DirtyBlocks.size() << " dirty: "
    //     << Cache.size() << " cached: " << *QueryInst;
  } else {
    // Seed DirtyBlocks with each of the preds of QueryInst's block.
    BasicBlock *QueryBB = QueryCS.getInstruction()->getParent();
    for (BasicBlock **PI = PredCache->GetPreds(QueryBB); *PI; ++PI)
      DirtyBlocks.push_back(*PI);
    ++NumUncacheNonLocal;
  }

  // isReadonlyCall - If this is a read-only call, we can be more aggressive.
  bool isReadonlyCall = AA->onlyReadsMemory(QueryCS);

  SmallPtrSet<BasicBlock*, 64> Visited;

  unsigned NumSortedEntries = Cache.size();
  DEBUG(AssertSorted(Cache));

  // Iterate while we still have blocks to update.
  while (!DirtyBlocks.empty()) {
    BasicBlock *DirtyBB = DirtyBlocks.back();
    DirtyBlocks.pop_back();

    // Already processed this block?
    if (!Visited.insert(DirtyBB))
      continue;

    // Do a binary search to see if we already have an entry for this block in
    // the cache set.  If so, find it.
    DEBUG(AssertSorted(Cache, NumSortedEntries));
    NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
                       NonLocalDepEntry(DirtyBB));
    if (Entry != Cache.begin() && prior(Entry)->getBB() == DirtyBB)
      --Entry;

    NonLocalDepEntry *ExistingResult = 0;
    if (Entry != Cache.begin()+NumSortedEntries &&
        Entry->getBB() == DirtyBB) {
      // If we already have an entry, and if it isn't already dirty, the block
      // is done.
      if (!Entry->getResult().isDirty())
        continue;

      // Otherwise, remember this slot so we can update the value.
      ExistingResult = &*Entry;
    }

    // If the dirty entry has a pointer, start scanning from it so we don't have
    // to rescan the entire block.
    BasicBlock::iterator ScanPos = DirtyBB->end();
    if (ExistingResult) {
      if (Instruction *Inst = ExistingResult->getResult().getInst()) {
        ScanPos = Inst;
        // We're removing QueryInst's use of Inst.
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
                             QueryCS.getInstruction());
      }
    }

    // Find out if this block has a local dependency for QueryInst.
    MemDepResult Dep;

    if (ScanPos != DirtyBB->begin()) {
      Dep = getCallSiteDependencyFrom(QueryCS, isReadonlyCall, ScanPos,
                                      DirtyBB);
    } else if (DirtyBB != &DirtyBB->getParent()->getEntryBlock()) {
      // No dependence found.  If this is the entry block of the function, it is
      // a clobber, otherwise it is non-local.
      Dep = MemDepResult::getNonLocal();
    } else {
      Dep = MemDepResult::getClobber(ScanPos);
    }

    // If we had a dirty entry for the block, update it.  Otherwise, just add
    // a new entry.
    if (ExistingResult)
      ExistingResult->setResult(Dep);
    else
      Cache.push_back(NonLocalDepEntry(DirtyBB, Dep));

    // If the block has a dependency (i.e. it isn't completely transparent to
    // the value), remember the association!
    if (!Dep.isNonLocal()) {
      // Keep the ReverseNonLocalDeps map up to date so we can efficiently
      // update this when we remove instructions.
      if (Instruction *Inst = Dep.getInst())
        ReverseNonLocalDeps[Inst].insert(QueryCS.getInstruction());
    } else {
      // If the block *is* completely transparent to the load, we need to check
      // the predecessors of this block.  Add them to our worklist.
      for (BasicBlock **PI = PredCache->GetPreds(DirtyBB); *PI; ++PI)
        DirtyBlocks.push_back(*PI);
    }
  }

  return Cache;
}
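
// Example of walking the returned block-level results (a sketch; 'MDA' and
// 'CS' are hypothetical values, where CS is a call whose local query already
// returned NonLocal):
//
//   const MemoryDependenceAnalysis::NonLocalDepInfo &Deps =
//     MDA.getNonLocalCallDependency(CS);
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i)
//     if (!Deps[i].getResult().isNonLocal()) {
//       // Deps[i].getBB() contains the def/clobber Deps[i].getResult().
//     }
//   // Note: 'Deps' aliases internal state; copy it before the next query.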

/// getNonLocalPointerDependency - Perform a full dependency query for an
/// access to the specified (non-volatile) memory location, returning the
/// set of instructions that either define or clobber the value.
///
/// This method assumes the pointer has a "NonLocal" dependency within its
/// own block.
///
void MemoryDependenceAnalysis::
getNonLocalPointerDependency(const AliasAnalysis::Location &Loc, bool isLoad,
                             BasicBlock *FromBB,
                             SmallVectorImpl<NonLocalDepResult> &Result) {
  assert(Loc.Ptr->getType()->isPointerTy() &&
         "Can't get pointer deps of a non-pointer!");
  Result.clear();

  PHITransAddr Address(const_cast<Value *>(Loc.Ptr), TD);

  // This is the set of blocks we've inspected, and the pointer we consider in
  // each block.  Because of critical edges, we currently bail out if querying
  // a block with multiple different pointers.  This can happen during PHI
  // translation.
  DenseMap<BasicBlock*, Value*> Visited;
  if (!getNonLocalPointerDepFromBB(Address, Loc, isLoad, FromBB,
                                   Result, Visited, true))
    return;
  Result.clear();
  Result.push_back(NonLocalDepResult(FromBB,
                                     MemDepResult::getClobber(FromBB->begin()),
                                     const_cast<Value *>(Loc.Ptr)));
}
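
// Example query (a sketch; 'MDA' and 'LI' are hypothetical values, where LI
// is a load whose local dependency is NonLocal):
//
//   SmallVector<NonLocalDepResult, 16> Deps;
//   AliasAnalysis::Location Loc(LI->getPointerOperand(),
//                               AA->getTypeStoreSize(LI->getType()),
//                               LI->getMetadata(LLVMContext::MD_tbaa));
//   MDA.getNonLocalPointerDependency(Loc, /*isLoad=*/true,
//                                    LI->getParent(), Deps);
//   for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
//     // Deps[i] pairs a block with the def/clobber found there, plus the
//     // (possibly phi-translated) address that was queried in that block.
//   }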

/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
/// Pointer/PointeeSize using either cached information in Cache or by doing a
/// lookup (which may use dirty cache info if available).  If we do a lookup,
/// add the result to the cache.
MemDepResult MemoryDependenceAnalysis::
GetNonLocalInfoForBlock(const AliasAnalysis::Location &Loc,
                        bool isLoad, BasicBlock *BB,
                        NonLocalDepInfo *Cache, unsigned NumSortedEntries) {

  // Do a binary search to see if we already have an entry for this block in
  // the cache set.  If so, find it.
  NonLocalDepInfo::iterator Entry =
    std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
                     NonLocalDepEntry(BB));
  if (Entry != Cache->begin() && (Entry-1)->getBB() == BB)
    --Entry;

  NonLocalDepEntry *ExistingResult = 0;
  if (Entry != Cache->begin()+NumSortedEntries && Entry->getBB() == BB)
    ExistingResult = &*Entry;

  // If we have a cached entry, and it is non-dirty, use it as the value for
  // this dependency.
  if (ExistingResult && !ExistingResult->getResult().isDirty()) {
    ++NumCacheNonLocalPtr;
    return ExistingResult->getResult();
  }

  // Otherwise, we have to scan for the value.  If we have a dirty cache
  // entry, start scanning from its position, otherwise we scan from the end
  // of the block.
  BasicBlock::iterator ScanPos = BB->end();
  if (ExistingResult && ExistingResult->getResult().getInst()) {
    assert(ExistingResult->getResult().getInst()->getParent() == BB &&
           "Instruction invalidated?");
    ++NumCacheDirtyNonLocalPtr;
    ScanPos = ExistingResult->getResult().getInst();

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, ScanPos, CacheKey);
  } else {
    ++NumUncacheNonLocalPtr;
  }

  // Scan the block for the dependency.
  MemDepResult Dep = getPointerDependencyFrom(Loc, isLoad, ScanPos, BB);

  // If we had a dirty entry for the block, update it.  Otherwise, just add
  // a new entry.
  if (ExistingResult)
    ExistingResult->setResult(Dep);
  else
    Cache->push_back(NonLocalDepEntry(BB, Dep));

  // If the block has a dependency (i.e. it isn't completely transparent to
  // the value), remember the reverse association because we just added it
  // to Cache!
  if (Dep.isNonLocal())
    return Dep;

  // Keep the ReverseNonLocalPtrDeps map up to date so we can efficiently
  // update MemDep when we remove instructions.
  Instruction *Inst = Dep.getInst();
  assert(Inst && "Didn't depend on anything?");
  ValueIsLoadPair CacheKey(Loc.Ptr, isLoad);
  ReverseNonLocalPtrDeps[Inst].insert(CacheKey);
  return Dep;
}

/// SortNonLocalDepInfoCache - Sort a NonLocalDepInfo cache, given a certain
/// number of elements in the array that are already properly ordered.  This is
/// optimized for the case when only a few entries are added.
static void
SortNonLocalDepInfoCache(MemoryDependenceAnalysis::NonLocalDepInfo &Cache,
                         unsigned NumSortedEntries) {
  switch (Cache.size() - NumSortedEntries) {
  case 0:
    // Done, no new entries.
    break;
  case 2: {
    // Two new entries, insert the last one into place.
    NonLocalDepEntry Val = Cache.back();
    Cache.pop_back();
    MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
      std::upper_bound(Cache.begin(), Cache.end()-1, Val);
    Cache.insert(Entry, Val);
    // FALL THROUGH.
  }
  case 1:
    // One new entry, just insert the new value at the appropriate position.
    if (Cache.size() != 1) {
      NonLocalDepEntry Val = Cache.back();
      Cache.pop_back();
      MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
        std::upper_bound(Cache.begin(), Cache.end(), Val);
      Cache.insert(Entry, Val);
    }
    break;
  default:
    // Added many values, do a full scale sort.
    std::sort(Cache.begin(), Cache.end());
    break;
  }
}
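
// The same strategy in a minimal standalone form (an illustrative sketch
// using std::vector<int> rather than NonLocalDepInfo): when at most two
// entries were appended to an already-sorted prefix, a binary-search
// insertion is cheaper than re-sorting the whole array.
//
//   static void sortTail(std::vector<int> &V, unsigned NumSorted) {
//     switch (V.size() - NumSorted) {
//     case 0: break;                       // Nothing new.
//     case 2: {                            // Place the last new entry...
//       int Val = V.back();
//       V.pop_back();
//       V.insert(std::upper_bound(V.begin(), V.end() - 1, Val), Val);
//     } // FALL THROUGH to place the remaining new entry.
//     case 1: {
//       int Val = V.back();
//       V.pop_back();
//       V.insert(std::upper_bound(V.begin(), V.end(), Val), Val);
//       break;
//     }
//     default:                             // Many new entries: full sort.
//       std::sort(V.begin(), V.end());
//     }
//   }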

/// getNonLocalPointerDepFromBB - Perform a dependency query based on
/// pointer/pointeesize starting at the end of StartBB.  Add any clobber/def
/// results to the results vector and keep track of which blocks are visited in
/// 'Visited'.
///
/// This has special behavior for the first block queries (when SkipFirstBlock
/// is true).  In this special case, it ignores the contents of the specified
/// block and starts returning dependence info for its predecessors.
///
/// This function returns false on success, or true to indicate that it could
/// not compute dependence information for some reason.  This should be treated
/// as a clobber dependence on the first instruction in the predecessor block.
bool MemoryDependenceAnalysis::
getNonLocalPointerDepFromBB(const PHITransAddr &Pointer,
                            const AliasAnalysis::Location &Loc,
                            bool isLoad, BasicBlock *StartBB,
                            SmallVectorImpl<NonLocalDepResult> &Result,
                            DenseMap<BasicBlock*, Value*> &Visited,
                            bool SkipFirstBlock) {

  // Look up the cached info for Pointer.
  ValueIsLoadPair CacheKey(Pointer.getAddr(), isLoad);

  // Set up a temporary NLPI value. If the map doesn't yet have an entry for
  // CacheKey, this value will be inserted as the associated value. Otherwise,
  // it'll be ignored, and we'll have to check to see if the cached size and
  // tbaa tag are consistent with the current query.
  NonLocalPointerInfo InitialNLPI;
  InitialNLPI.Size = Loc.Size;
  InitialNLPI.TBAATag = Loc.TBAATag;

  // Get the NLPI for CacheKey, inserting one into the map if it doesn't
  // already have one.
  std::pair<CachedNonLocalPointerInfo::iterator, bool> Pair =
    NonLocalPointerDeps.insert(std::make_pair(CacheKey, InitialNLPI));
  NonLocalPointerInfo *CacheInfo = &Pair.first->second;

  // If we already have a cache entry for this CacheKey, we may need to do some
  // work to reconcile the cache entry and the current query.
  if (!Pair.second) {
    if (CacheInfo->Size < Loc.Size) {
      // The query's Size is greater than the cached one. Throw out the
      // cached data and proceed with the query at the greater size.
      CacheInfo->Pair = BBSkipFirstBlockPair();
      CacheInfo->Size = Loc.Size;
      CacheInfo->NonLocalDeps.clear();
    } else if (CacheInfo->Size > Loc.Size) {
      // This query's Size is less than the cached one. Conservatively restart
      // the query using the greater size.
      return getNonLocalPointerDepFromBB(Pointer,
                                         Loc.getWithNewSize(CacheInfo->Size),
                                         isLoad, StartBB, Result, Visited,
                                         SkipFirstBlock);
    }

    // If the query's TBAATag is inconsistent with the cached one,
    // conservatively throw out the cached data and restart the query with
    // no tag if needed.
    if (CacheInfo->TBAATag != Loc.TBAATag) {
      if (CacheInfo->TBAATag) {
        CacheInfo->Pair = BBSkipFirstBlockPair();
        CacheInfo->TBAATag = 0;
        CacheInfo->NonLocalDeps.clear();
      }
      if (Loc.TBAATag)
        return getNonLocalPointerDepFromBB(Pointer, Loc.getWithoutTBAATag(),
                                           isLoad, StartBB, Result, Visited,
                                           SkipFirstBlock);
    }
  }

  NonLocalDepInfo *Cache = &CacheInfo->NonLocalDeps;

  // If we have valid cached information for exactly the block we are
  // investigating, just return it with no recomputation.
  if (CacheInfo->Pair == BBSkipFirstBlockPair(StartBB, SkipFirstBlock)) {
    // If we have a fully cached result for this query, we can just return the
    // cached results and populate the visited set.  However, we have to verify
    // that we don't already have conflicting results for these blocks.  Check
    // to ensure that if a block in the results set is in the visited set that
    // it was for the same pointer query.
    if (!Visited.empty()) {
      for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
           I != E; ++I) {
        DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->getBB());
        if (VI == Visited.end() || VI->second == Pointer.getAddr())
          continue;

        // We have a pointer mismatch in a block.  Just return clobber, saying
        // that something was clobbered in this result.  We could also do a
        // non-fully cached query, but there is little point in doing this.
        return true;
      }
    }

    Value *Addr = Pointer.getAddr();
    for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
         I != E; ++I) {
      Visited.insert(std::make_pair(I->getBB(), Addr));
      if (!I->getResult().isNonLocal())
        Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(), Addr));
    }
    ++NumCacheCompleteNonLocalPtr;
    return false;
  }

  // Otherwise, either this is a new block, a block with an invalid cache
  // pointer, or one that we're about to invalidate by putting more info into
  // it than its valid cache info.  If empty, the result will be valid cache
  // info, otherwise it isn't.
  if (Cache->empty())
    CacheInfo->Pair = BBSkipFirstBlockPair(StartBB, SkipFirstBlock);
  else {
    CacheInfo->Pair = BBSkipFirstBlockPair();
    CacheInfo->Size = 0;
    CacheInfo->TBAATag = 0;
  }

  SmallVector<BasicBlock*, 32> Worklist;
  Worklist.push_back(StartBB);

  // Keep track of the entries that we know are sorted.  Previously cached
  // entries will all be sorted.  The entries we add we only sort on demand (we
  // don't insert every element into its sorted position).  We know that we
  // won't get any reuse from currently inserted values, because we don't
  // revisit blocks after we insert info for them.
  unsigned NumSortedEntries = Cache->size();
  DEBUG(AssertSorted(*Cache));

  while (!Worklist.empty()) {
    BasicBlock *BB = Worklist.pop_back_val();

    // Skip the first block if we have it.
    if (!SkipFirstBlock) {
      // Analyze the dependency of *Pointer in FromBB.  See if we already have
      // been here.
      assert(Visited.count(BB) && "Should check 'visited' before adding to WL");

      // Get the dependency info for Pointer in BB.  If we have cached
      // information, we will use it, otherwise we compute it.
      DEBUG(AssertSorted(*Cache, NumSortedEntries));
      MemDepResult Dep = GetNonLocalInfoForBlock(Loc, isLoad, BB, Cache,
                                                 NumSortedEntries);

      // If we got a Def or Clobber, add this to the list of results.
      if (!Dep.isNonLocal()) {
        Result.push_back(NonLocalDepResult(BB, Dep, Pointer.getAddr()));
        continue;
      }
    }

    // If 'Pointer' is an instruction defined in this block, then we need to do
    // phi translation to change it into a value live in the predecessor block.
    // If not, we just add the predecessors to the worklist and scan them with
    // the same Pointer.
    if (!Pointer.NeedsPHITranslationFromBlock(BB)) {
      SkipFirstBlock = false;
      for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
        // Verify that we haven't looked at this block yet.
        std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
          InsertRes = Visited.insert(std::make_pair(*PI, Pointer.getAddr()));
        if (InsertRes.second) {
          // First time we've looked at *PI.
          Worklist.push_back(*PI);
          continue;
        }

        // If we have seen this block before, but it was with a different
        // pointer then we have a phi translation failure and we have to treat
        // this as a clobber.
        if (InsertRes.first->second != Pointer.getAddr())
          goto PredTranslationFailure;
      }
      continue;
    }

    // We do need to do phi translation.  If we know ahead of time that we
    // can't phi translate this value, don't even try.
    if (!Pointer.IsPotentiallyPHITranslatable())
      goto PredTranslationFailure;

    // We may have added values to the cache list before this PHI translation.
    // If so, we haven't done anything to ensure that the cache remains sorted.
    // Sort it now (if needed) so that recursive invocations of
    // getNonLocalPointerDepFromBB and other routines that could reuse the cache
    // value will only see properly sorted cache arrays.
    if (Cache && NumSortedEntries != Cache->size()) {
      SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
      NumSortedEntries = Cache->size();
    }
    Cache = 0;

    for (BasicBlock **PI = PredCache->GetPreds(BB); *PI; ++PI) {
      BasicBlock *Pred = *PI;

      // Get the PHI translated pointer in this predecessor.  This can fail if
      // not translatable, in which case the getAddr() returns null.
      PHITransAddr PredPointer(Pointer);
      PredPointer.PHITranslateValue(BB, Pred, 0);

      Value *PredPtrVal = PredPointer.getAddr();

      // Check to see if we have already visited this pred block with another
      // pointer.  If so, we can't do this lookup.  This failure can occur
      // with PHI translation when a critical edge exists and the PHI node in
      // the successor translates to a pointer value different than the
      // pointer the block was first analyzed with.
      std::pair<DenseMap<BasicBlock*,Value*>::iterator, bool>
        InsertRes = Visited.insert(std::make_pair(Pred, PredPtrVal));

      if (!InsertRes.second) {
        // If the predecessor was visited with PredPtr, then we already did
        // the analysis and can ignore it.
        if (InsertRes.first->second == PredPtrVal)
          continue;

        // Otherwise, the block was previously analyzed with a different
        // pointer.  We can't represent the result of this case, so we just
        // treat this as a phi translation failure.
        goto PredTranslationFailure;
      }

      // If PHI translation was unable to find an available pointer in this
      // predecessor, then we have to assume that the pointer is clobbered in
      // that predecessor.  We can still do PRE of the load, which would insert
      // a computation of the pointer in this predecessor.
      if (PredPtrVal == 0) {
        // Add the entry to the Result list.
        NonLocalDepResult Entry(Pred,
                                MemDepResult::getClobber(Pred->getTerminator()),
                                PredPtrVal);
        Result.push_back(Entry);

        // Since we had a phi translation failure, the cache for CacheKey won't
        // include all of the entries that we need to immediately satisfy future
        // queries.  Mark this in NonLocalPointerDeps by setting the
        // BBSkipFirstBlockPair pointer to null.  This makes later reuse of the
        // cached value do more work, but doesn't miss the phi trans failure.
        NonLocalPointerInfo &NLPI = NonLocalPointerDeps[CacheKey];
        NLPI.Pair = BBSkipFirstBlockPair();
        NLPI.Size = 0;
        NLPI.TBAATag = 0;
        continue;
      }

      // FIXME: it is entirely possible that PHI translating will end up with
      // the same value.  Consider PHI translating something like:
      // X = phi [x, bb1], [y, bb2].  PHI translating for bb1 doesn't *need*
      // to recurse here, pedantically speaking.

      // If we have a problem phi translating, fall through to the code below
      // to handle the failure condition.
      if (getNonLocalPointerDepFromBB(PredPointer,
                                      Loc.getWithNewPtr(PredPointer.getAddr()),
                                      isLoad, Pred,
                                      Result, Visited))
        goto PredTranslationFailure;
    }

    // Refresh the CacheInfo/Cache pointer so that it isn't invalidated.
    CacheInfo = &NonLocalPointerDeps[CacheKey];
    Cache = &CacheInfo->NonLocalDeps;
    NumSortedEntries = Cache->size();

    // Since we did phi translation, the "Cache" set won't contain all of the
    // results for the query.  This is ok (we can still use it to accelerate
    // specific block queries) but we can't do the fastpath "return all
    // results from the set".  Clear out the indicator for this.
    CacheInfo->Pair = BBSkipFirstBlockPair();
    CacheInfo->Size = 0;
    CacheInfo->TBAATag = 0;
    SkipFirstBlock = false;
    continue;

  PredTranslationFailure:

    if (Cache == 0) {
      // Refresh the CacheInfo/Cache pointer if it got invalidated.
      CacheInfo = &NonLocalPointerDeps[CacheKey];
      Cache = &CacheInfo->NonLocalDeps;
      NumSortedEntries = Cache->size();
    }

    // Since we failed phi translation, the "Cache" set won't contain all of the
    // results for the query.  This is ok (we can still use it to accelerate
    // specific block queries) but we can't do the fastpath "return all
    // results from the set".  Clear out the indicator for this.
    CacheInfo->Pair = BBSkipFirstBlockPair();
    CacheInfo->Size = 0;
    CacheInfo->TBAATag = 0;

    // If *nothing* works, mark the pointer as being clobbered by the first
    // instruction in this block.
    //
    // If this is the magic first block, return this as a clobber of the whole
    // incoming value.  Since we can't phi translate to one of the predecessors,
    // we have to bail out.
    if (SkipFirstBlock)
      return true;

    for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
      assert(I != Cache->rend() && "Didn't find current block??");
      if (I->getBB() != BB)
        continue;

      assert(I->getResult().isNonLocal() &&
             "Should only be here with transparent block");
      I->setResult(MemDepResult::getClobber(BB->begin()));
      ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey);
      Result.push_back(NonLocalDepResult(I->getBB(), I->getResult(),
                                         Pointer.getAddr()));
      break;
    }
  }

  // Okay, we're done now.  If we added new values to the cache, re-sort it.
  SortNonLocalDepInfoCache(*Cache, NumSortedEntries);
  DEBUG(AssertSorted(*Cache));
  return false;
}
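
// The PHI-translation step above in isolation (an illustrative sketch; 'Ptr',
// 'TD', 'BB', and 'Pred' are hypothetical client values):
//
//   PHITransAddr Addr(Ptr, TD);
//   if (Addr.NeedsPHITranslationFromBlock(BB) &&
//       Addr.IsPotentiallyPHITranslatable()) {
//     Addr.PHITranslateValue(BB, Pred, /*DT=*/0);
//     if (Value *PredPtr = Addr.getAddr()) {
//       // Query Pred using PredPtr, the value of the address in Pred.
//     } else {
//       // Translation failed: treat the location as clobbered in Pred.
//     }
//   }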

/// RemoveCachedNonLocalPointerDependencies - If P exists in
/// CachedNonLocalPointerInfo, remove it.
void MemoryDependenceAnalysis::
RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair P) {
  CachedNonLocalPointerInfo::iterator It =
    NonLocalPointerDeps.find(P);
  if (It == NonLocalPointerDeps.end()) return;

  // Remove all of the entries in the BB->val map.  This involves removing
  // instructions from the reverse map.
  NonLocalDepInfo &PInfo = It->second.NonLocalDeps;

  for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
    Instruction *Target = PInfo[i].getResult().getInst();
    if (Target == 0) continue;  // Ignore non-local dep results.
    assert(Target->getParent() == PInfo[i].getBB());

    // Eliminating the dirty entry from 'Cache', so update the reverse info.
    RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
  }

  // Remove P from NonLocalPointerDeps (which deletes NonLocalDepInfo).
  NonLocalPointerDeps.erase(It);
}

/// invalidateCachedPointerInfo - This method is used to invalidate cached
/// information about the specified pointer, because it may be too
/// conservative in memdep.  This is an optional call that can be used when
/// the client detects an equivalence between the pointer and some other
/// value and replaces the other value with ptr.  This can make Ptr available
/// in more places than the cached info currently records.
void MemoryDependenceAnalysis::invalidateCachedPointerInfo(Value *Ptr) {
  // If Ptr isn't really a pointer, just ignore it.
  if (!Ptr->getType()->isPointerTy()) return;
  // Flush store info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, false));
  // Flush load info for the pointer.
  RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(Ptr, true));
}
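
// Typical client usage (a sketch; 'MDA', 'LI', and 'Repl' are hypothetical
// values in a GVN-style client that just proved LI equal to Repl):
//
//   LI->replaceAllUsesWith(Repl);
//   if (Repl->getType()->isPointerTy())
//     MDA.invalidateCachedPointerInfo(Repl);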

/// invalidateCachedPredecessors - Clear the PredIteratorCache info.
/// This needs to be done when the CFG changes, e.g., due to splitting
/// critical edges.
void MemoryDependenceAnalysis::invalidateCachedPredecessors() {
  PredCache->clear();
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
/// This method attempts to keep the cache coherent using the reverse map.
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
  // Walk through the Non-local dependencies, removing this one as the value
  // for any cached queries.
  NonLocalDepMapType::iterator NLDI = NonLocalDeps.find(RemInst);
  if (NLDI != NonLocalDeps.end()) {
    NonLocalDepInfo &BlockMap = NLDI->second.first;
    for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
         DI != DE; ++DI)
      if (Instruction *Inst = DI->getResult().getInst())
        RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
    NonLocalDeps.erase(NLDI);
  }

  // If we have a cached local dependence query for this instruction, remove it.
  //
  LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
  if (LocalDepEntry != LocalDeps.end()) {
    // Remove us from DepInst's reverse set now that the local dep info is gone.
    if (Instruction *Inst = LocalDepEntry->second.getInst())
      RemoveFromReverseMap(ReverseLocalDeps, Inst, RemInst);

    // Remove this local dependency info.
    LocalDeps.erase(LocalDepEntry);
  }

  // If we have any cached pointer dependencies on this instruction, remove
  // them.  If the instruction has non-pointer type, then it can't be a pointer
  // base.

  // Remove it from both the load info and the store info.  The instruction
  // can't be in either of these maps if it is non-pointer.
  if (RemInst->getType()->isPointerTy()) {
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, false));
    RemoveCachedNonLocalPointerDependencies(ValueIsLoadPair(RemInst, true));
  }

  // Loop over all of the things that depend on the instruction we're removing.
  //
  SmallVector<std::pair<Instruction*, Instruction*>, 8> ReverseDepsToAdd;

  // If we find RemInst as a clobber or Def in any of the maps for other values,
  // we need to replace its entry with a dirty version of the instruction after
  // it.  If RemInst is a terminator, we use a null dirty value.
  //
  // Using a dirty version of the instruction after RemInst saves having to scan
  // the entire block to get to this point.
  MemDepResult NewDirtyVal;
  if (!RemInst->isTerminator())
    NewDirtyVal = MemDepResult::getDirty(++BasicBlock::iterator(RemInst));

  ReverseDepMapType::iterator ReverseDepIt = ReverseLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
    // RemInst can't be the terminator if it has local stuff depending on it.
    assert(!ReverseDeps.empty() && !isa<TerminatorInst>(RemInst) &&
           "Nothing can locally depend on a terminator");

    for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
         E = ReverseDeps.end(); I != E; ++I) {
      Instruction *InstDependingOnRemInst = *I;
      assert(InstDependingOnRemInst != RemInst &&
             "Already removed our local dep info");

      LocalDeps[InstDependingOnRemInst] = NewDirtyVal;

      // Make sure to remember that new things depend on the new dirty
      // instruction.
      assert(NewDirtyVal.getInst() && "There is no way something else can have "
             "a local dep on this if it is a terminator!");
      ReverseDepsToAdd.push_back(std::make_pair(NewDirtyVal.getInst(),
                                                InstDependingOnRemInst));
    }

    ReverseLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating the
    // 'ReverseDeps' reference.
    while (!ReverseDepsToAdd.empty()) {
      ReverseLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  ReverseDepIt = ReverseNonLocalDeps.find(RemInst);
  if (ReverseDepIt != ReverseNonLocalDeps.end()) {
    SmallPtrSet<Instruction*, 4> &Set = ReverseDepIt->second;
    for (SmallPtrSet<Instruction*, 4>::iterator I = Set.begin(), E = Set.end();
         I != E; ++I) {
      assert(*I != RemInst && "Already removed NonLocalDep info for RemInst");

      PerInstNLInfo &INLD = NonLocalDeps[*I];
      // The information is now dirty!
      INLD.second = true;

      for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
           DE = INLD.first.end(); DI != DE; ++DI) {
        if (DI->getResult().getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->setResult(NewDirtyVal);

        if (Instruction *NextI = NewDirtyVal.getInst())
          ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
      }
    }

    ReverseNonLocalDeps.erase(ReverseDepIt);

    // Add new reverse deps after scanning the set, to avoid invalidating 'Set'.
    while (!ReverseDepsToAdd.empty()) {
      ReverseNonLocalDeps[ReverseDepsToAdd.back().first]
        .insert(ReverseDepsToAdd.back().second);
      ReverseDepsToAdd.pop_back();
    }
  }

  // If the instruction is in ReverseNonLocalPtrDeps then it appears as a
  // value in the NonLocalPointerDeps info.
  ReverseNonLocalPtrDepTy::iterator ReversePtrDepIt =
    ReverseNonLocalPtrDeps.find(RemInst);
  if (ReversePtrDepIt != ReverseNonLocalPtrDeps.end()) {
    SmallPtrSet<ValueIsLoadPair, 4> &Set = ReversePtrDepIt->second;
    SmallVector<std::pair<Instruction*, ValueIsLoadPair>,8> ReversePtrDepsToAdd;

    for (SmallPtrSet<ValueIsLoadPair, 4>::iterator I = Set.begin(),
         E = Set.end(); I != E; ++I) {
      ValueIsLoadPair P = *I;
      assert(P.getPointer() != RemInst &&
             "Already removed NonLocalPointerDeps info for RemInst");

      NonLocalDepInfo &NLPDI = NonLocalPointerDeps[P].NonLocalDeps;

      // The cache is not valid for any specific block anymore.
      NonLocalPointerDeps[P].Pair = BBSkipFirstBlockPair();
      NonLocalPointerDeps[P].Size = 0;
      NonLocalPointerDeps[P].TBAATag = 0;

      // Update any entries for RemInst to use the instruction after it.
      for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
           DI != DE; ++DI) {
        if (DI->getResult().getInst() != RemInst) continue;

        // Convert to a dirty entry for the subsequent instruction.
        DI->setResult(NewDirtyVal);

        if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
          ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
      }

      // Re-sort the NonLocalDepInfo.  Changing the dirty entry to its
      // subsequent value may invalidate the sortedness.
      std::sort(NLPDI.begin(), NLPDI.end());
    }

    ReverseNonLocalPtrDeps.erase(ReversePtrDepIt);

    while (!ReversePtrDepsToAdd.empty()) {
      ReverseNonLocalPtrDeps[ReversePtrDepsToAdd.back().first]
        .insert(ReversePtrDepsToAdd.back().second);
      ReversePtrDepsToAdd.pop_back();
    }
  }

  assert(!NonLocalDeps.count(RemInst) && "RemInst got reinserted?");
  AA->deleteValue(RemInst);
  DEBUG(verifyRemoved(RemInst));
}
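
// Typical client usage when deleting an instruction (a sketch; 'MDA' and
// 'Inst' are hypothetical values in a transform that keeps memdep):
//
//   MDA.removeInstruction(Inst);   // Keep memdep's caches coherent first.
//   Inst->eraseFromParent();       // Then actually delete the IR.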

/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
  for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
       E = LocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    assert(I->second.getInst() != D &&
           "Inst occurs in data structures");
  }

  for (CachedNonLocalPointerInfo::const_iterator I =
         NonLocalPointerDeps.begin(), E = NonLocalPointerDeps.end();
       I != E; ++I) {
    assert(I->first.getPointer() != D && "Inst occurs in NLPD map key");
    const NonLocalDepInfo &Val = I->second.NonLocalDeps;
    for (NonLocalDepInfo::const_iterator II = Val.begin(), EE = Val.end();
         II != EE; ++II)
      assert(II->getResult().getInst() != D && "Inst occurs as NLPD value");
  }

  for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
       E = NonLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    const PerInstNLInfo &INLD = I->second;
    for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
         EE = INLD.first.end(); II != EE; ++II)
      assert(II->getResult().getInst() != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
       E = ReverseLocalDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseDepMapType::const_iterator I = ReverseNonLocalDeps.begin(),
       E = ReverseNonLocalDeps.end();
       I != E; ++I) {
    assert(I->first != D && "Inst occurs in data structures");
    for (SmallPtrSet<Instruction*, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != D && "Inst occurs in data structures");
  }

  for (ReverseNonLocalPtrDepTy::const_iterator
       I = ReverseNonLocalPtrDeps.begin(),
       E = ReverseNonLocalPtrDeps.end(); I != E; ++I) {
    assert(I->first != D && "Inst occurs in rev NLPD map");

    for (SmallPtrSet<ValueIsLoadPair, 4>::const_iterator II = I->second.begin(),
         EE = I->second.end(); II != EE; ++II)
      assert(*II != ValueIsLoadPair(D, false) &&
             *II != ValueIsLoadPair(D, true) &&
             "Inst occurs in ReverseNonLocalPtrDeps map");
  }
}