BasicAliasAnalysis.cpp revision 50a5914e129c348e8878d4654b4306e0349281c2
//===- BasicAliasAnalysis.cpp - Local Alias Analysis Impl -----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the default implementation of the Alias Analysis interface
// that simply implements a few identities (two different globals cannot alias,
// etc), but otherwise does no analysis.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/MallocHelper.h"
#include "llvm/Analysis/Passes.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Pass.h"
#include "llvm/Target/TargetData.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include <algorithm>
using namespace llvm;

//===----------------------------------------------------------------------===//
// Useful predicates
//===----------------------------------------------------------------------===//

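/// isGEP - Return V as a GEPOperator if it is a getelementptr instruction or
/// a getelementptr constant expression, otherwise return null.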
static const GEPOperator *isGEP(const Value *V) {
  return dyn_cast<GEPOperator>(V);
}

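/// GetGEPOperands - Collect the index operands of the GEP V into GEPOps and
/// return its base pointer, folding in chained GEPs as long as the leading
/// index accumulated so far is a constant zero.  For example (illustrative IR,
/// not taken from a test case):
///   %a = getelementptr [4 x i32]* %base, i32 0, i32 %i
///   %b = getelementptr i32* %a, i32 0
/// starting from %b yields GEPOps = {0, %i} and returns %base.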
static const Value *GetGEPOperands(const Value *V,
                                   SmallVector<Value*, 16> &GEPOps) {
  assert(GEPOps.empty() && "Expect empty list to populate!");
  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
                cast<User>(V)->op_end());

  // Accumulate all of the chained indexes into the operand array
  V = cast<User>(V)->getOperand(0);

  while (const User *G = isGEP(V)) {
    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
        !cast<Constant>(GEPOps[0])->isNullValue())
      break;  // Don't handle folding arbitrary pointer offsets yet...
    GEPOps.erase(GEPOps.begin());   // Drop the zero index
    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
    V = G->getOperand(0);
  }
  return V;
}

/// isKnownNonNull - Return true if we know that the specified value is never
/// null.
static bool isKnownNonNull(const Value *V) {
  // Alloca never returns null, malloc might.
  if (isa<AllocaInst>(V)) return true;

  // A byval argument is never null.
  if (const Argument *A = dyn_cast<Argument>(V))
    return A->hasByValAttr();

  // Global values are not null unless extern weak.
  if (const GlobalValue *GV = dyn_cast<GlobalValue>(V))
    return !GV->hasExternalWeakLinkage();
  return false;
}

/// isNonEscapingLocalObject - Return true if the pointer is to a function-local
/// object that never escapes from the function.
static bool isNonEscapingLocalObject(const Value *V) {
  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocationInst>(V) || isNoAliasCall(V))
    return !PointerMayBeCaptured(V, false);

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function.  Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr()) {
      // Don't bother analyzing arguments already known not to escape.
      if (A->hasNoCaptureAttr())
        return true;
      return !PointerMayBeCaptured(V, false);
    }
  return false;
}


/// isObjectSmallerThan - Return true if we can prove that the object specified
/// by V is smaller than Size.
static bool isObjectSmallerThan(const Value *V, unsigned Size,
                                LLVMContext &Context, const TargetData &TD) {
  const Type *AccessTy;
  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
    AccessTy = GV->getType()->getElementType();
  } else if (const AllocationInst *AI = dyn_cast<AllocationInst>(V)) {
    if (!AI->isArrayAllocation())
      AccessTy = AI->getType()->getElementType();
    else
      return false;
  } else if (const CallInst* CI = extractMallocCall(V)) {
    if (!isArrayMalloc(V, Context, &TD))
      // The size is the argument to the malloc call.
      if (const ConstantInt* C = dyn_cast<ConstantInt>(CI->getOperand(1)))
        return (C->getZExtValue() < Size);
    return false;
  } else if (const Argument *A = dyn_cast<Argument>(V)) {
    if (A->hasByValAttr())
      AccessTy = cast<PointerType>(A->getType())->getElementType();
    else
      return false;
  } else {
    return false;
  }

  if (AccessTy->isSized())
    return TD.getTypeAllocSize(AccessTy) < Size;
  return false;
}

//===----------------------------------------------------------------------===//
// NoAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// NoAA - This class implements the -no-aa pass, which always returns "I
  /// don't know" for alias queries.  NoAA is unlike other alias analysis
  /// implementations, in that it does not chain to a previous analysis.  As
  /// such it doesn't follow many of the rules that other alias analyses must.
  ///
  struct VISIBILITY_HIDDEN NoAA : public ImmutablePass, public AliasAnalysis {
    static char ID; // Class identification, replacement for typeinfo
    NoAA() : ImmutablePass(&ID) {}
    explicit NoAA(void *PID) : ImmutablePass(PID) { }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
    }

    virtual void initializePass() {
      TD = getAnalysisIfAvailable<TargetData>();
    }

    virtual AliasResult alias(const Value *V1, unsigned V1Size,
                              const Value *V2, unsigned V2Size) {
      return MayAlias;
    }

    virtual void getArgumentAccesses(Function *F, CallSite CS,
                                     std::vector<PointerAccessInfo> &Info) {
      llvm_unreachable("This method may not be called on this function!");
    }

    virtual void getMustAliases(Value *P, std::vector<Value*> &RetVals) { }
    virtual bool pointsToConstantMemory(const Value *P) { return false; }
    virtual ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size) {
      return ModRef;
    }
    virtual ModRefResult getModRefInfo(CallSite CS1, CallSite CS2) {
      return ModRef;
    }
    virtual bool hasNoModRefInfoForCalls() const { return true; }

    virtual void deleteValue(Value *V) {}
    virtual void copyValue(Value *From, Value *To) {}
  };
}  // End of anonymous namespace

// Register this pass...
char NoAA::ID = 0;
static RegisterPass<NoAA>
U("no-aa", "No Alias Analysis (always returns 'may' alias)", true, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis> V(U);

ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }

//===----------------------------------------------------------------------===//
// BasicAA Pass
//===----------------------------------------------------------------------===//

namespace {
  /// BasicAliasAnalysis - This is the default alias analysis implementation.
  /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
  /// derives from the NoAA class.
  struct VISIBILITY_HIDDEN BasicAliasAnalysis : public NoAA {
    static char ID; // Class identification, replacement for typeinfo
    BasicAliasAnalysis() : NoAA(&ID) {}
    AliasResult alias(const Value *V1, unsigned V1Size,
                      const Value *V2, unsigned V2Size) {
      SmallSet<const Value*, 16> VisitedPHIs;
      return aliasCheck(V1, V1Size, V2, V2Size, VisitedPHIs);
    }

    ModRefResult getModRefInfo(CallSite CS, Value *P, unsigned Size);
    ModRefResult getModRefInfo(CallSite CS1, CallSite CS2);

    /// hasNoModRefInfoForCalls - We can provide mod/ref information against
    /// non-escaping allocations.
    virtual bool hasNoModRefInfoForCalls() const { return false; }

    /// pointsToConstantMemory - Chase pointers until we find a (constant
    /// global) or not.
    bool pointsToConstantMemory(const Value *P);

  private:
    // aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP instruction
    // against another.
    AliasResult aliasGEP(const Value *V1, unsigned V1Size,
                         const Value *V2, unsigned V2Size,
                         SmallSet<const Value*, 16> &VisitedPHIs);

    AliasResult aliasPHI(const Value *V1, unsigned V1Size,
                         const Value *V2, unsigned V2Size,
                         SmallSet<const Value*, 16> &VisitedPHIs);

    AliasResult aliasCheck(const Value *V1, unsigned V1Size,
                           const Value *V2, unsigned V2Size,
                           SmallSet<const Value*, 16> &VisitedPHIs);

    // CheckGEPInstructions - Check two GEP instructions with known
    // must-aliasing base pointers.  This checks to see if the index expressions
    // preclude the pointers from aliasing...
    AliasResult
    CheckGEPInstructions(const Type* BasePtr1Ty,
                         Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1Size,
                         const Type *BasePtr2Ty,
                         Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2Size);
  };
}  // End of anonymous namespace

// Register this pass...
char BasicAliasAnalysis::ID = 0;
static RegisterPass<BasicAliasAnalysis>
X("basicaa", "Basic Alias Analysis (default AA impl)", false, true);

// Declare that we implement the AliasAnalysis interface
static RegisterAnalysisGroup<AliasAnalysis, true> Y(X);

ImmutablePass *llvm::createBasicAliasAnalysisPass() {
  return new BasicAliasAnalysis();
}

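// Note: clients normally do not construct BasicAliasAnalysis directly.  A pass
// that wants alias information requires the AliasAnalysis group (for example
// with AU.addRequired<AliasAnalysis>() in its getAnalysisUsage method), and
// the pass manager supplies basicaa as the default member of the group unless
// another implementation has been scheduled.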

/// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not.
bool BasicAliasAnalysis::pointsToConstantMemory(const Value *P) {
  if (const GlobalVariable *GV =
        dyn_cast<GlobalVariable>(P->getUnderlyingObject()))
    return GV->isConstant();
  return false;
}


// getModRefInfo - Check to see if the specified callsite can clobber the
// specified memory object.  Since we only look at local properties of this
// function, we really can't say much about this query.  We do, however, use
// simple "address taken" analysis on local objects.
//
AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS, Value *P, unsigned Size) {
  if (!isa<Constant>(P)) {
    const Value *Object = P->getUnderlyingObject();

    // If this is a tail call and P points to a stack location, we know that
    // the tail call cannot access or modify the local stack.
    // We cannot exclude byval arguments here; these belong to the caller of
    // the current function, not to the current function, and a tail callee
    // may reference them.
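    // For example (illustrative IR):
    //   %x = alloca i32
    //   ...
    //   tail call void @f()
    // The call is NoModRef with respect to %x, because a tail callee is not
    // permitted to access the caller's stack frame.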
    if (isa<AllocaInst>(Object))
      if (CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
        if (CI->isTailCall())
          return NoModRef;

    // If the pointer is to a locally allocated object that does not escape,
    // then the call cannot mod/ref the pointer unless the call takes the
    // argument without capturing it.
    if (isNonEscapingLocalObject(Object) && CS.getInstruction() != Object) {
      bool passedAsArg = false;
      // TODO: Eventually only check 'nocapture' arguments.
      for (CallSite::arg_iterator CI = CS.arg_begin(), CE = CS.arg_end();
           CI != CE; ++CI)
        if (isa<PointerType>((*CI)->getType()) &&
            alias(cast<Value>(CI), ~0U, P, ~0U) != NoAlias)
          passedAsArg = true;

      if (!passedAsArg)
        return NoModRef;
    }

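    // For the atomic read-modify-write intrinsics, the only memory touched is
    // the pointer operand, so if that location is known not to alias P the
    // whole call is NoModRef with respect to P.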
    if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction())) {
      switch (II->getIntrinsicID()) {
      default: break;
      case Intrinsic::atomic_cmp_swap:
      case Intrinsic::atomic_swap:
      case Intrinsic::atomic_load_add:
      case Intrinsic::atomic_load_sub:
      case Intrinsic::atomic_load_and:
      case Intrinsic::atomic_load_nand:
      case Intrinsic::atomic_load_or:
      case Intrinsic::atomic_load_xor:
      case Intrinsic::atomic_load_max:
      case Intrinsic::atomic_load_min:
      case Intrinsic::atomic_load_umax:
      case Intrinsic::atomic_load_umin:
        if (alias(II->getOperand(1), Size, P, Size) == NoAlias)
          return NoModRef;
        break;
      }
    }
  }

  // The AliasAnalysis base class has some smarts, let's use them.
  return AliasAnalysis::getModRefInfo(CS, P, Size);
}


AliasAnalysis::ModRefResult
BasicAliasAnalysis::getModRefInfo(CallSite CS1, CallSite CS2) {
  // If CS1 or CS2 are readnone, they don't interact.
  ModRefBehavior CS1B = AliasAnalysis::getModRefBehavior(CS1);
  if (CS1B == DoesNotAccessMemory) return NoModRef;

  ModRefBehavior CS2B = AliasAnalysis::getModRefBehavior(CS2);
  if (CS2B == DoesNotAccessMemory) return NoModRef;

  // If they both only read from memory, just return ref.
  if (CS1B == OnlyReadsMemory && CS2B == OnlyReadsMemory)
    return Ref;

  // Otherwise, fall back to NoAA (mod+ref).
  return NoAA::getModRefInfo(CS1, CS2);
}

// aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP instruction
// against another.
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasGEP(const Value *V1, unsigned V1Size,
                             const Value *V2, unsigned V2Size,
                             SmallSet<const Value*, 16> &VisitedPHIs) {
  // If we have two gep instructions with must-alias'ing base pointers, figure
  // out if the indexes to the GEP tell us anything about the derived pointer.
  // Note that we also handle chains of getelementptr instructions as well as
  // constant expression getelementptrs here.
  //
  if (isGEP(V1) && isGEP(V2)) {
    const User *GEP1 = cast<User>(V1);
    const User *GEP2 = cast<User>(V2);

    // If V1 and V2 are identical GEPs, just recurse down on both of them.
    // This allows us to analyze things like:
    //   P = gep A, 0, i, 1
    //   Q = gep B, 0, i, 1
    // by just analyzing A and B.  This is even safe for variable indices.
    if (GEP1->getType() == GEP2->getType() &&
        GEP1->getNumOperands() == GEP2->getNumOperands() &&
        GEP1->getOperand(0)->getType() == GEP2->getOperand(0)->getType() &&
        // All operands are the same, ignoring the base.
        std::equal(GEP1->op_begin()+1, GEP1->op_end(), GEP2->op_begin()+1))
      return aliasCheck(GEP1->getOperand(0), V1Size,
                        GEP2->getOperand(0), V2Size, VisitedPHIs);

    // Drill down into the first non-gep value, to test for must-aliasing of
    // the base pointers.
    while (isGEP(GEP1->getOperand(0)) &&
           GEP1->getOperand(1) ==
           Constant::getNullValue(GEP1->getOperand(1)->getType()))
      GEP1 = cast<User>(GEP1->getOperand(0));
    const Value *BasePtr1 = GEP1->getOperand(0);

    while (isGEP(GEP2->getOperand(0)) &&
           GEP2->getOperand(1) ==
           Constant::getNullValue(GEP2->getOperand(1)->getType()))
      GEP2 = cast<User>(GEP2->getOperand(0));
    const Value *BasePtr2 = GEP2->getOperand(0);

    // Do the base pointers alias?
    AliasResult BaseAlias = aliasCheck(BasePtr1, ~0U, BasePtr2, ~0U,
                                       VisitedPHIs);
    if (BaseAlias == NoAlias) return NoAlias;
    if (BaseAlias == MustAlias) {
      // If the base pointers alias each other exactly, check to see if we can
      // figure out anything about the resultant pointers, to try to prove
      // non-aliasing.

      // Collect all of the chained GEP operands together into one simple place
      SmallVector<Value*, 16> GEP1Ops, GEP2Ops;
      BasePtr1 = GetGEPOperands(V1, GEP1Ops);
      BasePtr2 = GetGEPOperands(V2, GEP2Ops);

      // If GetGEPOperands were able to fold to the same must-aliased pointer,
      // do the comparison.
      if (BasePtr1 == BasePtr2) {
        AliasResult GAlias =
          CheckGEPInstructions(BasePtr1->getType(),
                               &GEP1Ops[0], GEP1Ops.size(), V1Size,
                               BasePtr2->getType(),
                               &GEP2Ops[0], GEP2Ops.size(), V2Size);
        if (GAlias != MayAlias)
          return GAlias;
      }
    }
  }

  // Check to see if these two pointers are related by a getelementptr
  // instruction.  If one pointer is a GEP with a non-zero index of the other
  // pointer, we know they cannot alias.
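  // For example (illustrative IR):
  //   %q = getelementptr i32* %p, i32 1
  // An access of at most sizeof(i32) bytes through %q cannot overlap an
  // equally sized access through %p, because the two pointers are exactly
  // sizeof(i32) bytes apart.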
  //
  if (V1Size == ~0U || V2Size == ~0U)
    return MayAlias;

  SmallVector<Value*, 16> GEPOperands;
  const Value *BasePtr = GetGEPOperands(V1, GEPOperands);

  AliasResult R = aliasCheck(BasePtr, V1Size, V2, V2Size, VisitedPHIs);
  if (R == MustAlias) {
    // If there is at least one non-zero constant index, we know they cannot
    // alias.
    bool ConstantFound = false;
    bool AllZerosFound = true;
    for (unsigned i = 0, e = GEPOperands.size(); i != e; ++i)
      if (const Constant *C = dyn_cast<Constant>(GEPOperands[i])) {
        if (!C->isNullValue()) {
          ConstantFound = true;
          AllZerosFound = false;
          break;
        }
      } else {
        AllZerosFound = false;
      }

    // If we have getelementptr <ptr>, 0, 0, 0, 0, ... and V2 must-aliases
    // the ptr, the end result is a must alias also.
    if (AllZerosFound)
      return MustAlias;

    if (ConstantFound) {
      if (V2Size <= 1 && V1Size <= 1)  // Just pointer check?
        return NoAlias;

      // Otherwise we have to check to see that the distance is more than
      // the size of the argument... build an index vector that is equal to
      // the arguments provided, except substitute 0's for any variable
      // indexes we find...
      if (TD &&
          cast<PointerType>(BasePtr->getType())->getElementType()->isSized()) {
        for (unsigned i = 0; i != GEPOperands.size(); ++i)
          if (!isa<ConstantInt>(GEPOperands[i]))
            GEPOperands[i] = Constant::getNullValue(GEPOperands[i]->getType());
        int64_t Offset =
          TD->getIndexedOffset(BasePtr->getType(),
                               &GEPOperands[0],
                               GEPOperands.size());

        if (Offset >= (int64_t)V2Size || Offset <= -(int64_t)V1Size)
          return NoAlias;
      }
    }
  }

  return MayAlias;
}

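// aliasPHI - Determine the alias relationship between a PHI node and another
// value by checking each of the PHI's unique incoming values against V2; a
// NoAlias or MustAlias answer is returned only if all incoming values agree.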
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasPHI(const Value *V1, unsigned V1Size,
                             const Value *V2, unsigned V2Size,
                             SmallSet<const Value*, 16> &VisitedPHIs) {
  // If the PHI node has already been visited, avoid recursing any further.
  if (!VisitedPHIs.insert(V1))
    return MayAlias;

  SmallSet<Value*, 4> UniqueSrc;
  SmallVector<Value*, 4> V1Srcs;
  const PHINode *PN = cast<PHINode>(V1);
  for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
    Value *PV1 = PN->getIncomingValue(i);
    if (isa<PHINode>(PV1))
      // If any of the sources is itself a PHI, return MayAlias conservatively
      // to avoid compile time explosion.
      return MayAlias;
    if (UniqueSrc.insert(PV1))
      V1Srcs.push_back(PV1);
  }

  // If all sources of the PHI node NoAlias or MustAlias V2, then return
  // NoAlias / MustAlias. Otherwise, return MayAlias.
  AliasResult Alias = aliasCheck(V1Srcs[0], V1Size, V2, V2Size, VisitedPHIs);
  for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
    Value *V = V1Srcs[i];
    AliasResult ThisAlias = aliasCheck(V, V1Size, V2, V2Size, VisitedPHIs);
    if (ThisAlias != Alias)
      return MayAlias;
  }

  return Alias;
}

// aliasCheck - Provide a bunch of ad-hoc rules to disambiguate in common cases,
// such as array references.
//
AliasAnalysis::AliasResult
BasicAliasAnalysis::aliasCheck(const Value *V1, unsigned V1Size,
                               const Value *V2, unsigned V2Size,
                               SmallSet<const Value*, 16> &VisitedPHIs) {
  // Strip off any casts if they exist.
  V1 = V1->stripPointerCasts();
  V2 = V2->stripPointerCasts();

  // Are we checking for alias of the same value?
  if (V1 == V2) return MustAlias;

  if (!isa<PointerType>(V1->getType()) || !isa<PointerType>(V2->getType()))
    return NoAlias;  // Scalars cannot alias each other

  // Figure out what objects these things are pointing to if we can.
  const Value *O1 = V1->getUnderlyingObject();
  const Value *O2 = V2->getUnderlyingObject();

  if (O1 != O2) {
    // If V1/V2 point to two different objects we know that we have no alias.
    if (isIdentifiedObject(O1) && isIdentifiedObject(O2))
      return NoAlias;

    // Arguments can't alias with local allocations or noalias calls.
    if ((isa<Argument>(O1) && (isa<AllocationInst>(O2) || isNoAliasCall(O2))) ||
        (isa<Argument>(O2) && (isa<AllocationInst>(O1) || isNoAliasCall(O1))))
      return NoAlias;

    // Most objects can't alias null.
    if ((isa<ConstantPointerNull>(V2) && isKnownNonNull(O1)) ||
        (isa<ConstantPointerNull>(V1) && isKnownNonNull(O2)))
      return NoAlias;
  }

  // If the size of one access is larger than the entire object on the other
  // side, then we know such behavior is undefined and can assume no alias.
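  // For example, if O2 is a 4-byte alloca, a well-defined 8-byte access
  // through V1 cannot lie within O2, so the two pointers cannot alias.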
  LLVMContext &Context = V1->getContext();
  if (TD)
    if ((V1Size != ~0U && isObjectSmallerThan(O2, V1Size, Context, *TD)) ||
        (V2Size != ~0U && isObjectSmallerThan(O1, V2Size, Context, *TD)))
      return NoAlias;

  // If one pointer is the result of a call/invoke and the other is a
  // non-escaping local object, then we know the object couldn't escape to a
  // point where the call could return it.
  if ((isa<CallInst>(O1) || isa<InvokeInst>(O1)) &&
      isNonEscapingLocalObject(O2) && O1 != O2)
    return NoAlias;
  if ((isa<CallInst>(O2) || isa<InvokeInst>(O2)) &&
      isNonEscapingLocalObject(O1) && O1 != O2)
    return NoAlias;

  if (!isGEP(V1) && isGEP(V2)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }
  if (isGEP(V1))
    return aliasGEP(V1, V1Size, V2, V2Size, VisitedPHIs);

  if (isa<PHINode>(V2) && !isa<PHINode>(V1)) {
    std::swap(V1, V2);
    std::swap(V1Size, V2Size);
  }
  if (isa<PHINode>(V1))
    return aliasPHI(V1, V1Size, V2, V2Size, VisitedPHIs);

  return MayAlias;
}

// This function is used to determine if the indices of two GEP instructions are
// equal. V1 and V2 are the indices.
static bool IndexOperandsEqual(Value *V1, Value *V2, LLVMContext &Context) {
  if (V1->getType() == V2->getType())
    return V1 == V2;
  if (Constant *C1 = dyn_cast<Constant>(V1))
    if (Constant *C2 = dyn_cast<Constant>(V2)) {
      // Sign extend the constants to long types, if necessary
      if (C1->getType() != Type::getInt64Ty(Context))
        C1 = ConstantExpr::getSExt(C1, Type::getInt64Ty(Context));
      if (C2->getType() != Type::getInt64Ty(Context))
        C2 = ConstantExpr::getSExt(C2, Type::getInt64Ty(Context));
      return C1 == C2;
    }
  return false;
}

/// CheckGEPInstructions - Check two GEP instructions with known must-aliasing
/// base pointers.  This checks to see if the index expressions preclude the
/// pointers from aliasing...
AliasAnalysis::AliasResult
BasicAliasAnalysis::CheckGEPInstructions(
  const Type* BasePtr1Ty, Value **GEP1Ops, unsigned NumGEP1Ops, unsigned G1S,
  const Type *BasePtr2Ty, Value **GEP2Ops, unsigned NumGEP2Ops, unsigned G2S) {
  // We currently can't handle the case when the base pointers have different
  // primitive types.  Since this is uncommon anyway, we are happy being
  // extremely conservative.
  if (BasePtr1Ty != BasePtr2Ty)
    return MayAlias;

  const PointerType *GEPPointerTy = cast<PointerType>(BasePtr1Ty);

  LLVMContext &Context = GEPPointerTy->getContext();

  // Find the (possibly empty) initial sequence of equal values... which are not
  // necessarily constants.
  unsigned NumGEP1Operands = NumGEP1Ops, NumGEP2Operands = NumGEP2Ops;
  unsigned MinOperands = std::min(NumGEP1Operands, NumGEP2Operands);
  unsigned MaxOperands = std::max(NumGEP1Operands, NumGEP2Operands);
  unsigned UnequalOper = 0;
  while (UnequalOper != MinOperands &&
         IndexOperandsEqual(GEP1Ops[UnequalOper], GEP2Ops[UnequalOper],
         Context)) {
    // Advance through the type as we go...
    ++UnequalOper;
    if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
      BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[UnequalOper-1]);
    else {
      // If all operands equal each other, then the derived pointers must
      // alias each other...
      BasePtr1Ty = 0;
      assert(UnequalOper == NumGEP1Operands && UnequalOper == NumGEP2Operands &&
             "Ran out of type nesting, but not out of operands?");
      return MustAlias;
    }
  }

  // If we have seen all constant operands, and run out of indexes on one of the
  // getelementptrs, check to see if the tail of the leftover one is all zeros.
  // If so, return MustAlias.
  if (UnequalOper == MinOperands) {
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    bool AllAreZeros = true;
    for (unsigned i = UnequalOper; i != MaxOperands; ++i)
      if (!isa<Constant>(GEP1Ops[i]) ||
          !cast<Constant>(GEP1Ops[i])->isNullValue()) {
        AllAreZeros = false;
        break;
      }
    if (AllAreZeros) return MustAlias;
  }


  // So now we know that the indexes derived from the base pointers,
  // which are known to alias, are different.  We can still determine a
  // no-alias result if there are differing constant pairs in the index
  // chain.  For example:
  //        A[i][0] != A[j][1] iff (&A[0][1]-&A[0][0] >= std::max(G1S, G2S))
  //
  // We have to be careful here about array accesses.  In particular, consider:
  //        A[1][0] vs A[0][i]
  // In this case, we don't *know* that the array will be accessed in bounds:
  // the index could even be negative.  Because of this, we have to
  // conservatively *give up* and return may alias.  We disregard differing
  // array subscripts that are followed by a variable index without going
  // through a struct.
  //
  unsigned SizeMax = std::max(G1S, G2S);
  if (SizeMax == ~0U) return MayAlias; // Avoid frivolous work.

  // Scan for the first operand that is constant and unequal in the
  // two getelementptrs...
  unsigned FirstConstantOper = UnequalOper;
  for (; FirstConstantOper != MinOperands; ++FirstConstantOper) {
    const Value *G1Oper = GEP1Ops[FirstConstantOper];
    const Value *G2Oper = GEP2Ops[FirstConstantOper];

    if (G1Oper != G2Oper)   // Found non-equal constant indexes...
      if (Constant *G1OC = dyn_cast<ConstantInt>(const_cast<Value*>(G1Oper)))
        if (Constant *G2OC = dyn_cast<ConstantInt>(const_cast<Value*>(G2Oper))){
          if (G1OC->getType() != G2OC->getType()) {
            // Sign extend both operands to long.
            if (G1OC->getType() != Type::getInt64Ty(Context))
              G1OC = ConstantExpr::getSExt(G1OC, Type::getInt64Ty(Context));
            if (G2OC->getType() != Type::getInt64Ty(Context))
              G2OC = ConstantExpr::getSExt(G2OC, Type::getInt64Ty(Context));
            GEP1Ops[FirstConstantOper] = G1OC;
            GEP2Ops[FirstConstantOper] = G2OC;
          }

          if (G1OC != G2OC) {
            // Handle the "be careful" case above: if this is an array/vector
            // subscript, scan for a subsequent variable array index.
            if (const SequentialType *STy =
                  dyn_cast<SequentialType>(BasePtr1Ty)) {
              const Type *NextTy = STy;
              bool isBadCase = false;

              for (unsigned Idx = FirstConstantOper;
                   Idx != MinOperands && isa<SequentialType>(NextTy); ++Idx) {
                const Value *V1 = GEP1Ops[Idx], *V2 = GEP2Ops[Idx];
                if (!isa<Constant>(V1) || !isa<Constant>(V2)) {
                  isBadCase = true;
                  break;
                }
                // If the array is indexed beyond the bounds of the static type
                // at this level, it will also fall into the "be careful" case.
                // It would theoretically be possible to analyze these cases,
                // but for now just be conservatively correct.
                if (const ArrayType *ATy = dyn_cast<ArrayType>(STy))
                  if (cast<ConstantInt>(G1OC)->getZExtValue() >=
                        ATy->getNumElements() ||
                      cast<ConstantInt>(G2OC)->getZExtValue() >=
                        ATy->getNumElements()) {
                    isBadCase = true;
                    break;
                  }
                if (const VectorType *VTy = dyn_cast<VectorType>(STy))
                  if (cast<ConstantInt>(G1OC)->getZExtValue() >=
                        VTy->getNumElements() ||
                      cast<ConstantInt>(G2OC)->getZExtValue() >=
                        VTy->getNumElements()) {
                    isBadCase = true;
                    break;
                  }
                STy = cast<SequentialType>(NextTy);
                NextTy = cast<SequentialType>(NextTy)->getElementType();
              }

              if (isBadCase) G1OC = 0;
            }

            // Make sure they are comparable (i.e., not constant expressions), and
            // make sure the GEP with the smaller leading constant is GEP1.
            if (G1OC) {
              Constant *Compare = ConstantExpr::getICmp(ICmpInst::ICMP_SGT,
                                                        G1OC, G2OC);
              if (ConstantInt *CV = dyn_cast<ConstantInt>(Compare)) {
                if (CV->getZExtValue()) {  // If they are comparable and G2 > G1
                  std::swap(GEP1Ops, GEP2Ops);  // Make GEP1 < GEP2
                  std::swap(NumGEP1Ops, NumGEP2Ops);
                }
                break;
              }
            }
          }
        }
    BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->getTypeAtIndex(G1Oper);
  }

  // No shared constant operands, and we ran out of common operands.  At this
  // point, the GEP instructions have run through all of their operands, and we
  // haven't found evidence that there are any deltas between the GEPs.
  // However, one GEP may have more operands than the other.  If this is the
  // case, there may still be hope.  Check this now.
  if (FirstConstantOper == MinOperands) {
    // Without TargetData, we won't know what the offsets are.
    if (!TD)
      return MayAlias;

    // Make GEP1Ops be the longer one if there is a longer one.
    if (NumGEP1Ops < NumGEP2Ops) {
      std::swap(GEP1Ops, GEP2Ops);
      std::swap(NumGEP1Ops, NumGEP2Ops);
    }

    // Is there anything to check?
    if (NumGEP1Ops > MinOperands) {
      for (unsigned i = FirstConstantOper; i != MaxOperands; ++i)
        if (isa<ConstantInt>(GEP1Ops[i]) &&
            !cast<ConstantInt>(GEP1Ops[i])->isZero()) {
          // Yup, there's a constant in the tail.  Set all variables to
          // constants in the GEP instruction to make it suitable for
          // TargetData::getIndexedOffset.
          for (i = 0; i != MaxOperands; ++i)
            if (!isa<ConstantInt>(GEP1Ops[i]))
              GEP1Ops[i] = Constant::getNullValue(GEP1Ops[i]->getType());
          // Okay, now get the offset.  This is the relative offset for the full
          // instruction.
          int64_t Offset1 = TD->getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                 NumGEP1Ops);

          // Now check without any constants at the end.
          int64_t Offset2 = TD->getIndexedOffset(GEPPointerTy, GEP1Ops,
                                                 MinOperands);

          // Make sure we compare the absolute difference.
          if (Offset1 > Offset2)
            std::swap(Offset1, Offset2);

          // If the tail provided a big enough offset, return NoAlias!
          if ((uint64_t)(Offset2-Offset1) >= SizeMax)
            return NoAlias;
          // Otherwise break - we don't look for another constant in the tail.
          break;
        }
    }

    // Couldn't find anything useful.
    return MayAlias;
  }

  // If there are non-equal constant arguments, then we can figure
  // out a minimum known delta between the two index expressions... at
  // this point we know that the first constant index of GEP1 is less
  // than the first constant index of GEP2.

  // Advance BasePtr[12]Ty over this first differing constant operand.
  BasePtr2Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP2Ops[FirstConstantOper]);
  BasePtr1Ty = cast<CompositeType>(BasePtr1Ty)->
      getTypeAtIndex(GEP1Ops[FirstConstantOper]);

  // We are going to be using TargetData::getIndexedOffset to determine the
  // offset that each of the GEPs is reaching.  To do this, we have to convert
  // all variable references to constant references, starting by converting the
  // initial sequence of array subscripts into constant zeros.
  const Type *ZeroIdxTy = GEPPointerTy;
  for (unsigned i = 0; i != FirstConstantOper; ++i) {
    if (!isa<StructType>(ZeroIdxTy))
      GEP1Ops[i] = GEP2Ops[i] =
                              Constant::getNullValue(Type::getInt32Ty(Context));

    if (const CompositeType *CT = dyn_cast<CompositeType>(ZeroIdxTy))
      ZeroIdxTy = CT->getTypeAtIndex(GEP1Ops[i]);
  }

  // We know that GEP1Ops[FirstConstantOper] & GEP2Ops[FirstConstantOper] are ok

  // Loop over the rest of the operands...
  for (unsigned i = FirstConstantOper+1; i != MaxOperands; ++i) {
    const Value *Op1 = i < NumGEP1Ops ? GEP1Ops[i] : 0;
    const Value *Op2 = i < NumGEP2Ops ? GEP2Ops[i] : 0;
    // If they are equal, use a zero index...
    if (Op1 == Op2 && BasePtr1Ty == BasePtr2Ty) {
      if (!isa<ConstantInt>(Op1))
        GEP1Ops[i] = GEP2Ops[i] = Constant::getNullValue(Op1->getType());
      // Otherwise, just keep the constants we have.
    } else {
      if (Op1) {
        if (const ConstantInt *Op1C = dyn_cast<ConstantInt>(Op1)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty)) {
            if (Op1C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }

        } else {
          // GEP1 is known to produce a value less than GEP2.  To be
          // conservatively correct, we must assume the largest possible
          // constant is used in this position.  This cannot be the initial
          // index to the GEP instructions (because we know we have at least one
          // element before this one with the different constant arguments), so
          // we know that the current index must be into either a struct or
          // array.  Because we know it's not constant, this cannot be a
          // structure index.  Because of this, we can calculate the maximum
          // value possible.
          //
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr1Ty))
            GEP1Ops[i] =
                  ConstantInt::get(Type::getInt64Ty(Context),
                                   AT->getNumElements()-1);
          else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr1Ty))
            GEP1Ops[i] =
                  ConstantInt::get(Type::getInt64Ty(Context),
                                   VT->getNumElements()-1);
        }
      }

      if (Op2) {
        if (const ConstantInt *Op2C = dyn_cast<ConstantInt>(Op2)) {
          // If this is an array index, make sure the array element is in range.
          if (const ArrayType *AT = dyn_cast<ArrayType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= AT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          } else if (const VectorType *VT = dyn_cast<VectorType>(BasePtr2Ty)) {
            if (Op2C->getZExtValue() >= VT->getNumElements())
              return MayAlias;  // Be conservative with out-of-range accesses
          }
        } else {  // Conservatively assume the minimum value for this index
          GEP2Ops[i] = Constant::getNullValue(Op2->getType());
        }
      }
    }

    if (BasePtr1Ty && Op1) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr1Ty))
        BasePtr1Ty = CT->getTypeAtIndex(GEP1Ops[i]);
      else
        BasePtr1Ty = 0;
    }

    if (BasePtr2Ty && Op2) {
      if (const CompositeType *CT = dyn_cast<CompositeType>(BasePtr2Ty))
        BasePtr2Ty = CT->getTypeAtIndex(GEP2Ops[i]);
      else
        BasePtr2Ty = 0;
    }
  }

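  // Every variable index has now been replaced by a conservative constant, so
  // if TargetData can compute concrete offsets for the two operand vectors and
  // they differ by at least the maximum access size, the accesses cannot
  // overlap.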
  if (TD && GEPPointerTy->getElementType()->isSized()) {
    int64_t Offset1 =
      TD->getIndexedOffset(GEPPointerTy, GEP1Ops, NumGEP1Ops);
    int64_t Offset2 =
      TD->getIndexedOffset(GEPPointerTy, GEP2Ops, NumGEP2Ops);
    assert(Offset1 != Offset2 &&
           "There is at least one different constant here!");

    // Make sure we compare the absolute difference.
    if (Offset1 > Offset2)
      std::swap(Offset1, Offset2);

    if ((uint64_t)(Offset2-Offset1) >= SizeMax) {
      //cerr << "Determined that these two GEP's don't alias ["
      //     << SizeMax << " bytes]: \n" << *GEP1 << *GEP2;
      return NoAlias;
    }
  }
  return MayAlias;
}

// Make sure that anything that uses AliasAnalysis pulls in this file...
DEFINING_FILE_FOR(BasicAliasAnalysis)