UninitializedValues.cpp revision 8f40dcc4ddbe4ef796dd1bf0696ac66d4e40e99a
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements uninitialized values analysis for source-level CFGs.
11//
12//===----------------------------------------------------------------------===//
13
14#include <utility>
15#include "llvm/ADT/Optional.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/PackedVector.h"
18#include "llvm/ADT/DenseMap.h"
19#include "clang/AST/Decl.h"
20#include "clang/Analysis/CFG.h"
21#include "clang/Analysis/AnalysisContext.h"
22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23#include "clang/Analysis/Analyses/UninitializedValues.h"
24#include "llvm/Support/SaveAndRestore.h"
25
26using namespace clang;
27
28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30      !vd->isExceptionVariable() &&
31      vd->getDeclContext() == dc) {
32    QualType ty = vd->getType();
33    return ty->isScalarType() || ty->isVectorType();
34  }
35  return false;
36}
37
38//------------------------------------------------------------------------====//
39// DeclToIndex: a mapping from Decls we track to value indices.
40//====------------------------------------------------------------------------//
41
namespace {
/// Maps the VarDecls tracked by the analysis to dense indices, which are
/// used as positions in the per-block value vectors.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
58
59void DeclToIndex::computeMap(const DeclContext &dc) {
60  unsigned count = 0;
61  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
62                                               E(dc.decls_end());
63  for ( ; I != E; ++I) {
64    const VarDecl *vd = *I;
65    if (isTrackedVar(vd, &dc))
66      map[vd] = count++;
67  }
68}
69
70llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
71  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
72  if (I == map.end())
73    return llvm::Optional<unsigned>();
74  return I->second;
75}
76
77//------------------------------------------------------------------------====//
78// CFGBlockValues: dataflow values for CFG blocks.
79//====------------------------------------------------------------------------//
80
// These values are defined in such a way that a merge can be done using
// a bitwise OR: bit 0 is set when the variable may be initialized, bit 1
// when it may be uninitialized, so Initialized | Uninitialized yields
// MayUninitialized.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

/// True if the value is uninitialized on at least one path.
static bool isUninitialized(const Value v) {
  return v == Uninitialized || v == MayUninitialized;
}

/// True if the value is uninitialized on every path.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
94
namespace {

/// Packed vector holding one 2-bit 'Value' per tracked variable.
typedef llvm::PackedVector<Value, 2> ValueVector;
/// Per-block pair of value vectors.  Ordinary blocks use only 'first';
/// blocks in a logical-operator chain keep one vector per outgoing edge.
typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the dataflow values for every CFG block, plus a scratch vector
/// used while merging predecessor values.
class CFGBlockValues {
  const CFG &cfg;
  // One lazily-allocated BVPair per CFG block, indexed by block ID.
  BVPair *vals;
  // Working vector used by the merge/update cycle for the current block.
  ValueVector scratch;
  DeclToIndex declToIndex;

  // Allocate '*bv' on first use; return a reference to it.
  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  /// Number of variables tracked by the analysis.
  unsigned getNumEntries() const { return declToIndex.size(); }

  /// Build the decl-to-index map for 'dc' and size 'scratch' to match.
  void computeSetOfDeclarations(const DeclContext &dc);
  /// Return the vector for 'block' on the edge toward 'dstBlock' (the
  /// edge only matters for logical-operator blocks).
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  /// Copy (if 'isFirst') or bitwise-OR 'source' into the scratch vector.
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  /// Store scratch as the block's single vector; true if it changed.
  bool updateValueVectorWithScratch(const CFGBlock *block);
  /// Store 'newVals' as the block's edge vectors; true if either changed.
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  /// Mutable reference to 'vd's slot in the scratch vector.
  ValueVector::reference operator[](const VarDecl *vd);

  /// The dataflow value of 'vd' in 'block' on the edge toward 'dstBlock'.
  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block, dstBlock)[idx.getValue()];
  }
};
} // end anonymous namespace
140
141CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
142  unsigned n = cfg.getNumBlockIDs();
143  if (!n)
144    return;
145  vals = new std::pair<ValueVector*, ValueVector*>[n];
146  memset((void*)vals, 0, sizeof(*vals) * n);
147}
148
149CFGBlockValues::~CFGBlockValues() {
150  unsigned n = cfg.getNumBlockIDs();
151  if (n == 0)
152    return;
153  for (unsigned i = 0; i < n; ++i) {
154    delete vals[i].first;
155    delete vals[i].second;
156  }
157  delete [] vals;
158}
159
/// Build the decl-to-index mapping for 'dc', then size the scratch vector
/// so it has one slot per tracked variable.
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}
164
165ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
166  if (!bv)
167    bv = new ValueVector(declToIndex.size());
168  return *bv;
169}
170
171/// This function pattern matches for a '&&' or '||' that appears at
172/// the beginning of a CFGBlock that also (1) has a terminator and
173/// (2) has no other elements.  If such an expression is found, it is returned.
174static const BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
175  if (block->empty())
176    return 0;
177
178  CFGElement front = block->front();
179  const CFGStmt *cstmt = front.getAs<CFGStmt>();
180  if (!cstmt)
181    return 0;
182
183  const BinaryOperator *b = dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());
184
185  if (!b || !b->isLogicalOp())
186    return 0;
187
188  if (block->pred_size() == 2) {
189    if (block->getTerminatorCondition() == b) {
190      if (block->succ_size() == 2)
191      return b;
192    }
193    else if (block->size() == 1)
194      return b;
195  }
196
197  return 0;
198}
199
/// Return the value vector for 'block' on the edge leading to 'dstBlock'.
/// Logical-operator blocks carry one vector per outgoing edge; all other
/// blocks have a single vector stored in 'first'.
ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    // Pick the per-edge vector that matches the requested successor.
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  // Ordinary block: only 'first' is ever populated.
  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}
213
214BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
215                                        bool shouldLazyCreate) {
216  unsigned idx = block->getBlockID();
217  lazyCreate(vals[idx].first);
218  if (shouldLazyCreate)
219    lazyCreate(vals[idx].second);
220  return vals[idx];
221}
222
#if 0
// Debug-only helpers (compiled out): dump a block's value vector, or an
// arbitrary named vector, to stderr.  Flip the '#if' to re-enable while
// debugging the analysis.
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}

static void printVector(const char *name, ValueVector const &bv) {
  llvm::errs() << name << " : ";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << "\n";
}
#endif
242
/// Merge 'source' into the scratch vector.  The first predecessor seeds
/// scratch with a plain copy; subsequent ones are joined with bitwise OR,
/// which is how the 'Value' lattice defines its merge.
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
250
251bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
252  ValueVector &dst = getValueVector(block, 0);
253  bool changed = (dst != scratch);
254  if (changed)
255    dst = scratch;
256#if 0
257  printVector(block, scratch, 0);
258#endif
259  return changed;
260}
261
262bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
263                                      const BVPair &newVals) {
264  BVPair &vals = getValueVectors(block, true);
265  bool changed = *newVals.first != *vals.first ||
266                 *newVals.second != *vals.second;
267  *vals.first = *newVals.first;
268  *vals.second = *newVals.second;
269#if 0
270  printVector(block, *vals.first, 1);
271  printVector(block, *vals.second, 2);
272#endif
273  return changed;
274}
275
/// Clear the scratch vector back to all-zero bits (i.e. 'Unknown') before
/// merging in a new block's predecessors.
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
279
280ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
281  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
282  assert(idx.hasValue());
283  return scratch[idx.getValue()];
284}
285
286//------------------------------------------------------------------------====//
287// Worklist: worklist for dataflow analysis.
288//====------------------------------------------------------------------------//
289
namespace {
/// Worklist of CFG blocks for the forward dataflow fixed-point iteration.
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  // One bit per block ID: set while the block is sitting in 'worklist',
  // so a block is never enqueued twice.
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  /// Enqueue every non-null successor of 'block' not already enqueued.
  void enqueueSuccessors(const CFGBlock *block);
  /// Remove and return the next block, or null when the list is empty.
  const CFGBlock *dequeue();
};
}
301
302void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
303  unsigned OldWorklistSize = worklist.size();
304  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
305       E = block->succ_end(); I != E; ++I) {
306    const CFGBlock *Successor = *I;
307    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
308      continue;
309    worklist.push_back(Successor);
310    enqueuedBlocks[Successor->getBlockID()] = true;
311  }
312  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
313    return;
314
315  // Rotate the newly added blocks to the start of the worklist so that it forms
316  // a proper queue when we pop off the end of the worklist.
317  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
318              worklist.end());
319}
320
321const CFGBlock *DataflowWorklist::dequeue() {
322  if (worklist.empty())
323    return 0;
324  const CFGBlock *b = worklist.back();
325  worklist.pop_back();
326  enqueuedBlocks[b->getBlockID()] = false;
327  return b;
328}
329
330//------------------------------------------------------------------------====//
331// Transfer function for uninitialized values analysis.
332//====------------------------------------------------------------------------//
333
334namespace {
/// Result of searching an expression for a reference to a tracked
/// variable: the VarDecl and the DeclRefExpr naming it, both of which are
/// null when nothing was found.
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}

  /// The DeclRefExpr naming the variable, or null.
  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  /// The variable that was found, or null.
  const VarDecl *getDecl() const { return vd; }
};
344
/// Per-statement transfer functions for the uninitialized-values analysis,
/// applied to each CFGStmt of a block in order.
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  // Dataflow values for all tracked variables (scratch holds this block's).
  CFGBlockValues &vals;
  const CFG &cfg;
  // The block currently being analyzed.
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  // Diagnostic sink; null during the fixed-point iteration (no reporting).
  UninitVariablesHandler *handler;

  /// The last DeclRefExpr seen when analyzing a block.  Used to
  /// cheat when detecting cases when the address of a variable is taken.
  DeclRefExpr *lastDR;

  /// The last lvalue-to-rvalue conversion of a variable whose value
  /// was uninitialized.  Normally this results in a warning, but it is
  /// possible to either silence the warning in some cases, or we
  /// propagate the uninitialized value.
  CastExpr *lastLoad;

  /// For some expressions, we want to ignore any post-processing after
  /// visitation.
  bool skipProcessUses;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac), handler(handler),
      lastDR(0), lastLoad(0),
      skipProcessUses(false) {}

  /// Report 'ex' to the handler when 'vd' is uninitialized here.
  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);
  void Visit(Stmt *s);

  /// True if 'vd' is one of the variables this analysis tracks.
  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  /// Build the UninitUse for a use of 'vd' whose dataflow value is 'v',
  /// refining a 'maybe' use by walking backwards through the CFG subgraph
  /// that inevitably leads to the use (details below).
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        if (++SuccsVisited[Pred->getBlockID()] == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }

  /// Return the tracked variable referenced by 'ex' (after stripping
  /// parentheses and casts), if any.
  FindVarResult findBlockVarDecl(Expr *ex);

  /// Flush pending 'lastLoad'/'lastDR' state after visiting a statement.
  void ProcessUses(Stmt *s = 0);
};
513}
514
515static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
516  while (Ex) {
517    Ex = Ex->IgnoreParenNoopCasts(C);
518    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
519      if (CE->getCastKind() == CK_LValueBitCast) {
520        Ex = CE->getSubExpr();
521        continue;
522      }
523    }
524    break;
525  }
526  return Ex;
527}
528
529void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
530  if (!handler)
531    return;
532  Value v = vals[vd];
533  if (isUninitialized(v))
534    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
535}
536
537FindVarResult TransferFunctions::findBlockVarDecl(Expr *ex) {
538  if (DeclRefExpr *dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
539    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
540      if (isTrackedVar(vd))
541        return FindVarResult(vd, dr);
542  return FindVarResult(0, 0);
543}
544
545void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs) {
546  // This represents an initialization of the 'element' value.
547  Stmt *element = fs->getElement();
548  const VarDecl *vd = 0;
549
550  if (DeclStmt *ds = dyn_cast<DeclStmt>(element)) {
551    vd = cast<VarDecl>(ds->getSingleDecl());
552    if (!isTrackedVar(vd))
553      vd = 0;
554  } else {
555    // Initialize the value of the reference variable.
556    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
557    vd = res.getDecl();
558  }
559
560  if (vd)
561    vals[vd] = Initialized;
562}
563
564void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
565  const BlockDecl *bd = be->getBlockDecl();
566  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
567        e = bd->capture_end() ; i != e; ++i) {
568    const VarDecl *vd = i->getVariable();
569    if (!isTrackedVar(vd))
570      continue;
571    if (i->isByRef()) {
572      vals[vd] = Initialized;
573      continue;
574    }
575    reportUse(be, vd);
576  }
577}
578
579void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
580  // Record the last DeclRefExpr seen.  This is an lvalue computation.
581  // We use this value to later detect if a variable "escapes" the analysis.
582  if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
583    if (isTrackedVar(vd)) {
584      ProcessUses();
585      lastDR = dr;
586    }
587}
588
/// Transfer function for declarations: an initialized declaration marks the
/// variable Initialized, except for the self-init pattern 'int x = x;',
/// which deliberately leaves it Uninitialized; a declaration without an
/// initializer resets the variable to Uninitialized.
void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
       DI != DE; ++DI) {
    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
      if (isTrackedVar(vd)) {
        if (Expr *init = vd->getInit()) {
          // If the initializer consists solely of a reference to itself, we
          // explicitly mark the variable as uninitialized. This allows code
          // like the following:
          //
          //   int x = x;
          //
          // to deliberately leave a variable uninitialized. Different analysis
          // clients can detect this pattern and adjust their reporting
          // appropriately, but we need to continue to analyze subsequent uses
          // of the variable.
          if (init == lastLoad) {
            const DeclRefExpr *DR
              = cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                             lastLoad->getSubExpr()));
            if (DR->getDecl() == vd) {
              // int x = x;
              // Propagate uninitialized value, but don't immediately report
              // a problem.
              vals[vd] = Uninitialized;
              lastLoad = 0;
              lastDR = 0;
              if (handler)
                handler->handleSelfInit(vd);
              return;
            }
          }

          // All other cases: treat the new variable as initialized.
          // This is a minor optimization to reduce the propagation
          // of the analysis, since we will have already reported
          // the use of the uninitialized value (which visiting the
          // initializer).
          vals[vd] = Initialized;
        } else {
          // No initializer: the variable is now uninitialized. This matters
          // for cases like:
          //   while (...) {
          //     int n;
          //     use(n);
          //     n = 0;
          //   }
          // FIXME: Mark the variable as uninitialized whenever its scope is
          // left, since its scope could be re-entered by a jump over the
          // declaration.
          vals[vd] = Uninitialized;
        }
      }
    }
  }
}
645
646void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
647  if (bo->isAssignmentOp()) {
648    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
649    if (const VarDecl *vd = res.getDecl()) {
650      if (bo->getOpcode() != BO_Assign)
651        reportUse(res.getDeclRefExpr(), vd);
652      else
653        vals[vd] = Initialized;
654    }
655  }
656}
657
658void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
659  switch (uo->getOpcode()) {
660    case clang::UO_PostDec:
661    case clang::UO_PostInc:
662    case clang::UO_PreDec:
663    case clang::UO_PreInc: {
664      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
665      if (const VarDecl *vd = res.getDecl()) {
666        assert(res.getDeclRefExpr() == lastDR);
667        // We null out lastDR to indicate we have fully processed it
668        // and we don't want the auto-value setting in Visit().
669        lastDR = 0;
670        reportUse(res.getDeclRefExpr(), vd);
671      }
672      break;
673    }
674    default:
675      break;
676  }
677}
678
679void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
680  if (ce->getCastKind() == CK_LValueToRValue) {
681    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
682    if (res.getDecl()) {
683      assert(res.getDeclRefExpr() == lastDR);
684      lastLoad = ce;
685    }
686  }
687  else if (ce->getCastKind() == CK_NoOp ||
688           ce->getCastKind() == CK_LValueBitCast) {
689    skipProcessUses = true;
690  }
691  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
692    if (cse->getType()->isVoidType()) {
693      // e.g. (void) x;
694      if (lastLoad == cse->getSubExpr()) {
695        // Squelch any detected load of an uninitialized value if
696        // we cast it to void.
697        lastLoad = 0;
698        lastDR = 0;
699      }
700    }
701  }
702}
703
704void TransferFunctions::Visit(clang::Stmt *s) {
705  skipProcessUses = false;
706  StmtVisitor<TransferFunctions>::Visit(s);
707  if (!skipProcessUses)
708    ProcessUses(s);
709}
710
/// Post-process the pending load ('lastLoad') and lvalue ('lastDR') after a
/// CFG element is visited: report a load that was not squelched, and treat
/// any other surviving lvalue as an escape that marks the variable
/// Initialized.
void TransferFunctions::ProcessUses(Stmt *s) {
  // This method is typically called after visiting a CFGElement statement
  // in the CFG.  We delay processing of reporting many loads of uninitialized
  // values until here.
  if (lastLoad) {
    // If we just visited the lvalue-to-rvalue cast, there is nothing
    // left to do.
    if (lastLoad == s)
      return;

    const DeclRefExpr *DR =
      cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                   lastLoad->getSubExpr()));
    const VarDecl *VD = cast<VarDecl>(DR->getDecl());

    // If we reach here, we may have seen a load of an uninitialized value
    // and it hasn't been casted to void or otherwise handled.  In this
    // situation, report the incident.
    reportUse(DR, VD);

    lastLoad = 0;

    if (DR == lastDR) {
      lastDR = 0;
      return;
    }
  }

  // Any other uses of 'lastDR' involve taking an lvalue of variable.
  // In this case, it "escapes" the analysis.
  if (lastDR && lastDR != s) {
    vals[cast<VarDecl>(lastDR->getDecl())] = Initialized;
    lastDR = 0;
  }
}
746
747//------------------------------------------------------------------------====//
748// High-level "driver" logic for uninitialized values analysis.
749//====------------------------------------------------------------------------//
750
/// Run the analysis over one CFG block: merge its predecessors' values
/// (with special per-edge handling for short-circuit logical operators),
/// apply the per-statement transfer functions, and store the result.
/// Returns true if the block's stored values changed.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {

  wasAnalyzed[block->getBlockID()] = true;

  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    // A logical-operator block has exactly two predecessors (enforced by
    // getLogicalOperatorInChain); combine their per-edge vectors.
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    } else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    // Only merge predecessors that have actually been analyzed.
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred, block), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  tf.ProcessUses();
  return vals.updateValueVectorWithScratch(block);
}
805
/// Entry point: run the uninitialized-values analysis over 'cfg', invoking
/// 'handler' for each detected use of an uninitialized variable and
/// filling in 'stats'.  Does nothing when 'dc' declares no tracked
/// variables.
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;
#if 0
  cfg.dump(dc.getParentASTContext().getLangOpts(), true);
#endif

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist, iterating until a fixed point is reached:
  // a block's successors are re-enqueued whenever its values change.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized
  // variables (this pass passes the handler to runOnBlock).
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}
860
// Out-of-line empty destructor; defining it here gives the class a home
// translation unit.  NOTE(review): presumably this anchors the vtable —
// confirm against the header declaration.
UninitVariablesHandler::~UninitVariablesHandler() {}
862