UninitializedValues.cpp revision a9e8b9e3e90fcfe10a04624a89c39b63c32614d1
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements uninitialized values analysis for source-level CFGs.
11//
12//===----------------------------------------------------------------------===//
13
14#include <utility>
#include <algorithm>
#include <cstring>
15#include "llvm/ADT/Optional.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/PackedVector.h"
18#include "llvm/ADT/DenseMap.h"
19#include "clang/AST/Decl.h"
20#include "clang/Analysis/CFG.h"
21#include "clang/Analysis/AnalysisContext.h"
22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23#include "clang/Analysis/Analyses/UninitializedValues.h"
24#include "llvm/Support/SaveAndRestore.h"
25
26using namespace clang;
27
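/// A variable is tracked by this analysis if it is a non-static local variable
/// of scalar or vector type, declared directly in the given DeclContext, and
/// is not an exception variable.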
28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30      !vd->isExceptionVariable() &&
31      vd->getDeclContext() == dc) {
32    QualType ty = vd->getType();
33    return ty->isScalarType() || ty->isVectorType();
34  }
35  return false;
36}
37
38//===----------------------------------------------------------------------===//
39// DeclToIndex: a mapping from Decls we track to value indices.
40//===----------------------------------------------------------------------===//
41
42namespace {
43class DeclToIndex {
44  llvm::DenseMap<const VarDecl *, unsigned> map;
45public:
46  DeclToIndex() {}
47
48  /// Compute the actual mapping from declarations to bits.
49  void computeMap(const DeclContext &dc);
50
51  /// Return the number of declarations in the map.
52  unsigned size() const { return map.size(); }
53
54  /// Returns the bit vector index for a given declaration.
55  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
56};
57}
58
59void DeclToIndex::computeMap(const DeclContext &dc) {
60  unsigned count = 0;
61  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
62                                               E(dc.decls_end());
63  for ( ; I != E; ++I) {
64    const VarDecl *vd = *I;
65    if (isTrackedVar(vd, &dc))
66      map[vd] = count++;
67  }
68}
69
70llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
71  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
72  if (I == map.end())
73    return llvm::Optional<unsigned>();
74  return I->second;
75}
76
77//===----------------------------------------------------------------------===//
78// CFGBlockValues: dataflow values for CFG blocks.
79//===----------------------------------------------------------------------===//
80
81// These values are defined in such a way that a merge can be done using
82// a bitwise OR.
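// For example, merging values flowing in from two predecessor edges:
//   Uninitialized (10) | Initialized (01) == MayUninitialized (11)
//   Unknown (00)       | v                == v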
83enum Value { Unknown = 0x0,         /* 00 */
84             Initialized = 0x1,     /* 01 */
85             Uninitialized = 0x2,   /* 10 */
86             MayUninitialized = 0x3 /* 11 */ };
87
88static bool isUninitialized(const Value v) {
89  return v >= Uninitialized;
90}
91static bool isAlwaysUninit(const Value v) {
92  return v == Uninitialized;
93}
94
95namespace {
96
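// Each tracked variable occupies two bits of a ValueVector.  A CFG block
// normally needs a single vector of values; blocks that evaluate a '&&' or
// '||' keep one vector per out-edge, which is why a pair is stored per block.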
97typedef llvm::PackedVector<Value, 2> ValueVector;
98typedef std::pair<ValueVector *, ValueVector *> BVPair;
99
100class CFGBlockValues {
101  const CFG &cfg;
102  BVPair *vals;
103  ValueVector scratch;
104  DeclToIndex declToIndex;
105
106  ValueVector &lazyCreate(ValueVector *&bv);
107public:
108  CFGBlockValues(const CFG &cfg);
109  ~CFGBlockValues();
110
111  unsigned getNumEntries() const { return declToIndex.size(); }
112
113  void computeSetOfDeclarations(const DeclContext &dc);
114  ValueVector &getValueVector(const CFGBlock *block,
115                              const CFGBlock *dstBlock);
116
117  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);
118
119  void setAllScratchValues(Value V);
120  void mergeIntoScratch(ValueVector const &source, bool isFirst);
121  bool updateValueVectorWithScratch(const CFGBlock *block);
122  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);
123
124  bool hasNoDeclarations() const {
125    return declToIndex.size() == 0;
126  }
127
128  void resetScratch();
129  ValueVector &getScratch() { return scratch; }
130
131  ValueVector::reference operator[](const VarDecl *vd);
132
133  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
134                 const VarDecl *vd) {
135    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
136    assert(idx.hasValue());
137    return getValueVector(block, dstBlock)[idx.getValue()];
138  }
139};
140} // end anonymous namespace
141
142CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
143  unsigned n = cfg.getNumBlockIDs();
144  if (!n)
145    return;
146  vals = new std::pair<ValueVector*, ValueVector*>[n];
147  memset((void*)vals, 0, sizeof(*vals) * n);
148}
149
150CFGBlockValues::~CFGBlockValues() {
151  unsigned n = cfg.getNumBlockIDs();
152  if (n == 0)
153    return;
154  for (unsigned i = 0; i < n; ++i) {
155    delete vals[i].first;
156    delete vals[i].second;
157  }
158  delete [] vals;
159}
160
161void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
162  declToIndex.computeMap(dc);
163  scratch.resize(declToIndex.size());
164}
165
166ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
167  if (!bv)
168    bv = new ValueVector(declToIndex.size());
169  return *bv;
170}
171
172/// Pattern matches a '&&' or '||' that appears as the first element of a
173/// CFGBlock and is either the block's terminator condition or the block's
174/// only element.  If such an expression is found, it is returned.
175static const BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
176  if (block->empty())
177    return 0;
178
179  CFGElement front = block->front();
180  const CFGStmt *cstmt = front.getAs<CFGStmt>();
181  if (!cstmt)
182    return 0;
183
184  const BinaryOperator *b = dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());
185
186  if (!b || !b->isLogicalOp())
187    return 0;
188
189  if (block->pred_size() == 2) {
190    if (block->getTerminatorCondition() == b) {
191      if (block->succ_size() == 2)
192        return b;
193    }
194    else if (block->size() == 1)
195      return b;
196  }
197
198  return 0;
199}
200
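// Returns the value vector for the edge from 'block' to 'dstBlock'.  For a
// block that evaluates a logical operator, the first vector of its pair
// describes the state on the edge to its first successor and the second
// vector the edge to its second successor; all other blocks use a single
// vector.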
201ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
202                                            const CFGBlock *dstBlock) {
203  unsigned idx = block->getBlockID();
204  if (dstBlock && getLogicalOperatorInChain(block)) {
205    if (*block->succ_begin() == dstBlock)
206      return lazyCreate(vals[idx].first);
207    assert(*(block->succ_begin()+1) == dstBlock);
208    return lazyCreate(vals[idx].second);
209  }
210
211  assert(vals[idx].second == 0);
212  return lazyCreate(vals[idx].first);
213}
214
215BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
216                                        bool shouldLazyCreate) {
217  unsigned idx = block->getBlockID();
218  lazyCreate(vals[idx].first);
219  if (shouldLazyCreate)
220    lazyCreate(vals[idx].second);
221  return vals[idx];
222}
223
224#if 0
225static void printVector(const CFGBlock *block, ValueVector &bv,
226                        unsigned num) {
227
228  llvm::errs() << block->getBlockID() << " :";
229  for (unsigned i = 0; i < bv.size(); ++i) {
230    llvm::errs() << ' ' << bv[i];
231  }
232  llvm::errs() << " : " << num << '\n';
233}
234
235static void printVector(const char *name, ValueVector const &bv) {
236  llvm::errs() << name << " : ";
237  for (unsigned i = 0; i < bv.size(); ++i) {
238    llvm::errs() << ' ' << bv[i];
239  }
240  llvm::errs() << "\n";
241}
242#endif
243
244void CFGBlockValues::setAllScratchValues(Value V) {
245  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
246    scratch[I] = V;
247}
248
249void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
250                                      bool isFirst) {
251  if (isFirst)
252    scratch = source;
253  else
254    scratch |= source;
255}
256
257bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
258  ValueVector &dst = getValueVector(block, 0);
259  bool changed = (dst != scratch);
260  if (changed)
261    dst = scratch;
262#if 0
263  printVector(block, scratch, 0);
264#endif
265  return changed;
266}
267
268bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
269                                      const BVPair &newVals) {
270  BVPair &vals = getValueVectors(block, true);
271  bool changed = *newVals.first != *vals.first ||
272                 *newVals.second != *vals.second;
273  *vals.first = *newVals.first;
274  *vals.second = *newVals.second;
275#if 0
276  printVector(block, *vals.first, 1);
277  printVector(block, *vals.second, 2);
278#endif
279  return changed;
280}
281
282void CFGBlockValues::resetScratch() {
283  scratch.reset();
284}
285
286ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
287  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
288  assert(idx.hasValue());
289  return scratch[idx.getValue()];
290}
291
292//===----------------------------------------------------------------------===//
293// Worklist: worklist for dataflow analysis.
294//===----------------------------------------------------------------------===//
295
296namespace {
297class DataflowWorklist {
298  SmallVector<const CFGBlock *, 20> worklist;
299  llvm::BitVector enqueuedBlocks;
300public:
301  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}
302
303  void enqueueSuccessors(const CFGBlock *block);
304  const CFGBlock *dequeue();
305};
306}
307
308void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
309  unsigned OldWorklistSize = worklist.size();
310  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
311       E = block->succ_end(); I != E; ++I) {
312    const CFGBlock *Successor = *I;
313    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
314      continue;
315    worklist.push_back(Successor);
316    enqueuedBlocks[Successor->getBlockID()] = true;
317  }
318  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
319    return;
320
321  // Rotate the newly added blocks to the start of the worklist so that it forms
322  // a proper queue when we pop off the end of the worklist.
323  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
324              worklist.end());
325}
326
327const CFGBlock *DataflowWorklist::dequeue() {
328  if (worklist.empty())
329    return 0;
330  const CFGBlock *b = worklist.back();
331  worklist.pop_back();
332  enqueuedBlocks[b->getBlockID()] = false;
333  return b;
334}
335
336//===----------------------------------------------------------------------===//
337// Transfer function for uninitialized values analysis.
338//===----------------------------------------------------------------------===//
339
340namespace {
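/// Pairs a tracked VarDecl with the DeclRefExpr through which it was found;
/// both are null when an expression does not name a tracked variable
/// (see findBlockVarDecl()).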
341class FindVarResult {
342  const VarDecl *vd;
343  const DeclRefExpr *dr;
344public:
345  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}
346
347  const DeclRefExpr *getDeclRefExpr() const { return dr; }
348  const VarDecl *getDecl() const { return vd; }
349};
350
351class TransferFunctions : public StmtVisitor<TransferFunctions> {
352  CFGBlockValues &vals;
353  const CFG &cfg;
354  const CFGBlock *block;
355  AnalysisDeclContext &ac;
356  UninitVariablesHandler *handler;
357
358  /// The last DeclRefExpr seen when analyzing a block.  Used to cheaply
359  /// detect cases where the address of a variable is taken.
360  DeclRefExpr *lastDR;
361
362  /// The last lvalue-to-rvalue conversion of a variable whose value
363  /// was uninitialized.  Normally this results in a warning, but in some
364  /// cases the warning is silenced, or the uninitialized value is
365  /// propagated instead.
366  CastExpr *lastLoad;
367
368  /// For some expressions, we want to ignore any post-processing after
369  /// visitation.
370  bool skipProcessUses;
371
372public:
373  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
374                    const CFGBlock *block, AnalysisDeclContext &ac,
375                    UninitVariablesHandler *handler)
376    : vals(vals), cfg(cfg), block(block), ac(ac), handler(handler),
377      lastDR(0), lastLoad(0),
378      skipProcessUses(false) {}
379
380  void reportUse(const Expr *ex, const VarDecl *vd);
381
382  void VisitBlockExpr(BlockExpr *be);
383  void VisitCallExpr(CallExpr *ce);
384  void VisitDeclStmt(DeclStmt *ds);
385  void VisitDeclRefExpr(DeclRefExpr *dr);
386  void VisitUnaryOperator(UnaryOperator *uo);
387  void VisitBinaryOperator(BinaryOperator *bo);
388  void VisitCastExpr(CastExpr *ce);
389  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);
390  void Visit(Stmt *s);
391
392  bool isTrackedVar(const VarDecl *vd) {
393    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
394  }
395
396  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
397    UninitUse Use(ex, isAlwaysUninit(v));
398
399    assert(isUninitialized(v));
400    if (Use.getKind() == UninitUse::Always)
401      return Use;
402
403    // If an edge which leads unconditionally to this use did not initialize
404    // the variable, we can say something stronger than 'may be uninitialized':
405    // we can say 'either it's used uninitialized or you have dead code'.
406    //
407    // We track the number of successors of a node which have been visited, and
408    // visit a node once we have visited all of its successors. Only edges where
409    // the variable might still be uninitialized are followed. Since a variable
410    // can't transfer from being initialized to being uninitialized, this will
411    // trace out the subgraph which inevitably leads to the use and does not
412    // initialize the variable. We do not want to skip past loops, since their
413    // non-termination might be correlated with the initialization condition.
414    //
415    // For example:
416    //
417    //         void f(bool a, bool b) {
418    // block1:   int n;
419    //           if (a) {
420    // block2:     if (b)
421    // block3:       n = 1;
422    // block4:   } else if (b) {
423    // block5:     while (!a) {
424    // block6:       do_work(&a);
425    //               n = 2;
426    //             }
427    //           }
428    // block7:   if (a)
429    // block8:     g();
430    // block9:   return n;
431    //         }
432    //
433    // Starting from the maybe-uninitialized use in block 9:
434    //  * Block 7 is not visited because we have only visited one of its two
435    //    successors.
436    //  * Block 8 is visited because we've visited its only successor.
437    // From block 8:
438    //  * Block 7 is visited because we've now visited both of its successors.
439    // From block 7:
440    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
441    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
442    //  * Block 3 is not visited because it initializes 'n'.
443    // Now the algorithm terminates, having visited blocks 7 and 8, and having
444    // found the frontier is blocks 2, 4, and 5.
445    //
446    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
447    // and 4), so we report that any time either of those edges is taken (in
448    // each case when 'b == false'), 'n' is used uninitialized.
449    llvm::SmallVector<const CFGBlock*, 32> Queue;
450    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
451    Queue.push_back(block);
452    // Specify that we've already visited all successors of the starting block.
453    // This has the dual purpose of ensuring we never add it to the queue, and
454    // of marking it as not being a candidate element of the frontier.
455    SuccsVisited[block->getBlockID()] = block->succ_size();
456    while (!Queue.empty()) {
457      const CFGBlock *B = Queue.back();
458      Queue.pop_back();
459      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
460           I != E; ++I) {
461        const CFGBlock *Pred = *I;
462        if (vals.getValue(Pred, B, vd) == Initialized)
463          // This block initializes the variable.
464          continue;
465
466        if (++SuccsVisited[Pred->getBlockID()] == Pred->succ_size())
467          // All paths from this block lead to the use and don't initialize the
468          // variable.
469          Queue.push_back(Pred);
470      }
471    }
472
473    // Scan the frontier, looking for blocks where the variable was
474    // uninitialized.
475    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
476      const CFGBlock *Block = *BI;
477      unsigned BlockID = Block->getBlockID();
478      const Stmt *Term = Block->getTerminator();
479      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
480          Term) {
481        // This block inevitably leads to the use. If we have an edge from here
482        // to a post-dominator block, and the variable is uninitialized on that
483        // edge, we have found a bug.
484        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
485             E = Block->succ_end(); I != E; ++I) {
486          const CFGBlock *Succ = *I;
487          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
488              vals.getValue(Block, Succ, vd) == Uninitialized) {
489            // Switch cases are a special case: report the label to the caller
490            // as the 'terminator', not the switch statement itself. Suppress
491            // situations where no label matched: we can't be sure that's
492            // possible.
493            if (isa<SwitchStmt>(Term)) {
494              const Stmt *Label = Succ->getLabel();
495              if (!Label || !isa<SwitchCase>(Label))
496                // Might not be possible.
497                continue;
498              UninitUse::Branch Branch;
499              Branch.Terminator = Label;
500              Branch.Output = 0; // Ignored.
501              Use.addUninitBranch(Branch);
502            } else {
503              UninitUse::Branch Branch;
504              Branch.Terminator = Term;
505              Branch.Output = I - Block->succ_begin();
506              Use.addUninitBranch(Branch);
507            }
508          }
509        }
510      }
511    }
512
513    return Use;
514  }
515
516  FindVarResult findBlockVarDecl(Expr *ex);
517
518  void ProcessUses(Stmt *s = 0);
519};
520}
521
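/// Walk past parentheses, no-op casts, and lvalue bit casts so that the
/// underlying expression (typically the DeclRefExpr of a load) can be
/// recovered.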
522static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
523  while (Ex) {
524    Ex = Ex->IgnoreParenNoopCasts(C);
525    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
526      if (CE->getCastKind() == CK_LValueBitCast) {
527        Ex = CE->getSubExpr();
528        continue;
529      }
530    }
531    break;
532  }
533  return Ex;
534}
535
536void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
537  if (!handler)
538    return;
539  Value v = vals[vd];
540  if (isUninitialized(v))
541    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
542}
543
544FindVarResult TransferFunctions::findBlockVarDecl(Expr *ex) {
545  if (DeclRefExpr *dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
546    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
547      if (isTrackedVar(vd))
548        return FindVarResult(vd, dr);
549  return FindVarResult(0, 0);
550}
551
552void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs) {
553  // This represents an initialization of the 'element' value.
554  Stmt *element = fs->getElement();
555  const VarDecl *vd = 0;
556
557  if (DeclStmt *ds = dyn_cast<DeclStmt>(element)) {
558    vd = cast<VarDecl>(ds->getSingleDecl());
559    if (!isTrackedVar(vd))
560      vd = 0;
561  } else {
562    // Initialize the value of the reference variable.
563    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
564    vd = res.getDecl();
565  }
566
567  if (vd)
568    vals[vd] = Initialized;
569}
570
571void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
572  const BlockDecl *bd = be->getBlockDecl();
573  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
574        e = bd->capture_end() ; i != e; ++i) {
575    const VarDecl *vd = i->getVariable();
576    if (!isTrackedVar(vd))
577      continue;
578    if (i->isByRef()) {
579      vals[vd] = Initialized;
580      continue;
581    }
582    reportUse(be, vd);
583  }
584}
585
586void TransferFunctions::VisitCallExpr(CallExpr *ce) {
587  // After a call to a function like setjmp or vfork, any variable which is
588  // initialized anywhere within this function may now be initialized. For now,
589  // just assume such a call initializes all variables.
590  // FIXME: Only mark variables as initialized if they have an initializer which
591  // is reachable from here.
592  Decl *Callee = ce->getCalleeDecl();
593  if (Callee && Callee->hasAttr<ReturnsTwiceAttr>())
594    vals.setAllScratchValues(Initialized);
595}
596
597void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
598  // Record the last DeclRefExpr seen.  This is an lvalue computation.
599  // We use this value to later detect if a variable "escapes" the analysis.
600  if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
601    if (isTrackedVar(vd)) {
602      ProcessUses();
603      lastDR = dr;
604    }
605}
606
607void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
608  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
609       DI != DE; ++DI) {
610    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
611      if (isTrackedVar(vd)) {
612        if (Expr *init = vd->getInit()) {
613          // If the initializer consists solely of a reference to itself, we
614          // explicitly mark the variable as uninitialized. This allows code
615          // like the following:
616          //
617          //   int x = x;
618          //
619          // to deliberately leave a variable uninitialized. Different analysis
620          // clients can detect this pattern and adjust their reporting
621          // appropriately, but we need to continue to analyze subsequent uses
622          // of the variable.
623          if (init == lastLoad) {
624            const DeclRefExpr *DR
625              = cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
626                                             lastLoad->getSubExpr()));
627            if (DR->getDecl() == vd) {
628              // int x = x;
629              // Propagate uninitialized value, but don't immediately report
630              // a problem.
631              vals[vd] = Uninitialized;
632              lastLoad = 0;
633              lastDR = 0;
634              if (handler)
635                handler->handleSelfInit(vd);
636              return;
637            }
638          }
639
640          // All other cases: treat the new variable as initialized.
641          // This is a minor optimization to reduce the propagation
642          // of the analysis, since we will have already reported
643          // the use of the uninitialized value (when visiting the
644          // initializer).
645          vals[vd] = Initialized;
646        } else {
647          // No initializer: the variable is now uninitialized. This matters
648          // for cases like:
649          //   while (...) {
650          //     int n;
651          //     use(n);
652          //     n = 0;
653          //   }
654          // FIXME: Mark the variable as uninitialized whenever its scope is
655          // left, since its scope could be re-entered by a jump over the
656          // declaration.
657          vals[vd] = Uninitialized;
658        }
659      }
660    }
661  }
662}
663
664void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
665  if (bo->isAssignmentOp()) {
666    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
667    if (const VarDecl *vd = res.getDecl()) {
668      if (bo->getOpcode() != BO_Assign)
669        reportUse(res.getDeclRefExpr(), vd);
670      else
671        vals[vd] = Initialized;
672    }
673  }
674}
675
676void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
677  switch (uo->getOpcode()) {
678    case clang::UO_PostDec:
679    case clang::UO_PostInc:
680    case clang::UO_PreDec:
681    case clang::UO_PreInc: {
682      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
683      if (const VarDecl *vd = res.getDecl()) {
684        assert(res.getDeclRefExpr() == lastDR);
685        // We null out lastDR to indicate we have fully processed it
686        // and we don't want the auto-value setting in Visit().
687        lastDR = 0;
688        reportUse(res.getDeclRefExpr(), vd);
689      }
690      break;
691    }
692    default:
693      break;
694  }
695}
696
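// Lvalue-to-rvalue casts represent loads: the last one seen is remembered so
// that ProcessUses() can report it if the loaded value may be uninitialized.
// No-op and lvalue bit casts carry any pending report forward, and a C-style
// cast to 'void' (e.g. '(void) x;') squelches it.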
697void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
698  if (ce->getCastKind() == CK_LValueToRValue) {
699    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
700    if (res.getDecl()) {
701      assert(res.getDeclRefExpr() == lastDR);
702      lastLoad = ce;
703    }
704  }
705  else if (ce->getCastKind() == CK_NoOp ||
706           ce->getCastKind() == CK_LValueBitCast) {
707    skipProcessUses = true;
708  }
709  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
710    if (cse->getType()->isVoidType()) {
711      // e.g. (void) x;
712      if (lastLoad == cse->getSubExpr()) {
713        // Squelch any detected load of an uninitialized value if
714        // we cast it to void.
715        lastLoad = 0;
716        lastDR = 0;
717      }
718    }
719  }
720}
721
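// Dispatches to the specific Visit* handler above and then flushes any pending
// load or DeclRefExpr via ProcessUses(), unless the handler requested that
// this post-processing be skipped.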
722void TransferFunctions::Visit(clang::Stmt *s) {
723  skipProcessUses = false;
724  StmtVisitor<TransferFunctions>::Visit(s);
725  if (!skipProcessUses)
726    ProcessUses(s);
727}
728
729void TransferFunctions::ProcessUses(Stmt *s) {
730  // This method is typically called after visiting a CFGElement statement
731  // in the CFG.  Reporting of many uninitialized-value loads is deferred
732  // until this point.
733  if (lastLoad) {
734    // If we just visited the lvalue-to-rvalue cast, there is nothing
735    // left to do.
736    if (lastLoad == s)
737      return;
738
739    const DeclRefExpr *DR =
740      cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
741                                   lastLoad->getSubExpr()));
742    const VarDecl *VD = cast<VarDecl>(DR->getDecl());
743
744    // If we reach here, we may have seen a load of an uninitialized value
745    // and it hasn't been cast to void or otherwise handled.  In this
746    // situation, report the incident.
747    reportUse(DR, VD);
748
749    lastLoad = 0;
750
751    if (DR == lastDR) {
752      lastDR = 0;
753      return;
754    }
755  }
756
757  // Any other uses of 'lastDR' involve taking the lvalue of the variable.
758  // In this case, it "escapes" the analysis.
759  if (lastDR && lastDR != s) {
760    vals[cast<VarDecl>(lastDR->getDecl())] = Initialized;
761    lastDR = 0;
762  }
763}
764
765//===----------------------------------------------------------------------===//
766// High-level "driver" logic for uninitialized values analysis.
767//===----------------------------------------------------------------------===//
768
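// Merges the dataflow values flowing into 'block' from its already-analyzed
// predecessors, runs the transfer functions over the block's statements, and
// returns whether the block's stored values changed.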
769static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
770                       AnalysisDeclContext &ac, CFGBlockValues &vals,
771                       llvm::BitVector &wasAnalyzed,
772                       UninitVariablesHandler *handler = 0) {
773
774  wasAnalyzed[block->getBlockID()] = true;
775
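  // Blocks that evaluate a '&&' or '||' keep separate value vectors for their
  // two out-edges, so merge the predecessors' edge-specific vectors here
  // rather than collapsing everything into a single state.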
776  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
777    CFGBlock::const_pred_iterator itr = block->pred_begin();
778    BVPair vA = vals.getValueVectors(*itr, false);
779    ++itr;
780    BVPair vB = vals.getValueVectors(*itr, false);
781
782    BVPair valsAB;
783
784    if (b->getOpcode() == BO_LAnd) {
785      // Merge the 'F' bits from the first and second.
786      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
787      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
788      valsAB.first = vA.first;
789      valsAB.second = &vals.getScratch();
790    } else {
791      // Merge the 'T' bits from the first and second.
792      assert(b->getOpcode() == BO_LOr);
793      vals.mergeIntoScratch(*vA.first, true);
794      vals.mergeIntoScratch(*vB.first, false);
795      valsAB.first = &vals.getScratch();
796      valsAB.second = vA.second ? vA.second : vA.first;
797    }
798    return vals.updateValueVectors(block, valsAB);
799  }
800
801  // Default behavior: merge in values of predecessor blocks.
802  vals.resetScratch();
803  bool isFirst = true;
804  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
805       E = block->pred_end(); I != E; ++I) {
806    const CFGBlock *pred = *I;
807    if (wasAnalyzed[pred->getBlockID()]) {
808      vals.mergeIntoScratch(vals.getValueVector(pred, block), isFirst);
809      isFirst = false;
810    }
811  }
812  // Apply the transfer function.
813  TransferFunctions tf(vals, cfg, block, ac, handler);
814  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
815       I != E; ++I) {
816    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
817      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
818    }
819  }
820  tf.ProcessUses();
821  return vals.updateValueVectorWithScratch(block);
822}
823
824void clang::runUninitializedVariablesAnalysis(
825    const DeclContext &dc,
826    const CFG &cfg,
827    AnalysisDeclContext &ac,
828    UninitVariablesHandler &handler,
829    UninitVariablesAnalysisStats &stats) {
830  CFGBlockValues vals(cfg);
831  vals.computeSetOfDeclarations(dc);
832  if (vals.hasNoDeclarations())
833    return;
834#if 0
835  cfg.dump(dc.getParentASTContext().getLangOpts(), true);
836#endif
837
838  stats.NumVariablesAnalyzed = vals.getNumEntries();
839
840  // Mark all variables uninitialized at the entry.
841  const CFGBlock &entry = cfg.getEntry();
842  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
843        e = entry.succ_end(); i != e; ++i) {
844    if (const CFGBlock *succ = *i) {
845      ValueVector &vec = vals.getValueVector(&entry, succ);
846      const unsigned n = vals.getNumEntries();
847      for (unsigned j = 0; j < n ; ++j) {
848        vec[j] = Uninitialized;
849      }
850    }
851  }
852
853  // Proceed with the worklist.
854  DataflowWorklist worklist(cfg);
855  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
856  worklist.enqueueSuccessors(&cfg.getEntry());
857  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
858  wasAnalyzed[cfg.getEntry().getBlockID()] = true;
859
860  while (const CFGBlock *block = worklist.dequeue()) {
861    // Did the block change?
862    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
863    ++stats.NumBlockVisits;
864    if (changed || !previouslyVisited[block->getBlockID()])
865      worklist.enqueueSuccessors(block);
866    previouslyVisited[block->getBlockID()] = true;
867  }
868
869  // Run through the blocks one more time, and report uninitialized variables.
870  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
871    const CFGBlock *block = *BI;
872    if (wasAnalyzed[block->getBlockID()]) {
873      runOnBlock(block, cfg, ac, vals, wasAnalyzed, &handler);
874      ++stats.NumBlockVisits;
875    }
876  }
877}
878
879UninitVariablesHandler::~UninitVariablesHandler() {}
880