//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;

#define DEBUG_LOGGING 0

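/// A variable is "tracked" by this analysis if it is a non-static local
/// variable (excluding exception variables), declared directly within the
/// given DeclContext, and of scalar or vector type.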
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//===----------------------------------------------------------------------===//
// DeclToIndex: a mapping from Decls we track to value indices.
//===----------------------------------------------------------------------===//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//===----------------------------------------------------------------------===//
// CFGBlockValues: dataflow values for CFG blocks.
//===----------------------------------------------------------------------===//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
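// Merging the values flowing in along two CFG edges is then just a bitwise OR:
//   Unknown          (00) | v                  == v
//   Initialized      (01) | Uninitialized (10) == MayUninitialized (11)
//   MayUninitialized (11) | v                  == MayUninitialized (11)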

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

typedef llvm::PackedVector<Value, 2> ValueVector;

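/// Dataflow values for all blocks in the CFG: one ValueVector per block,
/// holding two bits per tracked variable, plus a scratch vector used while
/// the current block is being processed.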
class CFGBlockValues {
  const CFG &cfg;
  std::vector<ValueVector*> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return *vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

CFGBlockValues::~CFGBlockValues() {
  for (std::vector<ValueVector*>::iterator I = vals.begin(), E = vals.end();
       I != E; ++I)
    delete *I;
}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i] = new ValueVector(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

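/// Merge a predecessor's values into the scratch vector. The first
/// predecessor seeds the scratch; each later one is combined in with a
/// bitwise OR, the join operation of the Value lattice defined above.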
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//===----------------------------------------------------------------------===//
// Worklist: worklist for dataflow analysis.
//===----------------------------------------------------------------------===//

namespace {
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
    return;

  // Rotate the newly added blocks to the start of the worklist so that it forms
  // a proper queue when we pop off the end of the worklist.
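  //
  // For example, if the worklist is [A, B] and this call appends [C, D], the
  // rotate turns [A, B, C, D] into [C, D, A, B]; dequeue(), which pops from
  // the back, then yields B and A before the newly added D and C.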
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
              worklist.end());
}

const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//===----------------------------------------------------------------------===//
// Classification of DeclRefExprs as use or initialization.
//===----------------------------------------------------------------------===//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find that
/// variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
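///
/// For example, given:
///
///   int x;
///   x = 1;      // The DeclRefExpr for 'x' is classified Ignore; the
///               // assignment itself is handled by TransferFunctions.
///   x += 1;     // 'x' is classified Use.
///   int y = y;  // The 'y' on the right-hand side is classified SelfInit.
///   (void)x;    // 'x' is classified Ignore.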
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}

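/// If the variable's initializer is, modulo casts, a reference to the
/// variable itself (as in 'int x = x'), return that DeclRefExpr.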
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If this
  // is not a compound-assignment, we will treat it as initializing the variable
  // when TransferFunctions visits it. A compound-assignment reads the old value
  // of the variable, so its LHS counts as a use, but it does not make an
  // uninitialized variable initialized.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not assume
  // that it is initialized by the call, and we conservatively do not assume
  // that it is used.
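  //
  // For example, given a function declared 'void takesConstRef(const int &)',
  // in the call 'takesConstRef(x)' the argument 'x' is classified Ignore:
  // the call is treated as neither a use nor an initialization of 'x'.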
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//===----------------------------------------------------------------------===//
// Transfer function for uninitialized values analysis.
//===----------------------------------------------------------------------===//

namespace {
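/// Transfer functions for the analysis: each Visit* method updates the
/// scratch ValueVector to model the statement's effect on the tracked
/// variables, and reports uses of uninitialized values to the handler
/// (when one is provided).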
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler *handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  if (!handler)
    return;
  Value v = vals[vd];
  if (isUninitialized(v))
    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end(); i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables.  FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled like "analyzer_noreturn" are often used to denote
      // "panic" functions that in special debug situations can still return,
      // but for the most part should not be treated as returning.  This
      // annotation, borrowed from the static analyzer, is useful for
      // suppressing branch-specific false positives when we call one of these
      // functions but keep pretending the path continues (when in reality the
      // user doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    if (handler)
      handler->handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//===----------------------------------------------------------------------===//
// High-level "driver" logic for uninitialized values analysis.
//===----------------------------------------------------------------------===//

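/// Apply the transfer functions to a single block: merge the values flowing
/// out of already-analyzed predecessors into the scratch vector, run the
/// per-statement transfer functions over the block, and write the result
/// back. Returns true if the block's dataflow values changed.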
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n ; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}
789