UninitializedValues.cpp revision eee18c38b6dd29c0e6982f5565fcbc3f76d1bf4d
//==- UninitializedValues.cpp - Find Uninitialized Values ------*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <algorithm> // for std::rotate
#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;

#define DEBUG_LOGGING 0

static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//===----------------------------------------------------------------------===//
// DeclToIndex: a mapping from Decls we track to value indices.
//===----------------------------------------------------------------------===//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//===----------------------------------------------------------------------===//
// CFGBlockValues: dataflow values for CFG blocks.
//===----------------------------------------------------------------------===//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
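
// For example, if a variable is Initialized (01) on one incoming edge and
// Uninitialized (10) on another, the merged value is 01 | 10 == 11, i.e.
// MayUninitialized. Merging anything with Unknown (00) is a no-op, so
// Unknown acts as the identity element of the join.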

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

typedef llvm::PackedVector<Value, 2> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  std::vector<ValueVector*> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return *vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

CFGBlockValues::~CFGBlockValues() {
  for (std::vector<ValueVector*>::iterator I = vals.begin(), E = vals.end();
       I != E; ++I)
    delete *I;
}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i] = new ValueVector(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//===----------------------------------------------------------------------===//
// Worklist: worklist for dataflow analysis.
//===----------------------------------------------------------------------===//

namespace {
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
    return;

  // Rotate the newly added blocks to the start of the worklist so that it
  // forms a proper queue when we pop off the end of the worklist.
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
              worklist.end());
}
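
// For instance, if the worklist already holds [A, B] and this call enqueues
// C and D, push_back yields [A, B, C, D] and the rotate produces
// [C, D, A, B]. Since dequeue() pops from the back, the older blocks B and
// A come off before the newly added D and C, giving FIFO (breadth-first)
// ordering. (A through D are hypothetical blocks, for illustration only.)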

const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//===----------------------------------------------------------------------===//
// Classification of DeclRefExprs as use or initialization.
//===----------------------------------------------------------------------===//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

/// If E is an expression comprising a reference to a single variable, find
/// that variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

287
288/// \brief Classify each DeclRefExpr as an initialization or a use. Any
289/// DeclRefExpr which isn't explicitly classified will be assumed to have
290/// escaped the analysis and will be treated as an initialization.
291class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
292public:
293  enum Class {
294    Init,
295    Use,
296    SelfInit,
297    Ignore
298  };
299
300private:
301  const DeclContext *DC;
302  llvm::DenseMap<const DeclRefExpr*, Class> Classification;
303
304  bool isTrackedVar(const VarDecl *VD) const {
305    return ::isTrackedVar(VD, DC);
306  }
307
308  void classify(const Expr *E, Class C);
309
310public:
311  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}
312
313  void VisitDeclStmt(DeclStmt *DS);
314  void VisitUnaryOperator(UnaryOperator *UO);
315  void VisitBinaryOperator(BinaryOperator *BO);
316  void VisitCallExpr(CallExpr *CE);
317  void VisitCastExpr(CastExpr *CE);
318
319  void operator()(Stmt *S) { Visit(S); }
320
321  Class get(const DeclRefExpr *DRE) const {
322    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
323        = Classification.find(DRE);
324    if (I != Classification.end())
325      return I->second;
326
327    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
328    if (!VD || !isTrackedVar(VD))
329      return Ignore;
330
331    return Init;
332  }
333};
334}

/// If the initializer of 'VD' is (modulo casts) a reference to 'VD' itself,
/// as in 'int x = x;', return that self-referencing DeclRefExpr.
static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}

void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of an assignment. If
  // this is not a compound-assignment, we will treat it as initializing the
  // variable when TransferFunctions visits it. A compound-assignment reads
  // the variable before writing to it, so we classify its LHS as a use; it
  // still does not initialize the variable.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not
  // assume that it is initialized by the call, and we conservatively do not
  // assume that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}
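
// Taken together, these visitors classify, for example:
//
//   int x;      // no self-initialization; nothing is recorded here
//   x = 5;      // LHS DeclRefExpr: Ignore (TransferFunctions will treat
//               // the assignment itself as the initialization)
//   x += 5;     // LHS DeclRefExpr: Use (the old value is read)
//   ++x;        // operand DeclRefExpr: Use
//   (void)x;    // Ignore, squelching any load detected within the cast:
//               // classify() keeps the std::max of the recorded classes,
//               // and Ignore is the largest Class value, so it wins.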

//===----------------------------------------------------------------------===//
// Transfer function for uninitialized values analysis.
//===----------------------------------------------------------------------===//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  UninitVariablesHandler *handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitBinaryOperator(BinaryOperator *bo);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  if (!handler)
    return;
  Value v = vals[vd];
  if (isUninitialized(v))
    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end(); i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  // After a call to a function like setjmp or vfork, any variable which is
  // initialized anywhere within this function may now be initialized. For now,
  // just assume such a call initializes all variables.
  // FIXME: Only mark variables as initialized if they have an initializer which
  // is reachable from here.
  Decl *Callee = ce->getCalleeDecl();
  if (Callee && Callee->hasAttr<ReturnsTwiceAttr>())
    vals.setAllScratchValues(Initialized);
}
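
// An illustrative sketch of why this matters (hypothetical code, assuming
// <setjmp.h>):
//
//   jmp_buf env;
//   int n;
//   if (setjmp(env) == 0) {
//     n = 1;
//     may_longjmp(env);  // hypothetical helper that may longjmp(env, 1)
//   }
//   use(n);              // hypothetical use of 'n'
//
// The second return of setjmp reaches use(n) along a CFG path that never
// passes 'n = 1', even though 'n' was assigned before the longjmp. Marking
// everything initialized at the returns_twice call suppresses that false
// positive, at the cost of missing real uninitialized uses after such calls.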

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    if (handler)
      handler->handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

//===----------------------------------------------------------------------===//
// High-level "driver" logic for uninitialized values analysis.
//===----------------------------------------------------------------------===//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}
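
// A minimal client sketch (illustrative only; the handler signatures are
// assumed from the calls above, and UninitializedValues.h remains the
// authoritative interface). A caller subclasses UninitVariablesHandler and
// hands it to the driver together with the function's CFG:
//
//   struct PrintingHandler : public UninitVariablesHandler {
//     void handleUseOfUninitVariable(const VarDecl *vd,
//                                    const UninitUse &use) {
//       llvm::errs() << vd->getName() << " may be used uninitialized\n";
//     }
//     void handleSelfInit(const VarDecl *vd) {
//       llvm::errs() << vd->getName() << " is self-initialized\n";
//     }
//   };
//
//   PrintingHandler reporter;
//   UninitVariablesAnalysisStats stats = {};
//   runUninitializedVariablesAnalysis(dc, cfg, ac, reporter, stats);
//
// where 'dc', 'cfg', and 'ac' are the DeclContext, CFG, and
// AnalysisDeclContext of the function being analyzed.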

UninitVariablesHandler::~UninitVariablesHandler() {}