// UninitializedValues.cpp revision bc8b44c4ee7f9c4c3ad296369e72feda61bdb580
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file implements uninitialized values analysis for source-level CFGs.
11//
12//===----------------------------------------------------------------------===//
13
14#include <utility>
15#include "llvm/ADT/Optional.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/BitVector.h"
18#include "llvm/ADT/DenseMap.h"
19#include "clang/AST/Decl.h"
20#include "clang/Analysis/CFG.h"
21#include "clang/Analysis/AnalysisContext.h"
22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23#include "clang/Analysis/Analyses/UninitializedValues.h"
24#include "clang/Analysis/Support/SaveAndRestore.h"
25
26using namespace clang;
27
28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30      vd->getDeclContext() == dc) {
31    QualType ty = vd->getType();
32    return ty->isScalarType() || ty->isVectorType();
33  }
34  return false;
35}
36
37//------------------------------------------------------------------------====//
38// DeclToIndex: a mapping from Decls we track to value indices.
39//====------------------------------------------------------------------------//
40
namespace {
/// Maps each tracked VarDecl of a DeclContext to a dense index, used as
/// the position of the variable's two bits inside a ValueVector.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
57
58void DeclToIndex::computeMap(const DeclContext &dc) {
59  unsigned count = 0;
60  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
61                                               E(dc.decls_end());
62  for ( ; I != E; ++I) {
63    const VarDecl *vd = *I;
64    if (isTrackedVar(vd, &dc))
65      map[vd] = count++;
66  }
67}
68
69llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
70  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
71  if (I == map.end())
72    return llvm::Optional<unsigned>();
73  return I->second;
74}
75
76//------------------------------------------------------------------------====//
77// CFGBlockValues: dataflow values for CFG blocks.
78//====------------------------------------------------------------------------//
79
// The dataflow merge operation is a bitwise OR, so the lattice values
// are encoded such that OR-ing any two of them yields their join:
// bit 0 = "may be initialized", bit 1 = "may be uninitialized".
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// True for both the definite and the "maybe" uninitialized states.
static bool isUninitialized(const Value v) {
  return v == Uninitialized || v == MayUninitialized;
}
// True only for the definitely-uninitialized state.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
93
94namespace {
/// A packed vector of per-variable dataflow values.  Each tracked
/// variable occupies two adjacent bits of the underlying BitVector
/// (hence the 'size << 1' scaling): the even bit holds the
/// 'Initialized' flag and the odd bit the 'Uninitialized' flag,
/// matching the encoding of the 'Value' enum.
class ValueVector {
  llvm::BitVector vec;
public:
  ValueVector() {}
  ValueVector(unsigned size) : vec(size << 1) {}
  void resize(unsigned n) { vec.resize(n << 1); }
  // Lattice join: bitwise OR matches the Value encoding.
  void merge(const ValueVector &rhs) { vec |= rhs.vec; }
  bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; }
  void reset() { vec.reset(); }

  /// Proxy object giving read/write access to the two bits that encode
  /// one variable's Value.
  class reference {
    ValueVector &vv;
    const unsigned idx;

    reference();  // Undefined
  public:
    reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {}
    ~reference() {}

    // Unpack 'v' into the two bits for this variable.
    reference &operator=(Value v) {
      vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false;
      vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? true : false;
      return *this;
    }
    // Re-pack the two bits into a Value.
    operator Value() {
      unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0);
      return (Value) x;
    }
  };

  reference operator[](unsigned idx) { return reference(*this, idx); }
};
127
/// A pair of dataflow vectors for one CFG block.  Most blocks only use
/// '.first'; '.second' is allocated only for blocks in a logical
/// operator ('&&'/'||') chain, which keep distinct values per successor
/// edge (see getValueVector/getValueVectors).
typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the per-block dataflow vectors for a CFG plus a shared scratch
/// vector used while merging predecessor values.
class CFGBlockValues {
  const CFG &cfg;
  // One BVPair per CFG block, indexed by block ID; the individual
  // vectors are allocated on demand via lazyCreate().
  BVPair *vals;
  // Temporary accumulator for merged predecessor values.
  ValueVector scratch;
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  /// Build the decl-to-index map for 'dc' and size the scratch vector.
  void computeSetOfDeclarations(const DeclContext &dc);
  /// Return the value vector of 'block'.  For logical-operator blocks,
  /// 'dstBlock' selects which per-successor vector is meant.
  ValueVector &getValueVector(const CFGBlock *block,
                                const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  /// Copy (isFirst) or OR-merge (!isFirst) 'source' into scratch.
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  /// Store scratch into the block's vector; true if values changed.
  bool updateValueVectorWithScratch(const CFGBlock *block);
  /// Store both vectors of 'newVals'; true if either changed.
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  bool hasEntry(const VarDecl *vd) const {
    return declToIndex.getValueIndex(vd).hasValue();
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  /// Access the scratch-vector entry for 'vd' (must be tracked).
  ValueVector::reference operator[](const VarDecl *vd);
};
164} // end anonymous namespace
165
166CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
167  unsigned n = cfg.getNumBlockIDs();
168  if (!n)
169    return;
170  vals = new std::pair<ValueVector*, ValueVector*>[n];
171  memset(vals, 0, sizeof(*vals) * n);
172}
173
174CFGBlockValues::~CFGBlockValues() {
175  unsigned n = cfg.getNumBlockIDs();
176  if (n == 0)
177    return;
178  for (unsigned i = 0; i < n; ++i) {
179    delete vals[i].first;
180    delete vals[i].second;
181  }
182  delete [] vals;
183}
184
/// Populate the decl-to-index map from 'dc' and size the scratch vector
/// to hold two bits per tracked declaration.
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}
189
/// Allocate the vector on first use (sized to the tracked-variable
/// count); later calls return the existing one.
ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}
195
/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements.  If such an expression is found, it is returned.
static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  // Require exactly two predecessors, and either the logical operator is
  // also the block's branch condition (two successors) or it is the
  // block's sole element.
  if (block->pred_size() == 2 &&
      ((block->succ_size() == 2 && block->getTerminatorCondition() == b) ||
       block->size() == 1))
    return b;

  return 0;
}
219
/// Return the dataflow vector for 'block'.  Logical-operator blocks keep
/// one vector per successor edge, so 'dstBlock' picks the edge: the
/// first successor maps to '.first', the second to '.second'.
ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  // Ordinary blocks only ever use the '.first' slot.
  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}
233
234BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
235                                        bool shouldLazyCreate) {
236  unsigned idx = block->getBlockID();
237  lazyCreate(vals[idx].first);
238  if (shouldLazyCreate)
239    lazyCreate(vals[idx].second);
240  return vals[idx];
241}
242
243void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
244                                      bool isFirst) {
245  if (isFirst)
246    scratch = source;
247  else
248    scratch.merge(source);
249}
#if 0
// Debugging helper (disabled): dump a block's value vector to stderr.
// NOTE(review): this code references bv.size() and bv[i] over raw bit
// positions, which ValueVector does not expose; it would need updating
// before being re-enabled.
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif
261
/// Overwrite the block's (single) value vector with the scratch vector.
/// Returns true if the stored values changed, i.e. the fixed point has
/// not yet been reached for this block.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}
272
/// Overwrite both per-edge vectors of a logical-operator block with
/// 'newVals'.  Returns true if either vector changed.
bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                      const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}
286
/// Clear the scratch vector to all zero bits (every variable 'Unknown')
/// before merging a new block's predecessors.
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
290
291ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
292  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
293  assert(idx.hasValue());
294  return scratch[idx.getValue()];
295}
296
297//------------------------------------------------------------------------====//
298// Worklist: worklist for dataflow analysis.
299//====------------------------------------------------------------------------//
300
namespace {
/// A LIFO worklist of CFG blocks for the fixed-point iteration.
/// 'enqueuedBlocks' tracks membership by block ID so a block is never
/// queued twice at the same time.
class DataflowWorklist {
  llvm::SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueue(const CFGBlock *block);
  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();

};
}
314
315void DataflowWorklist::enqueue(const CFGBlock *block) {
316  if (!block)
317    return;
318  unsigned idx = block->getBlockID();
319  if (enqueuedBlocks[idx])
320    return;
321  worklist.push_back(block);
322  enqueuedBlocks[idx] = true;
323}
324
325void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
326  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
327       E = block->succ_end(); I != E; ++I) {
328    enqueue(*I);
329  }
330}
331
332const CFGBlock *DataflowWorklist::dequeue() {
333  if (worklist.empty())
334    return 0;
335  const CFGBlock *b = worklist.back();
336  worklist.pop_back();
337  enqueuedBlocks[b->getBlockID()] = false;
338  return b;
339}
340
341//------------------------------------------------------------------------====//
342// Transfer function for uninitialized values analysis.
343//====------------------------------------------------------------------------//
344
345namespace {
346class FindVarResult {
347  const VarDecl *vd;
348  const DeclRefExpr *dr;
349public:
350  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}
351
352  const DeclRefExpr *getDeclRefExpr() const { return dr; }
353  const VarDecl *getDecl() const { return vd; }
354};
355
/// Implements the per-statement transfer functions of the analysis:
/// visiting a statement updates the per-variable values in 'vals' and,
/// when a handler is installed, reports uses of uninitialized variables.
class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  AnalysisContext &ac;
  // May be null; uninitialized uses are only reported when it is set.
  UninitVariablesHandler *handler;
  // DeclRefExpr consumed by the innermost enclosing load, assignment,
  // or ++/--; used to tell loads apart from address-taking references
  // (see VisitDeclRefExpr).
  const DeclRefExpr *currentDR;
  // Subexpression of an enclosing '(void)' C-style cast; loads of it
  // are deliberately discarded and not reported (see VisitCastExpr).
  const Expr *currentVoidCast;
  const bool flagBlockUses;
public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    AnalysisContext &ac,
                    UninitVariablesHandler *handler,
                    bool flagBlockUses)
    : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0),
      currentVoidCast(0), flagBlockUses(flagBlockUses) {}

  const CFG &getCFG() { return cfg; }
  /// Forward a use-of-uninitialized diagnostic to 'handler', if set.
  void reportUninit(const DeclRefExpr *ex, const VarDecl *vd,
                    bool isAlwaysUninit);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se);
  void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);

  /// Whether 'vd' has an entry in the dataflow vectors.
  bool isTrackedVar(const VarDecl *vd) {
#if 1
    // FIXME: This is a temporary workaround to deal with the fact
    // that DeclContext's do not always contain all of their variables!
    return vals.hasEntry(vd);
#else
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
#endif
  }

  FindVarResult findBlockVarDecl(Expr *ex);
};
397}
398
399void TransferFunctions::reportUninit(const DeclRefExpr *ex,
400                                     const VarDecl *vd, bool isAlwaysUnit) {
401  if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit);
402}
403
404FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) {
405  if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
406    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
407      if (isTrackedVar(vd))
408        return FindVarResult(vd, dr);
409  return FindVarResult(0, 0);
410}
411
/// Transfer function for ObjC fast-enumeration loops: the loop assigns
/// to the element variable, so a tracked element becomes Initialized.
void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt(
    ObjCForCollectionStmt *fs) {

  Visit(fs->getCollection());

  // This represents an initialization of the 'element' value.
  Stmt *element = fs->getElement();
  const VarDecl* vd = 0;

  if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) {
    // The element is declared inside the loop header.
    vd = cast<VarDecl>(ds->getSingleDecl());
    if (!isTrackedVar(vd))
      vd = 0;
  }
  else {
    // Initialize the value of the reference variable.
    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
    vd = res.getDecl();
    if (!vd) {
      // Not a tracked variable; analyze the expression normally.
      Visit(element);
      return;
    }
  }

  if (vd)
    vals[vd] = Initialized;
}
439
/// When a block literal is seen (and block-use flagging is enabled),
/// by-reference captures become Initialized, while by-value captures of
/// uninitialized locals are reported to the handler.
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  if (!flagBlockUses || !handler)
    return;
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!vd->hasLocalStorage())
      continue;
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      // By-reference capture: the block may assign to the variable, so
      // conservatively treat it as initialized.
      vals[vd] = Initialized;
      continue;
    }
    Value v = vals[vd];
    if (isUninitialized(v))
      handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
  }
}
460
461void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
462  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
463       DI != DE; ++DI) {
464    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
465      if (isTrackedVar(vd)) {
466        vals[vd] = Uninitialized;
467        if (Stmt *init = vd->getInit()) {
468          Visit(init);
469          vals[vd] = Initialized;
470        }
471      }
472      else if (Stmt *init = vd->getInit()) {
473        Visit(init);
474      }
475    }
476  }
477}
478
479void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
480  // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
481  // cannot be block-level expressions.  Therefore, we determine if
482  // a DeclRefExpr is involved in a "load" by comparing it to the current
483  // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
484  // If a DeclRefExpr is not involved in a load, we are essentially computing
485  // its address, either for assignment to a reference or via the '&' operator.
486  // In such cases, treat the variable as being initialized, since this
487  // analysis isn't powerful enough to do alias tracking.
488  if (dr != currentDR)
489    if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
490      if (isTrackedVar(vd))
491        vals[vd] = Initialized;
492}
493
/// Transfer function for assignments: the LHS variable becomes
/// Initialized; compound assignments additionally read the LHS, so an
/// uninitialized LHS is reported as a use.
void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
  if (bo->isAssignmentOp()) {
    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
    if (const VarDecl* vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment"
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      // Visit RHS before LHS: the RHS is evaluated with the LHS still in
      // its pre-assignment state.
      Visit(bo->getRHS());
      Visit(bo->getLHS());

      ValueVector::reference val = vals[vd];
      if (isUninitialized(val)) {
        // Only compound assignments (+=, etc.) read the LHS; plain '='
        // does not, so it is not reported.
        if (bo->getOpcode() != BO_Assign)
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
        // Either way the variable is initialized afterwards; this also
        // prevents cascading warnings.
        val = Initialized;
      }
      return;
    }
  }
  Visit(bo->getRHS());
  Visit(bo->getLHS());
}
519
/// Transfer function for '++'/'--': these read and then write their
/// operand, so an uninitialized operand is reported as a use and the
/// variable is marked Initialized afterwards.
void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
  switch (uo->getOpcode()) {
    case clang::UO_PostDec:
    case clang::UO_PostInc:
    case clang::UO_PreDec:
    case clang::UO_PreInc: {
      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
      if (const VarDecl *vd = res.getDecl()) {
        // We assume that DeclRefExprs wrapped in a unary operator ++/--
        // cannot be block-level expressions.  Therefore, we determine if
        // a DeclRefExpr is involved in a "load" by comparing it to the current
        // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
        SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                  res.getDeclRefExpr());
        Visit(uo->getSubExpr());

        ValueVector::reference val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          val = Initialized;
        }
        return;
      }
      break;
    }
    default:
      break;
  }
  // Not an inc/dec of a tracked variable: analyze normally.
  Visit(uo->getSubExpr());
}
551
/// Transfer function for casts.  An lvalue-to-rvalue cast of a tracked
/// variable is a load: if the variable may be uninitialized, the use is
/// reported — unless the value is discarded by an enclosing '(void)'
/// cast, which is tracked via 'currentVoidCast'.
void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (const VarDecl *vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      // Here we update 'currentDR' to be the one associated with this
      // lvalue-to-rvalue cast.  Then, when we analyze the DeclRefExpr, we
      // will know that we are not computing its lvalue for other purposes
      // than to perform a load.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(ce->getSubExpr());
      // Skip the diagnostic when this load is the subexpression of a
      // '(void)' cast — the value is deliberately discarded.
      if (currentVoidCast != ce) {
        Value val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          vals[vd] = Initialized;
        }
      }
      return;
    }
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;
      SaveAndRestore<const Expr *>
        lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
      Visit(cse->getSubExpr());
      return;
    }
  }
  Visit(ce->getSubExpr());
}
589
590void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
591                                          UnaryExprOrTypeTraitExpr *se) {
592  if (se->getKind() == UETT_SizeOf) {
593    if (se->getType()->isConstantSizeType())
594      return;
595    // Handle VLAs.
596    Visit(se->getArgumentExpr());
597  }
598}
599
600//------------------------------------------------------------------------====//
601// High-level "driver" logic for uninitialized values analysis.
602//====------------------------------------------------------------------------//
603
/// Process one CFG block: merge the values flowing in from its
/// predecessors, apply each statement's transfer function, and store
/// the result.  Returns true if the block's stored values changed
/// (i.e. the fixed point has not yet been reached).
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  // Blocks that head a '&&'/'||' chain keep a separate vector per
  // successor edge, merged from the corresponding edges of their two
  // predecessors instead of from a single joined input.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
653
/// Entry point: run the uninitialized-values dataflow analysis over
/// 'cfg' and report each use of an uninitialized variable through
/// 'handler'.
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  // Nothing to do if the context declares no tracked variables.
  if (vals.hasNoDeclarations())
    return;
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());

  worklist.enqueueSuccessors(&cfg.getEntry());

  // Phase 1: iterate to a fixed point.  No diagnostics are emitted here
  // (runOnBlock is called without a handler).
  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals);
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Phase 2: run through the blocks one more time, and report
  // uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    runOnBlock(*BI, cfg, ac, vals, &handler, /* flagBlockUses */ true);
  }
}
680
// Out-of-line destructor definition for the handler interface; the
// class itself is declared in clang/Analysis/Analyses/UninitializedValues.h.
UninitVariablesHandler::~UninitVariablesHandler() {}
682
683