UninitializedValues.cpp revision 05bcade0182524731cf4bc4984e08f63ddf62374
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//
13
14#include <utility>
15#include "llvm/ADT/Optional.h"
16#include "llvm/ADT/SmallVector.h"
17#include "llvm/ADT/PackedVector.h"
18#include "llvm/ADT/DenseMap.h"
19#include "clang/AST/Decl.h"
20#include "clang/Analysis/CFG.h"
21#include "clang/Analysis/AnalysisContext.h"
22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23#include "clang/Analysis/Analyses/UninitializedValues.h"
24#include "clang/Analysis/Support/SaveAndRestore.h"
25
26using namespace clang;
27
28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30      !vd->isExceptionVariable() &&
31      vd->getDeclContext() == dc) {
32    QualType ty = vd->getType();
33    return ty->isScalarType() || ty->isVectorType();
34  }
35  return false;
36}
37
38//------------------------------------------------------------------------====//
39// DeclToIndex: a mapping from Decls we track to value indices.
40//====------------------------------------------------------------------------//
41
namespace {
/// DeclToIndex assigns each tracked VarDecl of a DeclContext a dense,
/// zero-based index, which is used as that variable's position in the
/// dataflow bit vectors.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration, or an empty
  /// Optional if the declaration is not tracked.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
58
59void DeclToIndex::computeMap(const DeclContext &dc) {
60  unsigned count = 0;
61  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
62                                               E(dc.decls_end());
63  for ( ; I != E; ++I) {
64    const VarDecl *vd = *I;
65    if (isTrackedVar(vd, &dc))
66      map[vd] = count++;
67  }
68}
69
70llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
71  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
72  if (I == map.end())
73    return llvm::Optional<unsigned>();
74  return I->second;
75}
76
77//------------------------------------------------------------------------====//
78// CFGBlockValues: dataflow values for CFG blocks.
79//====------------------------------------------------------------------------//
80
// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// True when the "uninitialized" bit is set, i.e. the variable is either
// definitely (Uninitialized) or possibly (MayUninitialized) uninitialized.
static bool isUninitialized(const Value v) {
  return (v & Uninitialized) != 0;
}

// True only for the definitely-uninitialized lattice value.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
94
namespace {

// One two-bit dataflow value (see the Value enum) per tracked variable.
typedef llvm::PackedVector<Value, 2> ValueVector;
// Per-block pair of value vectors.  Most blocks only ever use 'first';
// blocks inside a '&&'/'||' chain keep one vector per successor edge.
typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the dataflow value vectors for every block of a CFG, plus a scratch
/// vector used while merging predecessor values and running the transfer
/// functions over the current block.
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;           // array indexed by block ID; vectors created lazily
  ValueVector scratch;    // working vector for the block being processed
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  /// Return the vector for 'block' along the edge leading to 'dstBlock';
  /// the edge only matters for logical-operator-chain blocks.
  ValueVector &getValueVector(const CFGBlock *block,
                                const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  /// True when the analyzed context declares no tracked variables.
  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  /// True when 'vd' is one of the tracked declarations.
  bool hasEntry(const VarDecl *vd) const {
    return declToIndex.getValueIndex(vd).hasValue();
  }

  bool hasValues(const CFGBlock *block);

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  /// Access the scratch-vector entry for a tracked declaration.
  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace
139
140CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
141  unsigned n = cfg.getNumBlockIDs();
142  if (!n)
143    return;
144  vals = new std::pair<ValueVector*, ValueVector*>[n];
145  memset((void*)vals, 0, sizeof(*vals) * n);
146}
147
148CFGBlockValues::~CFGBlockValues() {
149  unsigned n = cfg.getNumBlockIDs();
150  if (n == 0)
151    return;
152  for (unsigned i = 0; i < n; ++i) {
153    delete vals[i].first;
154    delete vals[i].second;
155  }
156  delete [] vals;
157}
158
159void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
160  declToIndex.computeMap(dc);
161  scratch.resize(declToIndex.size());
162}
163
164ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
165  if (!bv)
166    bv = new ValueVector(declToIndex.size());
167  return *bv;
168}
169
170/// This function pattern matches for a '&&' or '||' that appears at
171/// the beginning of a CFGBlock that also (1) has a terminator and
172/// (2) has no other elements.  If such an expression is found, it is returned.
173static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
174  if (block->empty())
175    return 0;
176
177  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
178  if (!cstmt)
179    return 0;
180
181  BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());
182
183  if (!b || !b->isLogicalOp())
184    return 0;
185
186  if (block->pred_size() == 2) {
187    if (block->getTerminatorCondition() == b) {
188      if (block->succ_size() == 2)
189      return b;
190    }
191    else if (block->size() == 1)
192      return b;
193  }
194
195  return 0;
196}
197
198ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
199                                            const CFGBlock *dstBlock) {
200  unsigned idx = block->getBlockID();
201  if (dstBlock && getLogicalOperatorInChain(block)) {
202    if (*block->succ_begin() == dstBlock)
203      return lazyCreate(vals[idx].first);
204    assert(*(block->succ_begin()+1) == dstBlock);
205    return lazyCreate(vals[idx].second);
206  }
207
208  assert(vals[idx].second == 0);
209  return lazyCreate(vals[idx].first);
210}
211
212bool CFGBlockValues::hasValues(const CFGBlock *block) {
213  unsigned idx = block->getBlockID();
214  return vals[idx].second != 0;
215}
216
217BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
218                                        bool shouldLazyCreate) {
219  unsigned idx = block->getBlockID();
220  lazyCreate(vals[idx].first);
221  if (shouldLazyCreate)
222    lazyCreate(vals[idx].second);
223  return vals[idx];
224}
225
226void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
227                                      bool isFirst) {
228  if (isFirst)
229    scratch = source;
230  else
231    scratch |= source;
232}
#if 0
// Debug-only helper (currently compiled out): dumps a block's value vector
// to stderr as "<blockID> : v0 v1 ... : <num>".
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif
244
245bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
246  ValueVector &dst = getValueVector(block, 0);
247  bool changed = (dst != scratch);
248  if (changed)
249    dst = scratch;
250#if 0
251  printVector(block, scratch, 0);
252#endif
253  return changed;
254}
255
256bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
257                                      const BVPair &newVals) {
258  BVPair &vals = getValueVectors(block, true);
259  bool changed = *newVals.first != *vals.first ||
260                 *newVals.second != *vals.second;
261  *vals.first = *newVals.first;
262  *vals.second = *newVals.second;
263#if 0
264  printVector(block, *vals.first, 1);
265  printVector(block, *vals.second, 2);
266#endif
267  return changed;
268}
269
// Clear the scratch vector before a new merge; note that Unknown is the
// all-zero Value, so a reset scratch means "all variables Unknown".
void CFGBlockValues::resetScratch() {
  scratch.reset();
}
273
274ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
275  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
276  assert(idx.hasValue());
277  return scratch[idx.getValue()];
278}
279
280//------------------------------------------------------------------------====//
281// Worklist: worklist for dataflow analysis.
282//====------------------------------------------------------------------------//
283
namespace {
/// Worklist of CFG blocks for the forward dataflow pass.  The
/// 'enqueuedBlocks' bit vector keeps a block from being queued twice.
class DataflowWorklist {
  llvm::SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  /// Add every non-null, not-yet-enqueued successor of 'block'.
  void enqueueSuccessors(const CFGBlock *block);
  /// Pop the next block, or return null when the worklist is empty.
  const CFGBlock *dequeue();
};
}
295
296void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
297  unsigned OldWorklistSize = worklist.size();
298  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
299       E = block->succ_end(); I != E; ++I) {
300    const CFGBlock *Successor = *I;
301    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
302      continue;
303    worklist.push_back(Successor);
304    enqueuedBlocks[Successor->getBlockID()] = true;
305  }
306  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
307    return;
308
309  // Rotate the newly added blocks to the start of the worklist so that it forms
310  // a proper queue when we pop off the end of the worklist.
311  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
312              worklist.end());
313}
314
315const CFGBlock *DataflowWorklist::dequeue() {
316  if (worklist.empty())
317    return 0;
318  const CFGBlock *b = worklist.back();
319  worklist.pop_back();
320  enqueuedBlocks[b->getBlockID()] = false;
321  return b;
322}
323
324//------------------------------------------------------------------------====//
325// Transfer function for uninitialized values analysis.
326//====------------------------------------------------------------------------//
327
328namespace {
329class FindVarResult {
330  const VarDecl *vd;
331  const DeclRefExpr *dr;
332public:
333  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}
334
335  const DeclRefExpr *getDeclRefExpr() const { return dr; }
336  const VarDecl *getDecl() const { return vd; }
337};
338
/// Implements the per-statement transfer function of the analysis: visiting
/// the statements of a CFG block updates the scratch vector held by 'vals'.
class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  AnalysisContext &ac;
  UninitVariablesHandler *handler;  // may be null; reporting is skipped then
  // The DeclRefExpr being consumed by an enclosing load, assignment LHS, or
  // ++/--; VisitDeclRefExpr uses it to tell loads apart from other uses.
  const DeclRefExpr *currentDR;
  // Subexpression of an enclosing '(void)' C-style cast; loads matching it
  // are not reported (see VisitCastExpr).
  const Expr *currentVoidCast;
  const bool flagBlockUses;  // also check block-literal captures when set
public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    AnalysisContext &ac,
                    UninitVariablesHandler *handler,
                    bool flagBlockUses)
    : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0),
      currentVoidCast(0), flagBlockUses(flagBlockUses) {}

  const CFG &getCFG() { return cfg; }
  /// Forward a use-of-uninitialized-variable diagnostic to 'handler', if any.
  void reportUninit(const DeclRefExpr *ex, const VarDecl *vd,
                    bool isAlwaysUninit);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se);
  void VisitCXXTypeidExpr(CXXTypeidExpr *E);
  void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);

  /// Convenience wrapper around the file-level isTrackedVar, using the
  /// DeclContext of the declaration being analyzed.
  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findBlockVarDecl(Expr *ex);
};
375}
376
377void TransferFunctions::reportUninit(const DeclRefExpr *ex,
378                                     const VarDecl *vd, bool isAlwaysUnit) {
379  if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit);
380}
381
382FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) {
383  if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
384    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
385      if (isTrackedVar(vd))
386        return FindVarResult(vd, dr);
387  return FindVarResult(0, 0);
388}
389
390void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt(
391    ObjCForCollectionStmt *fs) {
392
393  Visit(fs->getCollection());
394
395  // This represents an initialization of the 'element' value.
396  Stmt *element = fs->getElement();
397  const VarDecl* vd = 0;
398
399  if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) {
400    vd = cast<VarDecl>(ds->getSingleDecl());
401    if (!isTrackedVar(vd))
402      vd = 0;
403  }
404  else {
405    // Initialize the value of the reference variable.
406    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
407    vd = res.getDecl();
408    if (!vd) {
409      Visit(element);
410      return;
411    }
412  }
413
414  if (vd)
415    vals[vd] = Initialized;
416}
417
418void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
419  if (!flagBlockUses || !handler)
420    return;
421  const BlockDecl *bd = be->getBlockDecl();
422  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
423        e = bd->capture_end() ; i != e; ++i) {
424    const VarDecl *vd = i->getVariable();
425    if (!vd->hasLocalStorage())
426      continue;
427    if (!isTrackedVar(vd))
428      continue;
429    if (i->isByRef()) {
430      vals[vd] = Initialized;
431      continue;
432    }
433    Value v = vals[vd];
434    if (isUninitialized(v))
435      handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
436  }
437}
438
439void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
440  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
441       DI != DE; ++DI) {
442    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
443      if (isTrackedVar(vd)) {
444        if (Expr *init = vd->getInit()) {
445          Visit(init);
446
447          // If the initializer consists solely of a reference to itself, we
448          // explicitly mark the variable as uninitialized. This allows code
449          // like the following:
450          //
451          //   int x = x;
452          //
453          // to deliberately leave a variable uninitialized. Different analysis
454          // clients can detect this pattern and adjust their reporting
455          // appropriately, but we need to continue to analyze subsequent uses
456          // of the variable.
457          DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(init->IgnoreParenImpCasts());
458          vals[vd] = (DRE && DRE->getDecl() == vd) ? Uninitialized
459                                                   : Initialized;
460        }
461      } else if (Stmt *init = vd->getInit()) {
462        Visit(init);
463      }
464    }
465  }
466}
467
468void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
469  // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
470  // cannot be block-level expressions.  Therefore, we determine if
471  // a DeclRefExpr is involved in a "load" by comparing it to the current
472  // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
473  // If a DeclRefExpr is not involved in a load, we are essentially computing
474  // its address, either for assignment to a reference or via the '&' operator.
475  // In such cases, treat the variable as being initialized, since this
476  // analysis isn't powerful enough to do alias tracking.
477  if (dr != currentDR)
478    if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
479      if (isTrackedVar(vd))
480        vals[vd] = Initialized;
481}
482
void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
  if (bo->isAssignmentOp()) {
    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
    if (const VarDecl* vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment"
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      // The RHS is visited before the LHS; both are walked while 'currentDR'
      // points at the LHS reference so it is not treated as a plain load.
      Visit(bo->getRHS());
      Visit(bo->getLHS());

      ValueVector::reference val = vals[vd];
      if (isUninitialized(val)) {
        if (bo->getOpcode() != BO_Assign) {
          // Compound assignment (+=, etc.) reads the LHS, so an
          // uninitialized LHS is reported; downgrade to Unknown so the same
          // variable does not trigger cascading warnings.
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          val = Unknown;
        } else
          // A plain '=' writes the variable, making it initialized.
          val = Initialized;
      }
      return;
    }
  }
  // Not an assignment to a tracked variable: just visit both operands.
  Visit(bo->getRHS());
  Visit(bo->getLHS());
}
510
511void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
512  switch (uo->getOpcode()) {
513    case clang::UO_PostDec:
514    case clang::UO_PostInc:
515    case clang::UO_PreDec:
516    case clang::UO_PreInc: {
517      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
518      if (const VarDecl *vd = res.getDecl()) {
519        // We assume that DeclRefExprs wrapped in a unary operator ++/--
520        // cannot be block-level expressions.  Therefore, we determine if
521        // a DeclRefExpr is involved in a "load" by comparing it to the current
522        // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
523        SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
524                                                  res.getDeclRefExpr());
525        Visit(uo->getSubExpr());
526
527        ValueVector::reference val = vals[vd];
528        if (isUninitialized(val)) {
529          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
530          // Don't cascade warnings.
531          val = Unknown;
532        }
533        return;
534      }
535      break;
536    }
537    default:
538      break;
539  }
540  Visit(uo->getSubExpr());
541}
542
void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    // An lvalue-to-rvalue cast of a tracked variable is a load of its value.
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (const VarDecl *vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      // Here we update 'currentDR' to be the one associated with this
      // lvalue-to-rvalue cast.  Then, when we analyze the DeclRefExpr, we
      // will know that we are not computing its lvalue for other purposes
      // than to perform a load.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(ce->getSubExpr());
      // Loads discarded by an enclosing '(void)' cast (recorded in
      // 'currentVoidCast' below) are intentionally not reported.
      if (currentVoidCast != ce) {
        Value val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          vals[vd] = Unknown;
        }
      }
      return;
    }
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;  -- remember the discarded subexpression so the load
      // it triggers is suppressed above.
      SaveAndRestore<const Expr *>
        lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
      Visit(cse->getSubExpr());
      return;
    }
  }
  Visit(ce->getSubExpr());
}
580
581void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
582                                          UnaryExprOrTypeTraitExpr *se) {
583  if (se->getKind() == UETT_SizeOf) {
584    if (se->getType()->isConstantSizeType())
585      return;
586    // Handle VLAs.
587    Visit(se->getArgumentExpr());
588  }
589}
590
591void TransferFunctions::VisitCXXTypeidExpr(CXXTypeidExpr *E) {
592  // typeid(expression) is potentially evaluated when the argument is
593  // a glvalue of polymorphic type. (C++ 5.2.8p2-3)
594  if (!E->isTypeOperand() && E->Classify(ac.getASTContext()).isGLValue()) {
595    QualType SubExprTy = E->getExprOperand()->getType();
596    if (const RecordType *Record = SubExprTy->getAs<RecordType>())
597      if (cast<CXXRecordDecl>(Record->getDecl())->isPolymorphic())
598        Visit(E->getExprOperand());
599  }
600}
601
602//------------------------------------------------------------------------====//
603// High-level "driver" logic for uninitialized values analysis.
604//====------------------------------------------------------------------------//
605
/// Process one CFG block: merge the values flowing in from its predecessors,
/// run the transfer functions over its statements, and store the result.
/// Returns true if the block's stored values changed.  Diagnostics are only
/// emitted when 'handler' is non-null (the final reporting pass).
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  wasAnalyzed[block->getBlockID()] = true;

  // Blocks in a '&&'/'||' chain are handled edge-sensitively: they keep a
  // separate value vector for the true and false successor edges.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
658
/// Entry point of the analysis: run a forward dataflow pass over 'cfg' to a
/// fixed point, then a final pass that reports uses of uninitialized
/// variables through 'handler'.
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;  // Nothing to track in this context.

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist: iterate until no block's values change.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    if (wasAnalyzed[(*BI)->getBlockID()]) {
      runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler,
                 /* flagBlockUses */ true);
      ++stats.NumBlockVisits;
    }
  }
}
709
// Out-of-line destructor for the handler interface declared in
// Analysis/Analyses/UninitializedValues.h (included above).
UninitVariablesHandler::~UninitVariablesHandler() {}
711