UninitializedValues.cpp revision 75c4064932d481ac710a80aa88b3370ad8a6af1d
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include <cstring>  // for memset() used in CFGBlockValues::CFGBlockValues
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Support/SaveAndRestore.h"

using namespace clang;

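// A variable is "tracked" only if it is a local, non-static, non-exception
// variable of scalar or vector type declared directly in the DeclContext
// being analyzed; everything else is ignored by this analysis.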
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
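// Merging two values with a bitwise OR combines what is known along each
// path, e.g.:
//   Unknown       | Initialized   == Initialized
//   Initialized   | Uninitialized == MayUninitialized
//   Uninitialized | Uninitialized == Uninitialized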

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
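// ValueVector packs one two-bit Value per tracked variable into a single
// llvm::BitVector: bit (idx << 1) holds the low bit of the Value and bit
// ((idx << 1) | 1) holds the high bit, so n variables occupy 2n bits.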
class ValueVector {
  llvm::BitVector vec;
public:
  ValueVector() {}
  ValueVector(unsigned size) : vec(size << 1) {}
  void resize(unsigned n) { vec.resize(n << 1); }
  void merge(const ValueVector &rhs) { vec |= rhs.vec; }
  bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; }
  void reset() { vec.reset(); }

  class reference {
    ValueVector &vv;
    const unsigned idx;

    reference();  // Undefined
  public:
    reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {}
    ~reference() {}

    reference &operator=(Value v) {
      vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false;
      vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? true : false;
      return *this;
    }
    operator Value() {
      unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0);
      return (Value) x;
    }
  };

  reference operator[](unsigned idx) { return reference(*this, idx); }
};

typedef std::pair<ValueVector *, ValueVector *> BVPair;

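// CFGBlockValues owns one (ValueVector*, ValueVector*) pair per CFG block.
// For most blocks only 'first' is used.  Blocks that take part in a
// short-circuit ('&&'/'||') chain also use 'second', so that the values
// flowing to the true and false successor edges can differ.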
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  bool hasEntry(const VarDecl *vd) const {
    return declToIndex.getValueIndex(vd).hasValue();
  }

  bool hasValues(const CFGBlock *block);

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals = new std::pair<ValueVector*, ValueVector*>[n];
  memset((void*)vals, 0, sizeof(*vals) * n);
}

CFGBlockValues::~CFGBlockValues() {
  unsigned n = cfg.getNumBlockIDs();
  if (n == 0)
    return;
  for (unsigned i = 0; i < n; ++i) {
    delete vals[i].first;
    delete vals[i].second;
  }
  delete [] vals;
}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}

ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}

/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements.  If such an expression is found, it is returned.
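/// For example, 'x && y' gives rise to a confluence block whose only element
/// is the '&&' expression itself, with one predecessor for each way the
/// short-circuit can complete; getValueVector() and runOnBlock() treat such
/// blocks specially so that the values reaching each branch stay distinct.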
static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  if (block->pred_size() == 2 &&
      ((block->succ_size() == 2 && block->getTerminatorCondition() == b) ||
       block->size() == 1))
    return b;

  return 0;
}

ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}

bool CFGBlockValues::hasValues(const CFGBlock *block) {
  unsigned idx = block->getBlockID();
  return vals[idx].second != 0;
}

BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
                                        bool shouldLazyCreate) {
  unsigned idx = block->getBlockID();
  lazyCreate(vals[idx].first);
  if (shouldLazyCreate)
    lazyCreate(vals[idx].second);
  return vals[idx];
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch.merge(source);
}
#if 0
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}

bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                        const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
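// A simple worklist of CFG blocks awaiting (re)processing; blocks are popped
// in LIFO order, and 'enqueuedBlocks' records membership so that a block is
// never enqueued twice.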
class DataflowWorklist {
  llvm::SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueue(const CFGBlock *block);
  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueue(const CFGBlock *block) {
  if (!block)
    return;
  unsigned idx = block->getBlockID();
  if (enqueuedBlocks[idx])
    return;
  worklist.push_back(block);
  enqueuedBlocks[idx] = true;
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    enqueue(*I);
  }
}

const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
//====------------------------------------------------------------------------//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

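// TransferFunctions is the per-statement transfer function of the analysis.
// It visits the statements of a single CFG block, updating the scratch
// ValueVector held by 'vals', and (when a handler is supplied) reports loads
// of variables that are, or may be, uninitialized.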
class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  AnalysisContext &ac;
  UninitVariablesHandler *handler;
  const DeclRefExpr *currentDR;
  const Expr *currentVoidCast;
  const bool flagBlockUses;
public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    AnalysisContext &ac,
                    UninitVariablesHandler *handler,
                    bool flagBlockUses)
    : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0),
      currentVoidCast(0), flagBlockUses(flagBlockUses) {}

  const CFG &getCFG() { return cfg; }
  void reportUninit(const DeclRefExpr *ex, const VarDecl *vd,
                    bool isAlwaysUninit);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se);
  void VisitCXXTypeidExpr(CXXTypeidExpr *E);
  void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findBlockVarDecl(Expr *ex);
};
}

void TransferFunctions::reportUninit(const DeclRefExpr *ex,
                                     const VarDecl *vd, bool isAlwaysUninit) {
  if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUninit);
}

FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) {
  if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
      if (isTrackedVar(vd))
        return FindVarResult(vd, dr);
  return FindVarResult(0, 0);
}

void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt(
    ObjCForCollectionStmt *fs) {

  Visit(fs->getCollection());

  // This represents an initialization of the 'element' value.
  Stmt *element = fs->getElement();
  const VarDecl* vd = 0;

  if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) {
    vd = cast<VarDecl>(ds->getSingleDecl());
    if (!isTrackedVar(vd))
      vd = 0;
  }
  else {
    // Initialize the value of the reference variable.
    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
    vd = res.getDecl();
    if (!vd) {
      Visit(element);
      return;
    }
  }

  if (vd)
    vals[vd] = Initialized;
}

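// Capturing a variable by copy in a block literal reads its current value,
// so a by-copy capture of a (possibly) uninitialized variable is reported
// when block uses are being flagged.  By-reference (__block) captures are
// conservatively treated as initializing the variable.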
void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  if (!flagBlockUses || !handler)
    return;
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!vd->hasLocalStorage())
      continue;
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    Value v = vals[vd];
    if (isUninitialized(v))
      handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
       DI != DE; ++DI) {
    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
      if (isTrackedVar(vd)) {
        if (Expr *init = vd->getInit()) {
          Visit(init);

          // If the initializer consists solely of a reference to itself, we
          // explicitly mark the variable as uninitialized. This allows code
          // like the following:
          //
          //   int x = x;
          //
          // to deliberately leave a variable uninitialized. Different analysis
          // clients can detect this pattern and adjust their reporting
          // appropriately, but we need to continue to analyze subsequent uses
          // of the variable.
          DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(init->IgnoreParenImpCasts());
          vals[vd] = (DRE && DRE->getDecl() == vd) ? Uninitialized
                                                   : Initialized;
        }
      } else if (Stmt *init = vd->getInit()) {
        Visit(init);
      }
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
  // cannot be block-level expressions.  Therefore, we determine if
  // a DeclRefExpr is involved in a "load" by comparing it to the current
  // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
  // If a DeclRefExpr is not involved in a load, we are essentially computing
  // its address, either for assignment to a reference or via the '&' operator.
  // In such cases, treat the variable as being initialized, since this
  // analysis isn't powerful enough to do alias tracking.
  if (dr != currentDR)
    if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
      if (isTrackedVar(vd))
        vals[vd] = Initialized;
}

void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
  if (bo->isAssignmentOp()) {
    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
    if (const VarDecl* vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment"
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(bo->getRHS());
      Visit(bo->getLHS());

      ValueVector::reference val = vals[vd];
      if (isUninitialized(val)) {
        if (bo->getOpcode() != BO_Assign)
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
        val = Initialized;
      }
      return;
    }
  }
  Visit(bo->getRHS());
  Visit(bo->getLHS());
}

void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
  switch (uo->getOpcode()) {
    case clang::UO_PostDec:
    case clang::UO_PostInc:
    case clang::UO_PreDec:
    case clang::UO_PreInc: {
      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
      if (const VarDecl *vd = res.getDecl()) {
        // We assume that DeclRefExprs wrapped in a unary operator ++/--
        // cannot be block-level expressions.  Therefore, we determine if
        // a DeclRefExpr is involved in a "load" by comparing it to the current
        // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
        SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                  res.getDeclRefExpr());
        Visit(uo->getSubExpr());

        ValueVector::reference val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          val = Initialized;
        }
        return;
      }
      break;
    }
    default:
      break;
  }
  Visit(uo->getSubExpr());
}

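// Loads happen through lvalue-to-rvalue casts.  A load of an uninitialized
// variable is reported here, except when the enclosing expression is a
// C-style cast to void (e.g. '(void) x;'), which acts as an explicit
// suppression of the warning.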
void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (const VarDecl *vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
      // cannot be block-level expressions.  Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      // Here we update 'currentDR' to be the one associated with this
      // lvalue-to-rvalue cast.  Then, when we analyze the DeclRefExpr, we
      // will know that we are not computing its lvalue for other purposes
      // than to perform a load.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(ce->getSubExpr());
      if (currentVoidCast != ce) {
        Value val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          vals[vd] = Initialized;
        }
      }
      return;
    }
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;
      SaveAndRestore<const Expr *>
        lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
      Visit(cse->getSubExpr());
      return;
    }
  }
  Visit(ce->getSubExpr());
}

void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
                                          UnaryExprOrTypeTraitExpr *se) {
  if (se->getKind() == UETT_SizeOf) {
    if (se->getType()->isConstantSizeType())
      return;
    // Handle VLAs.
    Visit(se->getArgumentExpr());
  }
}

void TransferFunctions::VisitCXXTypeidExpr(CXXTypeidExpr *E) {
  // typeid(expression) is potentially evaluated when the argument is
  // a glvalue of polymorphic type. (C++ 5.2.8p2-3)
  if (!E->isTypeOperand() && E->Classify(ac.getASTContext()).isGLValue()) {
    QualType SubExprTy = E->getExprOperand()->getType();
    if (const RecordType *Record = SubExprTy->getAs<RecordType>())
      if (cast<CXXRecordDecl>(Record->getDecl())->isPolymorphic())
        Visit(E->getExprOperand());
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  wasAnalyzed[block->getBlockID()] = true;

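  // Blocks that take part in a '&&'/'||' chain keep a separate value vector
  // for each successor edge: 'first' feeds the true branch and 'second' the
  // false branch.  Combine the incoming edge vectors accordingly instead of
  // merging everything into a single vector.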
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

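// Driver: mark every tracked variable Uninitialized at the entry block, run
// the worklist to a fixed point, and then make one more pass over the
// analyzed blocks with the handler attached to emit diagnostics.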
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    if (wasAnalyzed[(*BI)->getBlockID()])
      runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler,
                 /* flagBlockUses */ true);
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}