UninitializedValues.cpp revision da3d76b4cfbb5ebeb79e03a0abeabd403fe9260a
//===- UninitializedValues.cpp - Find Uninitialized Values ------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <algorithm>  // std::max, std::rotate
#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/DomainSpecific/ObjCNoReturn.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;

#define DEBUG_LOGGING 0

static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
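// For example, in 'void f() { int x; static int s; struct S agg; }' only 'x'
// is tracked: 's' has global storage, and 'agg' has an aggregate type that is
// neither scalar nor vector.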

//===----------------------------------------------------------------------===//
// DeclToIndex: a mapping from Decls we track to value indices.
//===----------------------------------------------------------------------===//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//===----------------------------------------------------------------------===//
// CFGBlockValues: dataflow values for CFG blocks.
//===----------------------------------------------------------------------===//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
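// For example, merging a path on which a variable is Initialized (01) with a
// path on which it is Uninitialized (10) yields MayUninitialized (11);
// isUninitialized() is then true for both Uninitialized and MayUninitialized,
// while isAlwaysUninit() holds only for Uninitialized.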

namespace {

typedef llvm::PackedVector<Value, 2, llvm::SmallBitVector> ValueVector;

class CFGBlockValues {
  const CFG &cfg;
  SmallVector<ValueVector, 8> vals;
  ValueVector scratch;
  DeclToIndex declToIndex;
public:
  CFGBlockValues(const CFG &cfg);

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block) {
    return vals[block->getBlockID()];
  }

  void setAllScratchValues(Value V);
  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();

  ValueVector::reference operator[](const VarDecl *vd);

  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block)[idx.getValue()];
  }
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  unsigned decls = declToIndex.size();
  scratch.resize(decls);
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals.resize(n);
  for (unsigned i = 0; i < n; ++i)
    vals[i].resize(decls);
}

#if DEBUG_LOGGING
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {
  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

void CFGBlockValues::setAllScratchValues(Value V) {
  for (unsigned I = 0, E = scratch.size(); I != E; ++I)
    scratch[I] = V;
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if DEBUG_LOGGING
  printVector(block, scratch, 0);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//===----------------------------------------------------------------------===//
// Worklist: worklist for dataflow analysis.
//===----------------------------------------------------------------------===//

namespace {
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
    return;

  // Rotate the newly added blocks to the start of the worklist so that it
  // forms a proper queue when we pop off the end of the worklist.
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
              worklist.end());
}
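// For example, if the worklist was [A, B] and C and D were just appended, the
// rotate produces [C, D, A, B]; popping from the back then yields B, A, D, C,
// so the previously enqueued blocks are processed before the new ones.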

const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//===----------------------------------------------------------------------===//
// Classification of DeclRefExprs as use or initialization.
//===----------------------------------------------------------------------===//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(const VarDecl *vd, const DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}
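// For example, given 'float f; ... reinterpret_cast<int&>(f) ...', stripCasts
// looks through the parens, the no-op casts, and the CK_LValueBitCast to
// recover the DeclRefExpr naming 'f'.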

/// If E is an expression comprising a reference to a single variable, find
/// that variable.
static FindVarResult findVar(const Expr *E, const DeclContext *DC) {
  if (const DeclRefExpr *DRE =
        dyn_cast<DeclRefExpr>(stripCasts(DC->getParentASTContext(), E)))
    if (const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl()))
      if (isTrackedVar(VD, DC))
        return FindVarResult(VD, DRE);
  return FindVarResult(0, 0);
}

/// \brief Classify each DeclRefExpr as an initialization or a use. Any
/// DeclRefExpr which isn't explicitly classified will be assumed to have
/// escaped the analysis and will be treated as an initialization.
class ClassifyRefs : public StmtVisitor<ClassifyRefs> {
public:
  enum Class {
    Init,
    Use,
    SelfInit,
    Ignore
  };

private:
  const DeclContext *DC;
  llvm::DenseMap<const DeclRefExpr*, Class> Classification;

  bool isTrackedVar(const VarDecl *VD) const {
    return ::isTrackedVar(VD, DC);
  }

  void classify(const Expr *E, Class C);

public:
  ClassifyRefs(AnalysisDeclContext &AC) : DC(cast<DeclContext>(AC.getDecl())) {}

  void VisitDeclStmt(DeclStmt *DS);
  void VisitUnaryOperator(UnaryOperator *UO);
  void VisitBinaryOperator(BinaryOperator *BO);
  void VisitCallExpr(CallExpr *CE);
  void VisitCastExpr(CastExpr *CE);

  void operator()(Stmt *S) { Visit(S); }

  Class get(const DeclRefExpr *DRE) const {
    llvm::DenseMap<const DeclRefExpr*, Class>::const_iterator I
        = Classification.find(DRE);
    if (I != Classification.end())
      return I->second;

    const VarDecl *VD = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VD || !isTrackedVar(VD))
      return Ignore;

    return Init;
  }
};
}

static const DeclRefExpr *getSelfInitExpr(VarDecl *VD) {
  if (Expr *Init = VD->getInit()) {
    const DeclRefExpr *DRE
      = dyn_cast<DeclRefExpr>(stripCasts(VD->getASTContext(), Init));
    if (DRE && DRE->getDecl() == VD)
      return DRE;
  }
  return 0;
}
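// For example, for 'int x = x;' getSelfInitExpr returns the DeclRefExpr for
// the 'x' on the right-hand side; any other initializer yields a null result.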

void ClassifyRefs::classify(const Expr *E, Class C) {
  FindVarResult Var = findVar(E, DC);
  if (const DeclRefExpr *DRE = Var.getDeclRefExpr())
    Classification[DRE] = std::max(Classification[DRE], C);
}

void ClassifyRefs::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD))
      if (const DeclRefExpr *DRE = getSelfInitExpr(VD))
        Classification[DRE] = SelfInit;
  }
}

void ClassifyRefs::VisitBinaryOperator(BinaryOperator *BO) {
  // Ignore the evaluation of a DeclRefExpr on the LHS of a plain assignment:
  // TransferFunctions will treat the assignment as initializing the variable,
  // so there is no point also counting the LHS as a use. A compound-assignment,
  // by contrast, reads the variable's previous value, so its LHS is a use; the
  // compound-assignment itself does not affect whether the variable is
  // uninitialized.
  if (BO->isCompoundAssignmentOp())
    classify(BO->getLHS(), Use);
  else if (BO->getOpcode() == BO_Assign)
    classify(BO->getLHS(), Ignore);
}
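// For example, in 'x = 1;' the LHS 'x' is classified Ignore (TransferFunctions
// will mark 'x' Initialized at the assignment), whereas in 'x += 1;' the LHS
// is classified Use, since the old value of 'x' is read.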

void ClassifyRefs::VisitUnaryOperator(UnaryOperator *UO) {
  // Increment and decrement are uses despite there being no lvalue-to-rvalue
  // conversion.
  if (UO->isIncrementDecrementOp())
    classify(UO->getSubExpr(), Use);
}

void ClassifyRefs::VisitCallExpr(CallExpr *CE) {
  // If a value is passed by const reference to a function, we should not
  // assume that it is initialized by the call, and we conservatively do not
  // assume that it is used.
  for (CallExpr::arg_iterator I = CE->arg_begin(), E = CE->arg_end();
       I != E; ++I)
    if ((*I)->getType().isConstQualified() && (*I)->isGLValue())
      classify(*I, Ignore);
}
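// For example, given 'void observe(const int &); int x; observe(x);', the
// argument is a const-qualified glvalue, so 'x' is classified Ignore: the
// call is not assumed to initialize 'x', nor is it counted as a use.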

void ClassifyRefs::VisitCastExpr(CastExpr *CE) {
  if (CE->getCastKind() == CK_LValueToRValue)
    classify(CE->getSubExpr(), Use);
  else if (CStyleCastExpr *CSE = dyn_cast<CStyleCastExpr>(CE)) {
    if (CSE->getType()->isVoidType()) {
      // Squelch any detected load of an uninitialized value if
      // we cast it to void.
      // e.g. (void) x;
      classify(CSE->getSubExpr(), Ignore);
    }
  }
}

//===----------------------------------------------------------------------===//
// Transfer function for uninitialized values analysis.
//===----------------------------------------------------------------------===//

namespace {
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  const ClassifyRefs &classification;
  ObjCNoReturn objCNoRet;
  UninitVariablesHandler *handler;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    const ClassifyRefs &classification,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac),
      classification(classification), objCNoRet(ac.getASTContext()),
      handler(handler) {}

  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitBlockExpr(BlockExpr *be);
  void VisitCallExpr(CallExpr *ce);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS);
  void VisitObjCMessageExpr(ObjCMessageExpr *ME);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findVar(const Expr *ex) {
    return ::findVar(ex, cast<DeclContext>(ac.getDecl()));
  }

  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        unsigned &SV = SuccsVisited[Pred->getBlockID()];
        if (!SV) {
          // When visiting the first successor of a block, mark all NULL
          // successors as having been visited.
          for (CFGBlock::const_succ_iterator SI = Pred->succ_begin(),
                                             SE = Pred->succ_end();
               SI != SE; ++SI)
            if (!*SI)
              ++SV;
        }

        if (++SV == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }
};
}

void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
  if (!handler)
    return;
  Value v = vals[vd];
  if (isUninitialized(v))
    handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *FS) {
  // This represents an initialization of the 'element' value.
  if (DeclStmt *DS = dyn_cast<DeclStmt>(FS->getElement())) {
    const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
    if (isTrackedVar(VD))
      vals[VD] = Initialized;
  }
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    reportUse(be, vd);
  }
}
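// For example, a block like '^{ return x; }' that captures an uninitialized
// 'int x' by value is reported as a use of 'x', while a '__block' (by-ref)
// capture is conservatively treated as initializing 'x', since the block may
// write to it before it is read.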

void TransferFunctions::VisitCallExpr(CallExpr *ce) {
  if (Decl *Callee = ce->getCalleeDecl()) {
    if (Callee->hasAttr<ReturnsTwiceAttr>()) {
      // After a call to a function like setjmp or vfork, any variable which is
      // initialized anywhere within this function may now be initialized. For
      // now, just assume such a call initializes all variables.  FIXME: Only
      // mark variables as initialized if they have an initializer which is
      // reachable from here.
      vals.setAllScratchValues(Initialized);
    }
    else if (Callee->hasAttr<AnalyzerNoReturnAttr>()) {
      // Functions labeled "analyzer_noreturn" are often used to denote "panic"
      // functions that in special debug situations can still return, but for
      // the most part should not be treated as returning.  This annotation,
      // borrowed from the static analyzer, is useful for suppressing
      // branch-specific false positives when we call one of these functions
      // but keep pretending the path continues (when in reality the user
      // doesn't care).
      vals.setAllScratchValues(Unknown);
    }
  }
}
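// For example, after 'if (setjmp(env)) ...' control may re-enter the block a
// second time via longjmp, by which point variables initialized later in the
// function may already hold values; hence the blanket Initialized state for a
// returns-twice callee.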

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  switch (classification.get(dr)) {
  case ClassifyRefs::Ignore:
    break;
  case ClassifyRefs::Use:
    reportUse(dr, cast<VarDecl>(dr->getDecl()));
    break;
  case ClassifyRefs::Init:
    vals[cast<VarDecl>(dr->getDecl())] = Initialized;
    break;
  case ClassifyRefs::SelfInit:
    if (handler)
      handler->handleSelfInit(cast<VarDecl>(dr->getDecl()));
    break;
  }
}

void TransferFunctions::VisitBinaryOperator(BinaryOperator *BO) {
  if (BO->getOpcode() == BO_Assign) {
    FindVarResult Var = findVar(BO->getLHS());
    if (const VarDecl *VD = Var.getDecl())
      vals[VD] = Initialized;
  }
}
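// For example, for 'int x; x = f();' the LHS DeclRefExpr was classified Ignore
// by ClassifyRefs, and this visit marks 'x' Initialized when the assignment
// itself is reached.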

void TransferFunctions::VisitDeclStmt(DeclStmt *DS) {
  for (DeclStmt::decl_iterator DI = DS->decl_begin(), DE = DS->decl_end();
       DI != DE; ++DI) {
    VarDecl *VD = dyn_cast<VarDecl>(*DI);
    if (VD && isTrackedVar(VD)) {
      if (getSelfInitExpr(VD)) {
        // If the initializer consists solely of a reference to itself, we
        // explicitly mark the variable as uninitialized. This allows code
        // like the following:
        //
        //   int x = x;
        //
        // to deliberately leave a variable uninitialized. Different analysis
        // clients can detect this pattern and adjust their reporting
        // appropriately, but we need to continue to analyze subsequent uses
        // of the variable.
        vals[VD] = Uninitialized;
      } else if (VD->getInit()) {
        // Treat the new variable as initialized.
        vals[VD] = Initialized;
      } else {
        // No initializer: the variable is now uninitialized. This matters
        // for cases like:
        //   while (...) {
        //     int n;
        //     use(n);
        //     n = 0;
        //   }
        // FIXME: Mark the variable as uninitialized whenever its scope is
        // left, since its scope could be re-entered by a jump over the
        // declaration.
        vals[VD] = Uninitialized;
      }
    }
  }
}

void TransferFunctions::VisitObjCMessageExpr(ObjCMessageExpr *ME) {
  // If the Objective-C message expression is an implicit no-return that
  // is not modeled in the CFG, set the tracked dataflow values to Unknown.
  if (objCNoRet.isImplicitNoReturn(ME)) {
    vals.setAllScratchValues(Unknown);
  }
}

//===----------------------------------------------------------------------===//
// High-level "driver" logic for uninitialized values analysis.
//===----------------------------------------------------------------------===//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       const ClassifyRefs &classification,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {
  wasAnalyzed[block->getBlockID()] = true;
  vals.resetScratch();
  // Merge in values of predecessor blocks.
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, classification, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  return vals.updateValueVectorWithScratch(block);
}
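// For example, a block with two analyzed predecessors that record a variable
// as Initialized (01) and Uninitialized (10) starts from the merged scratch
// value MayUninitialized (11) before its statements are visited.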

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Precompute which expressions are uses and which are initializations.
  ClassifyRefs classification(ac);
  cfg.VisitBlockStmts(classification);

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  ValueVector &vec = vals.getValueVector(&entry);
  const unsigned n = vals.getNumEntries();
  for (unsigned j = 0; j < n; ++j) {
    vec[j] = Uninitialized;
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals,
                              classification, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, classification, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}
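// A minimal sketch of a possible client (illustrative only; the handler
// methods match the call sites above, and 'fd', 'cfg', and 'ac' are assumed
// to come from the surrounding analysis setup):
//
//   struct WarnHandler : UninitVariablesHandler {
//     void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
//       llvm::errs() << "uninitialized use of '" << vd->getName() << "'\n";
//     }
//     void handleSelfInit(const VarDecl *vd) {
//       llvm::errs() << "'" << vd->getName() << "' is self-initialized\n";
//     }
//   };
//
//   WarnHandler H;
//   UninitVariablesAnalysisStats Stats = {};
//   runUninitializedVariablesAnalysis(*fd, *cfg, ac, H, Stats);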

UninitVariablesHandler::~UninitVariablesHandler() {}
783