UninitializedValues.cpp revision b831c673621c5587642343cace9def134916a17b
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Support/SaveAndRestore.h"

using namespace clang;

/// Returns true if the analysis should track 'vd': a local variable
/// without global storage, declared directly within 'dc', whose type is
/// scalar or vector.  Aggregates and variables belonging to enclosing
/// contexts are deliberately not tracked.
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
/// Assigns each tracked VarDecl of a DeclContext a dense index, used to
/// address that variable's two bits inside a ValueVector.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

/// Walk the declarations of 'dc' in order and number every tracked
/// variable consecutively, starting at 0.
void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

/// Look up the index for 'd'; returns an unset Optional for variables
/// that are not tracked.
llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// Both 'Uninitialized' (definitely) and 'MayUninitialized' (on some path)
// have the high bit set.
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
/// A vector of two-bit dataflow values, one Value per tracked variable.
/// Merging two vectors is a plain bitwise OR of the underlying bits,
/// which is exactly the lattice join for the encoding above.
class ValueVector {
  llvm::BitVector vec; // Holds 2 * (number of tracked variables) bits.
public:
  ValueVector() {}
  ValueVector(unsigned size) : vec(size << 1) {}
  void resize(unsigned n) { vec.resize(n << 1); }
  void merge(const ValueVector &rhs) { vec |= rhs.vec; }
  bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; }
  void reset() { vec.reset(); }

  /// Proxy giving read/write access to the two bits that encode one
  /// variable's Value.
  class reference {
    ValueVector &vv;
    const unsigned idx;

    reference();  // Undefined
  public:
    reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {}
    ~reference() {}

    reference &operator=(Value v) {
      // Bit (idx*2) stores the low bit of 'v'; bit (idx*2 + 1) the high bit.
      vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false;
      vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? true : false;
      return *this;
    }
    operator Value() {
      unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0);
      return (Value) x;
    }
  };

  reference operator[](unsigned idx) { return reference(*this, idx); }
};

typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the per-block dataflow values for a CFG.  Each block normally has
/// a single ValueVector (pair.first); blocks that form short-circuit
/// '&&'/'||' chains may carry a second vector (pair.second) for their
/// other out-edge.  'scratch' is a reusable temporary for merges.
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;                 // One (lazily populated) pair per block ID.
  ValueVector scratch;
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  bool hasEntry(const VarDecl *vd) const {
    return declToIndex.getValueIndex(vd).hasValue();
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  // Allocate one null pair per block; the vectors themselves are created
  // lazily on first access.
  vals = new std::pair<ValueVector*, ValueVector*>[n];
  memset(vals, 0, sizeof(*vals) * n);
}

CFGBlockValues::~CFGBlockValues() {
  unsigned n = cfg.getNumBlockIDs();
  if (n == 0)
    return;
  for (unsigned i = 0; i < n; ++i) {
    delete vals[i].first;
    delete vals[i].second;
  }
  delete [] vals;
}

/// Number the tracked declarations of 'dc' and size the scratch vector
/// accordingly.  Must run before any transfer functions are applied.
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}
/// Allocate 'bv' on first use, sized to the number of tracked variables.
ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}

/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements.  If such an expression is found, it is returned.
static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  // The block must look like part of a short-circuit chain: two
  // predecessors, and either the operator is the branch condition of a
  // two-successor block or it is the block's only element.
  if (block->pred_size() == 2 &&
      ((block->succ_size() == 2 && block->getTerminatorCondition() == b) ||
       block->size() == 1))
    return b;

  return 0;
}

/// Return the value vector of 'block' for the edge leading to 'dstBlock'.
/// Logical-operator blocks keep one vector per successor edge (.first for
/// the first successor, .second for the other); every other block uses
/// only .first.
ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  // Ordinary blocks must never have grown a second vector.
  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}

/// Return both vectors of 'block', creating the first (and, when
/// 'shouldLazyCreate' is set, the second) on demand.
BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
                                        bool shouldLazyCreate) {
  unsigned idx = block->getBlockID();
  lazyCreate(vals[idx].first);
  if (shouldLazyCreate)
    lazyCreate(vals[idx].second);
  return vals[idx];
}

/// Accumulate 'source' into the scratch vector: plain copy on the first
/// merge, bitwise-OR (lattice join) afterwards.
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch.merge(source);
}

#if 0
// Disabled debugging aid.  NOTE(review): ValueVector exposes no size()
// member in this revision, so this would need updating before re-enabling.
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

/// Copy the scratch values into 'block''s (single) vector.  Returns true
/// if the stored values changed, i.e. the fixpoint is not yet reached.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}

/// Copy both vectors of 'newVals' into 'block''s pair.  Returns true if
/// either vector changed.  Both sides of each pair must be non-null.
bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                        const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

/// Access the scratch value of a tracked variable; asserts that 'vd' is
/// actually tracked.
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
299//====------------------------------------------------------------------------// 300 301namespace { 302class DataflowWorklist { 303 llvm::SmallVector<const CFGBlock *, 20> worklist; 304 llvm::BitVector enqueuedBlocks; 305public: 306 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 307 308 void enqueue(const CFGBlock *block); 309 void enqueueSuccessors(const CFGBlock *block); 310 const CFGBlock *dequeue(); 311 312}; 313} 314 315void DataflowWorklist::enqueue(const CFGBlock *block) { 316 if (!block) 317 return; 318 unsigned idx = block->getBlockID(); 319 if (enqueuedBlocks[idx]) 320 return; 321 worklist.push_back(block); 322 enqueuedBlocks[idx] = true; 323} 324 325void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 326 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 327 E = block->succ_end(); I != E; ++I) { 328 enqueue(*I); 329 } 330} 331 332const CFGBlock *DataflowWorklist::dequeue() { 333 if (worklist.empty()) 334 return 0; 335 const CFGBlock *b = worklist.back(); 336 worklist.pop_back(); 337 enqueuedBlocks[b->getBlockID()] = false; 338 return b; 339} 340 341//------------------------------------------------------------------------====// 342// Transfer function for uninitialized values analysis. 
343//====------------------------------------------------------------------------// 344 345namespace { 346class FindVarResult { 347 const VarDecl *vd; 348 const DeclRefExpr *dr; 349public: 350 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 351 352 const DeclRefExpr *getDeclRefExpr() const { return dr; } 353 const VarDecl *getDecl() const { return vd; } 354}; 355 356class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> { 357 CFGBlockValues &vals; 358 const CFG &cfg; 359 AnalysisContext ∾ 360 UninitVariablesHandler *handler; 361 const DeclRefExpr *currentDR; 362 const Expr *currentVoidCast; 363 const bool flagBlockUses; 364public: 365 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 366 AnalysisContext &ac, 367 UninitVariablesHandler *handler, 368 bool flagBlockUses) 369 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0), 370 currentVoidCast(0), flagBlockUses(flagBlockUses) {} 371 372 const CFG &getCFG() { return cfg; } 373 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 374 bool isAlwaysUninit); 375 376 void VisitBlockExpr(BlockExpr *be); 377 void VisitDeclStmt(DeclStmt *ds); 378 void VisitDeclRefExpr(DeclRefExpr *dr); 379 void VisitUnaryOperator(UnaryOperator *uo); 380 void VisitBinaryOperator(BinaryOperator *bo); 381 void VisitCastExpr(CastExpr *ce); 382 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se); 383 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 384 385 bool isTrackedVar(const VarDecl *vd) { 386#if 1 387 // FIXME: This is a temporary workaround to deal with the fact 388 // that DeclContext's do not always contain all of their variables! 
389 return vals.hasEntry(vd); 390#else 391 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 392#endif 393 } 394 395 FindVarResult findBlockVarDecl(Expr *ex); 396}; 397} 398 399void TransferFunctions::reportUninit(const DeclRefExpr *ex, 400 const VarDecl *vd, bool isAlwaysUnit) { 401 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 402} 403 404FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 405 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 406 if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 407 if (isTrackedVar(vd)) 408 return FindVarResult(vd, dr); 409 return FindVarResult(0, 0); 410} 411 412void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt( 413 ObjCForCollectionStmt *fs) { 414 415 Visit(fs->getCollection()); 416 417 // This represents an initialization of the 'element' value. 418 Stmt *element = fs->getElement(); 419 const VarDecl* vd = 0; 420 421 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 422 vd = cast<VarDecl>(ds->getSingleDecl()); 423 if (!isTrackedVar(vd)) 424 vd = 0; 425 } 426 else { 427 // Initialize the value of the reference variable. 
428 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 429 vd = res.getDecl(); 430 if (!vd) { 431 Visit(element); 432 return; 433 } 434 } 435 436 if (vd) 437 vals[vd] = Initialized; 438} 439 440void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 441 if (!flagBlockUses || !handler) 442 return; 443 AnalysisContext::referenced_decls_iterator i, e; 444 llvm::tie(i, e) = ac.getReferencedBlockVars(be->getBlockDecl()); 445 for ( ; i != e; ++i) { 446 const VarDecl *vd = *i; 447 if (vd->getAttr<BlocksAttr>() || !vd->hasLocalStorage() || 448 !isTrackedVar(vd)) 449 continue; 450 Value v = vals[vd]; 451 if (isUninitialized(v)) 452 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 453 } 454} 455 456void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 457 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 458 DI != DE; ++DI) { 459 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 460 if (isTrackedVar(vd)) { 461 vals[vd] = Uninitialized; 462 if (Stmt *init = vd->getInit()) { 463 Visit(init); 464 vals[vd] = Initialized; 465 } 466 } 467 else if (Stmt *init = vd->getInit()) { 468 Visit(init); 469 } 470 } 471 } 472} 473 474void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 475 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 476 // cannot be block-level expressions. Therefore, we determine if 477 // a DeclRefExpr is involved in a "load" by comparing it to the current 478 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 479 // If a DeclRefExpr is not involved in a load, we are essentially computing 480 // its address, either for assignment to a reference or via the '&' operator. 481 // In such cases, treat the variable as being initialized, since this 482 // analysis isn't powerful enough to do alias tracking. 
483 if (dr != currentDR) 484 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 485 if (isTrackedVar(vd)) 486 vals[vd] = Initialized; 487} 488 489void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 490 if (bo->isAssignmentOp()) { 491 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 492 if (const VarDecl* vd = res.getDecl()) { 493 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment" 494 // cannot be block-level expressions. Therefore, we determine if 495 // a DeclRefExpr is involved in a "load" by comparing it to the current 496 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 497 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 498 res.getDeclRefExpr()); 499 Visit(bo->getRHS()); 500 Visit(bo->getLHS()); 501 502 ValueVector::reference val = vals[vd]; 503 if (isUninitialized(val)) { 504 if (bo->getOpcode() != BO_Assign) 505 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 506 val = Initialized; 507 } 508 return; 509 } 510 } 511 Visit(bo->getRHS()); 512 Visit(bo->getLHS()); 513} 514 515void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 516 switch (uo->getOpcode()) { 517 case clang::UO_PostDec: 518 case clang::UO_PostInc: 519 case clang::UO_PreDec: 520 case clang::UO_PreInc: { 521 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 522 if (const VarDecl *vd = res.getDecl()) { 523 // We assume that DeclRefExprs wrapped in a unary operator ++/-- 524 // cannot be block-level expressions. Therefore, we determine if 525 // a DeclRefExpr is involved in a "load" by comparing it to the current 526 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 527 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 528 res.getDeclRefExpr()); 529 Visit(uo->getSubExpr()); 530 531 ValueVector::reference val = vals[vd]; 532 if (isUninitialized(val)) { 533 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 534 // Don't cascade warnings. 
535 val = Initialized; 536 } 537 return; 538 } 539 break; 540 } 541 default: 542 break; 543 } 544 Visit(uo->getSubExpr()); 545} 546 547void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 548 if (ce->getCastKind() == CK_LValueToRValue) { 549 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 550 if (const VarDecl *vd = res.getDecl()) { 551 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 552 // cannot be block-level expressions. Therefore, we determine if 553 // a DeclRefExpr is involved in a "load" by comparing it to the current 554 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 555 // Here we update 'currentDR' to be the one associated with this 556 // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we 557 // will know that we are not computing its lvalue for other purposes 558 // than to perform a load. 559 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 560 res.getDeclRefExpr()); 561 Visit(ce->getSubExpr()); 562 if (currentVoidCast != ce) { 563 Value val = vals[vd]; 564 if (isUninitialized(val)) { 565 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 566 // Don't cascade warnings. 567 vals[vd] = Initialized; 568 } 569 } 570 return; 571 } 572 } 573 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) { 574 if (cse->getType()->isVoidType()) { 575 // e.g. (void) x; 576 SaveAndRestore<const Expr *> 577 lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens()); 578 Visit(cse->getSubExpr()); 579 return; 580 } 581 } 582 Visit(ce->getSubExpr()); 583} 584 585void TransferFunctions::VisitUnaryExprOrTypeTraitExpr( 586 UnaryExprOrTypeTraitExpr *se) { 587 if (se->getKind() == UETT_SizeOf) { 588 if (se->getType()->isConstantSizeType()) 589 return; 590 // Handle VLAs. 591 Visit(se->getArgumentExpr()); 592 } 593} 594 595//------------------------------------------------------------------------====// 596// High-level "driver" logic for uninitialized values analysis. 
//====------------------------------------------------------------------------//

/// Merge the values flowing into 'block' from its predecessors, run the
/// transfer function over its statements, and store the result.  Returns
/// true if the block's stored values changed.  When 'handler' is given,
/// uninitialized uses are reported as a side effect of the visit.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  // Blocks in a short-circuit '&&'/'||' chain keep a separate vector per
  // out-edge (pair .first/.second); merge them edge-by-edge instead of
  // running the transfer function.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

/// Public entry point: iterate the CFG to a dataflow fixpoint using a
/// worklist, then make one final pass over every block to report uses of
/// uninitialized variables through 'handler'.
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;  // Nothing to track; skip the analysis entirely.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());

  worklist.enqueueSuccessors(&cfg.getEntry());

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?  (Also propagate on a block's first visit.)
    bool changed = runOnBlock(block, cfg, ac, vals);
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    runOnBlock(*BI, cfg, ac, vals, &handler, /* flagBlockUses */ true);
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}