// UninitializedValues.cpp -- revision da57f3eeab7b7f7f6e6788956f0a0d9adf196a7d
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Support/SaveAndRestore.h"

using namespace clang;

/// Returns true if the analysis should track 'vd': a local, non-static
/// variable declared directly in 'dc' whose type is scalar or vector.
/// Every other declaration is invisible to the dataflow analysis.
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d);
};
}

/// Walk the declarations of 'dc' in order and assign each tracked VarDecl
/// a dense index (0, 1, 2, ...) used to address its bits in a ValueVector.
void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

/// Returns the index for 'd', or an unset Optional if 'd' is not tracked.
llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) {
  llvm::DenseMap<const VarDecl *, unsigned>::iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// True for both Uninitialized and MayUninitialized.
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
// True only for Uninitialized, i.e. uninitialized on every path.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {
/// A vector of two-bit dataflow Values, one per tracked variable.
/// Variable i occupies bits (2i, 2i+1) of the underlying BitVector, so
/// merging two vectors is a single bitwise OR (see enum Value above).
class ValueVector {
  llvm::BitVector vec;
public:
  ValueVector() {}
  ValueVector(unsigned size) : vec(size << 1) {}
  void resize(unsigned n) { vec.resize(n << 1); }
  void merge(const ValueVector &rhs) { vec |= rhs.vec; }
  bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; }
  void reset() { vec.reset(); }

  /// Proxy giving read/write access to the two bits of one variable.
  class reference {
    ValueVector &vv;
    const unsigned idx;

    reference();  // Undefined
  public:
    reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {}
    ~reference() {}

    reference &operator=(Value v) {
      // Low bit then high bit of the two-bit encoding.
      vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false;
      vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? true : false;
      return *this;
    }
    operator Value() {
      unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0);
      return (Value) x;
    }
  };

  reference operator[](unsigned idx) { return reference(*this, idx); }
};

typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the lazily-allocated dataflow vectors for every block of a CFG.
/// Most blocks use a single vector ('first'); blocks recognized by
/// getLogicalOperatorInChain keep a second vector ('second') so the two
/// branch outcomes of '&&'/'||' can carry distinct values.
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;          // one pair per block ID, entries lazily created
  ValueVector scratch;   // working vector reused across blocks
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  // Zero both pointers of every pair; vectors are allocated on demand.
  vals = new std::pair<ValueVector*, ValueVector*>[n];
  memset(vals, 0, sizeof(*vals) * n);
}

CFGBlockValues::~CFGBlockValues() {
  unsigned n = cfg.getNumBlockIDs();
  if (n == 0)
    return;
  for (unsigned i = 0; i < n; ++i) {
    delete vals[i].first;
    delete vals[i].second;
  }
  delete [] vals;
}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}

/// Allocate the vector on first use; later calls return the same object.
ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}

/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements.  If such an expression is found, it is returned.
static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  if (block->pred_size() == 2 &&
      ((block->succ_size() == 2 && block->getTerminatorCondition() == b) ||
       block->size() == 1))
    return b;

  return 0;
}

/// Return the vector of 'block' that feeds into 'dstBlock'.  For a block in
/// a logical-operator chain, the first successor is served by 'first' and
/// the second successor by 'second'; all other blocks have a single vector.
ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}

BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
                                        bool shouldLazyCreate) {
  unsigned idx = block->getBlockID();
  lazyCreate(vals[idx].first);
  if (shouldLazyCreate)
    lazyCreate(vals[idx].second);
  return vals[idx];
}

/// OR 'source' into the scratch vector, or copy it when 'isFirst'.
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch.merge(source);
}
#if 0
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif
/// Copy the scratch vector into the (single) vector of 'block'.  Returns
/// true if the block's values changed, i.e. a fixpoint was not yet reached.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}

/// Overwrite both vectors of 'block' with 'newVals'.  Returns true if
/// either vector changed.
bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                        const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

/// Access the scratch value of 'vd'; asserts that 'vd' is a tracked variable.
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
//====------------------------------------------------------------------------//

namespace {
/// Worklist of CFG blocks for the iterative dataflow solver.  A per-block
/// bit suppresses duplicate enqueues; blocks are dequeued from the back of
/// the vector (LIFO order).
class DataflowWorklist {
  llvm::SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueue(const CFGBlock *block);
  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();

};
}

/// Add 'block' unless it is null or already on the worklist.
void DataflowWorklist::enqueue(const CFGBlock *block) {
  if (!block)
    return;
  unsigned idx = block->getBlockID();
  if (enqueuedBlocks[idx])
    return;
  worklist.push_back(block);
  enqueuedBlocks[idx] = true;
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    enqueue(*I);
  }
}

/// Pop the most recently enqueued block, or return null when empty.
const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//------------------------------------------------------------------------====//
// Transfer function for uninitialized values analysis.
339//====------------------------------------------------------------------------// 340 341namespace { 342class FindVarResult { 343 const VarDecl *vd; 344 const DeclRefExpr *dr; 345public: 346 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 347 348 const DeclRefExpr *getDeclRefExpr() const { return dr; } 349 const VarDecl *getDecl() const { return vd; } 350}; 351 352class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> { 353 CFGBlockValues &vals; 354 const CFG &cfg; 355 AnalysisContext ∾ 356 UninitVariablesHandler *handler; 357 const DeclRefExpr *currentDR; 358 const Expr *currentVoidCast; 359 const bool flagBlockUses; 360public: 361 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 362 AnalysisContext &ac, 363 UninitVariablesHandler *handler, 364 bool flagBlockUses) 365 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0), 366 currentVoidCast(0), flagBlockUses(flagBlockUses) {} 367 368 const CFG &getCFG() { return cfg; } 369 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 370 bool isAlwaysUninit); 371 372 void VisitBlockExpr(BlockExpr *be); 373 void VisitDeclStmt(DeclStmt *ds); 374 void VisitDeclRefExpr(DeclRefExpr *dr); 375 void VisitUnaryOperator(UnaryOperator *uo); 376 void VisitBinaryOperator(BinaryOperator *bo); 377 void VisitCastExpr(CastExpr *ce); 378 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se); 379 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 380 381 bool isTrackedVar(const VarDecl *vd) { 382 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 383 } 384 385 FindVarResult findBlockVarDecl(Expr *ex); 386}; 387} 388 389void TransferFunctions::reportUninit(const DeclRefExpr *ex, 390 const VarDecl *vd, bool isAlwaysUnit) { 391 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 392} 393 394FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 395 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 396 if 
(VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 397 if (isTrackedVar(vd)) 398 return FindVarResult(vd, dr); 399 return FindVarResult(0, 0); 400} 401 402void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt( 403 ObjCForCollectionStmt *fs) { 404 405 Visit(fs->getCollection()); 406 407 // This represents an initialization of the 'element' value. 408 Stmt *element = fs->getElement(); 409 const VarDecl* vd = 0; 410 411 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 412 vd = cast<VarDecl>(ds->getSingleDecl()); 413 if (!isTrackedVar(vd)) 414 vd = 0; 415 } 416 else { 417 // Initialize the value of the reference variable. 418 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 419 vd = res.getDecl(); 420 if (!vd) { 421 Visit(element); 422 return; 423 } 424 } 425 426 if (vd) 427 vals[vd] = Initialized; 428} 429 430void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 431 if (!flagBlockUses || !handler) 432 return; 433 AnalysisContext::referenced_decls_iterator i, e; 434 llvm::tie(i, e) = ac.getReferencedBlockVars(be->getBlockDecl()); 435 for ( ; i != e; ++i) { 436 const VarDecl *vd = *i; 437 if (vd->getAttr<BlocksAttr>() || !vd->hasLocalStorage() || 438 !isTrackedVar(vd)) 439 continue; 440 Value v = vals[vd]; 441 if (isUninitialized(v)) 442 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 443 } 444} 445 446void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 447 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 448 DI != DE; ++DI) { 449 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 450 if (isTrackedVar(vd)) { 451 vals[vd] = Uninitialized; 452 if (Stmt *init = vd->getInit()) { 453 Visit(init); 454 vals[vd] = Initialized; 455 } 456 } 457 else if (Stmt *init = vd->getInit()) { 458 Visit(init); 459 } 460 } 461 } 462} 463 464void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 465 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 466 // cannot be block-level expressions. 
Therefore, we determine if 467 // a DeclRefExpr is involved in a "load" by comparing it to the current 468 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 469 // If a DeclRefExpr is not involved in a load, we are essentially computing 470 // its address, either for assignment to a reference or via the '&' operator. 471 // In such cases, treat the variable as being initialized, since this 472 // analysis isn't powerful enough to do alias tracking. 473 if (dr != currentDR) 474 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 475 if (isTrackedVar(vd)) 476 vals[vd] = Initialized; 477} 478 479void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 480 if (bo->isAssignmentOp()) { 481 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 482 if (const VarDecl* vd = res.getDecl()) { 483 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment" 484 // cannot be block-level expressions. Therefore, we determine if 485 // a DeclRefExpr is involved in a "load" by comparing it to the current 486 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
487 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 488 res.getDeclRefExpr()); 489 Visit(bo->getRHS()); 490 Visit(bo->getLHS()); 491 492 ValueVector::reference val = vals[vd]; 493 if (isUninitialized(val)) { 494 if (bo->getOpcode() != BO_Assign) 495 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 496 val = Initialized; 497 } 498 return; 499 } 500 } 501 Visit(bo->getRHS()); 502 Visit(bo->getLHS()); 503} 504 505void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 506 switch (uo->getOpcode()) { 507 case clang::UO_PostDec: 508 case clang::UO_PostInc: 509 case clang::UO_PreDec: 510 case clang::UO_PreInc: { 511 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 512 if (const VarDecl *vd = res.getDecl()) { 513 // We assume that DeclRefExprs wrapped in a unary operator ++/-- 514 // cannot be block-level expressions. Therefore, we determine if 515 // a DeclRefExpr is involved in a "load" by comparing it to the current 516 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 517 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 518 res.getDeclRefExpr()); 519 Visit(uo->getSubExpr()); 520 521 ValueVector::reference val = vals[vd]; 522 if (isUninitialized(val)) { 523 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 524 // Don't cascade warnings. 525 val = Initialized; 526 } 527 return; 528 } 529 break; 530 } 531 default: 532 break; 533 } 534 Visit(uo->getSubExpr()); 535} 536 537void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 538 if (ce->getCastKind() == CK_LValueToRValue) { 539 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 540 if (const VarDecl *vd = res.getDecl()) { 541 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 542 // cannot be block-level expressions. Therefore, we determine if 543 // a DeclRefExpr is involved in a "load" by comparing it to the current 544 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
      // Here we update 'currentDR' to be the one associated with this
      // lvalue-to-rvalue cast.  Then, when we analyze the DeclRefExpr, we
      // will know that we are not computing its lvalue for other purposes
      // than to perform a load.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(ce->getSubExpr());
      // A load inside '(void) x' is an intentional discard: don't warn.
      if (currentVoidCast != ce) {
        Value val = vals[vd];
        if (isUninitialized(val)) {
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
          // Don't cascade warnings.
          vals[vd] = Initialized;
        }
      }
      return;
    }
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;
      SaveAndRestore<const Expr *>
        lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
      Visit(cse->getSubExpr());
      return;
    }
  }
  Visit(ce->getSubExpr());
}

/// sizeof() of a constant-size type does not evaluate its argument, so
/// only visit the argument expression for VLAs.
void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
                                          UnaryExprOrTypeTraitExpr *se) {
  if (se->getKind() == UETT_SizeOf) {
    if (se->getType()->isConstantSizeType())
      return;
    // Handle VLAs.
    Visit(se->getArgumentExpr());
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

/// Process one block: merge the values flowing in from its predecessors
/// into scratch, apply the per-statement transfer functions, and write the
/// result back.  Returns true if the block's values changed.  Blocks that
/// form a '&&'/'||' chain are handled specially so the true and false
/// outcomes of the condition stay separate.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    // The two predecessors feed the LHS (vA) and RHS (vB) of the operator.
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

/// Entry point: iterate the analysis over 'cfg' to a fixpoint with a
/// worklist, then make one final pass over every block that reports
/// diagnostics through 'handler'.
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  // Nothing to analyze if no variable in 'dc' is tracked.
  if (vals.hasNoDeclarations())
    return;
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());

  worklist.enqueueSuccessors(&cfg.getEntry());

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals);
    // Re-process successors if values changed, or if this block has never
    // been visited (ensures every reachable block is processed at least once).
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    runOnBlock(*BI, cfg, ac, vals, &handler, /* flagBlockUses */ true);
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}