UninitializedValues.cpp revision 5f9e272e632e951b1efe824cd16acb4d96077930
//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Support/SaveAndRestore.h"

using namespace clang;

/// Returns true if the analysis should track 'vd': a local, non-static,
/// non-exception variable of scalar or vector type that is declared
/// directly in 'dc'.  Everything else is ignored by the analysis.
static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}

//------------------------------------------------------------------------====//
// DeclToIndex: a mapping from Decls we track to value indices.
//====------------------------------------------------------------------------//

namespace {
/// Assigns each tracked VarDecl of a DeclContext a dense index, which is
/// used to address the variable's slot in the per-block value vectors.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

/// Walk all VarDecls in 'dc' and assign consecutive indices, in
/// declaration order, to the ones the analysis tracks.
void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

/// Return the index assigned to 'd', or an empty Optional when 'd' is
/// not a tracked variable.
llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//------------------------------------------------------------------------====//
// CFGBlockValues: dataflow values for CFG blocks.
//====------------------------------------------------------------------------//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// True for both Uninitialized (10) and MayUninitialized (11).
static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}

// True only for the definitely-uninitialized state (10).
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

/// One two-bit Value per tracked variable.
typedef llvm::PackedVector<Value, 2> ValueVector;
/// A pair of value vectors for one block.  The second vector is only
/// used for blocks ending a '&&'/'||' chain, where the two successor
/// edges carry different values.
typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the lazily-allocated dataflow value vectors for every block of a
/// CFG, plus a scratch vector used while a single block is processed.
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;            // heap array, one BVPair per block ID
  ValueVector scratch;     // working values for the current block
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  bool hasEntry(const VarDecl *vd) const {
    return declToIndex.getValueIndex(vd).hasValue();
  }

  bool hasValues(const CFGBlock *block);

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace

/// Allocate one (null, null) BVPair per CFG block; the actual value
/// vectors are created lazily on first use.
CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals = new std::pair<ValueVector*, ValueVector*>[n];
  // Zero all the pointer pairs.
  // NOTE(review): memset is used without a visible <cstring> include --
  // presumably pulled in transitively by one of the headers above; verify.
  memset((void*)vals, 0, sizeof(*vals) * n);
}

CFGBlockValues::~CFGBlockValues() {
  unsigned n = cfg.getNumBlockIDs();
  if (n == 0)
    return;
  for (unsigned i = 0; i < n; ++i) {
    delete vals[i].first;
    delete vals[i].second;
  }
  delete [] vals;
}

/// Build the decl-to-index map for 'dc' and size the scratch vector to
/// the number of tracked variables.
void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}

/// Dereference 'bv', allocating a zero-initialized (all-Unknown) vector
/// on first use.
ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}

/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements. If such an expression is found, it is returned.
static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  BinaryOperator *b = dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  // Two shapes qualify: the block branches on 'b' itself (terminator
  // condition is 'b' and there are two successors), or 'b' is the block's
  // single element in the middle of a logical chain.
  if (block->pred_size() == 2) {
    if (block->getTerminatorCondition() == b) {
      if (block->succ_size() == 2)
        return b;
    }
    else if (block->size() == 1)
      return b;
  }

  return 0;
}

/// Return the value vector for the edge 'block' -> 'dstBlock'.  For
/// logical-operator blocks the pair's first/second vectors correspond to
/// the first/second successor edge; all other blocks only ever use the
/// first vector.  Callers pass a null 'dstBlock' for the non-edge view.
ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}

/// Returns true if 'block' has a second (edge-specific) value vector.
bool CFGBlockValues::hasValues(const CFGBlock *block) {
  unsigned idx = block->getBlockID();
  return vals[idx].second != 0;
}

/// Return both value vectors for 'block', creating the first (and, when
/// 'shouldLazyCreate' is set, also the second) on demand.
BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
                                        bool shouldLazyCreate) {
  unsigned idx = block->getBlockID();
  lazyCreate(vals[idx].first);
  if (shouldLazyCreate)
    lazyCreate(vals[idx].second);
  return vals[idx];
}

/// Merge 'source' into the scratch vector.  The first merge is a plain
/// copy; later merges use bitwise OR, which is the join operation the
/// two-bit Value encoding was designed for.
void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
#if 0
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

/// Copy the scratch values into 'block''s primary vector.  Returns true
/// if anything changed, i.e. the fixed point has not yet been reached.
bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}

/// Copy both edge vectors from 'newVals' into 'block''s pair.  Returns
/// true if either vector changed.
bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                        const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}

/// Clear every scratch entry back to Unknown (the all-zero encoding)
/// before processing a new block.
void CFGBlockValues::resetScratch() {
  scratch.reset();
}

/// Access the scratch value for 'vd'; asserts that 'vd' is tracked.
ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//------------------------------------------------------------------------====//
// Worklist: worklist for dataflow analysis.
282//====------------------------------------------------------------------------// 283 284namespace { 285class DataflowWorklist { 286 SmallVector<const CFGBlock *, 20> worklist; 287 llvm::BitVector enqueuedBlocks; 288public: 289 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 290 291 void enqueueSuccessors(const CFGBlock *block); 292 const CFGBlock *dequeue(); 293}; 294} 295 296void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 297 unsigned OldWorklistSize = worklist.size(); 298 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 299 E = block->succ_end(); I != E; ++I) { 300 const CFGBlock *Successor = *I; 301 if (!Successor || enqueuedBlocks[Successor->getBlockID()]) 302 continue; 303 worklist.push_back(Successor); 304 enqueuedBlocks[Successor->getBlockID()] = true; 305 } 306 if (OldWorklistSize == 0 || OldWorklistSize == worklist.size()) 307 return; 308 309 // Rotate the newly added blocks to the start of the worklist so that it forms 310 // a proper queue when we pop off the end of the worklist. 311 std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize, 312 worklist.end()); 313} 314 315const CFGBlock *DataflowWorklist::dequeue() { 316 if (worklist.empty()) 317 return 0; 318 const CFGBlock *b = worklist.back(); 319 worklist.pop_back(); 320 enqueuedBlocks[b->getBlockID()] = false; 321 return b; 322} 323 324//------------------------------------------------------------------------====// 325// Transfer function for uninitialized values analysis. 
326//====------------------------------------------------------------------------// 327 328namespace { 329class FindVarResult { 330 const VarDecl *vd; 331 const DeclRefExpr *dr; 332public: 333 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 334 335 const DeclRefExpr *getDeclRefExpr() const { return dr; } 336 const VarDecl *getDecl() const { return vd; } 337}; 338 339class TransferFunctions : public StmtVisitor<TransferFunctions> { 340 CFGBlockValues &vals; 341 const CFG &cfg; 342 AnalysisContext ∾ 343 UninitVariablesHandler *handler; 344 const bool flagBlockUses; 345 346 /// The last DeclRefExpr seen when analyzing a block. Used to 347 /// cheat when detecting cases when the address of a variable is taken. 348 DeclRefExpr *lastDR; 349 350 /// The last lvalue-to-rvalue conversion of a variable whose value 351 /// was uninitialized. Normally this results in a warning, but it is 352 /// possible to either silence the warning in some cases, or we 353 /// propagate the uninitialized value. 
354 CastExpr *lastLoad; 355public: 356 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 357 AnalysisContext &ac, 358 UninitVariablesHandler *handler, 359 bool flagBlockUses) 360 : vals(vals), cfg(cfg), ac(ac), handler(handler), 361 flagBlockUses(flagBlockUses), lastDR(0), lastLoad(0) {} 362 363 const CFG &getCFG() { return cfg; } 364 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 365 bool isAlwaysUninit); 366 367 void VisitBlockExpr(BlockExpr *be); 368 void VisitDeclStmt(DeclStmt *ds); 369 void VisitDeclRefExpr(DeclRefExpr *dr); 370 void VisitUnaryOperator(UnaryOperator *uo); 371 void VisitBinaryOperator(BinaryOperator *bo); 372 void VisitCastExpr(CastExpr *ce); 373 void VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 374 void Visit(Stmt *s); 375 376 bool isTrackedVar(const VarDecl *vd) { 377 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 378 } 379 380 FindVarResult findBlockVarDecl(Expr *ex); 381 382 void ProcessUses(Stmt *s = 0); 383}; 384} 385 386void TransferFunctions::reportUninit(const DeclRefExpr *ex, 387 const VarDecl *vd, bool isAlwaysUnit) { 388 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 389} 390 391FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 392 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 393 if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 394 if (isTrackedVar(vd)) 395 return FindVarResult(vd, dr); 396 return FindVarResult(0, 0); 397} 398 399void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs) { 400 // This represents an initialization of the 'element' value. 401 Stmt *element = fs->getElement(); 402 const VarDecl* vd = 0; 403 404 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 405 vd = cast<VarDecl>(ds->getSingleDecl()); 406 if (!isTrackedVar(vd)) 407 vd = 0; 408 } 409 else { 410 // Initialize the value of the reference variable. 
411 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 412 vd = res.getDecl(); 413 } 414 415 if (vd) 416 vals[vd] = Initialized; 417} 418 419void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 420 if (!flagBlockUses || !handler) 421 return; 422 const BlockDecl *bd = be->getBlockDecl(); 423 for (BlockDecl::capture_const_iterator i = bd->capture_begin(), 424 e = bd->capture_end() ; i != e; ++i) { 425 const VarDecl *vd = i->getVariable(); 426 if (!vd->hasLocalStorage()) 427 continue; 428 if (!isTrackedVar(vd)) 429 continue; 430 if (i->isByRef()) { 431 vals[vd] = Initialized; 432 continue; 433 } 434 Value v = vals[vd]; 435 if (isUninitialized(v)) 436 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 437 } 438} 439 440void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 441 // Record the last DeclRefExpr seen. This is an lvalue computation. 442 // We use this value to later detect if a variable "escapes" the analysis. 443 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 444 if (isTrackedVar(vd)) { 445 ProcessUses(); 446 lastDR = dr; 447 } 448} 449 450void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 451 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 452 DI != DE; ++DI) { 453 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 454 if (isTrackedVar(vd)) { 455 if (Expr *init = vd->getInit()) { 456 // If the initializer consists solely of a reference to itself, we 457 // explicitly mark the variable as uninitialized. This allows code 458 // like the following: 459 // 460 // int x = x; 461 // 462 // to deliberately leave a variable uninitialized. Different analysis 463 // clients can detect this pattern and adjust their reporting 464 // appropriately, but we need to continue to analyze subsequent uses 465 // of the variable. 
466 if (init == lastLoad) { 467 DeclRefExpr *DR = 468 cast<DeclRefExpr>(lastLoad->getSubExpr()->IgnoreParens()); 469 if (DR->getDecl() == vd) { 470 // int x = x; 471 // Propagate uninitialized value, but don't immediately report 472 // a problem. 473 vals[vd] = Uninitialized; 474 lastLoad = 0; 475 lastDR = 0; 476 return; 477 } 478 } 479 480 // All other cases: treat the new variable as initialized. 481 vals[vd] = Initialized; 482 } 483 } 484 } 485 } 486} 487 488void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 489 if (bo->isAssignmentOp()) { 490 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 491 if (const VarDecl* vd = res.getDecl()) { 492 ValueVector::reference val = vals[vd]; 493 if (isUninitialized(val)) { 494 if (bo->getOpcode() != BO_Assign) 495 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 496 else 497 val = Initialized; 498 } 499 } 500 } 501} 502 503void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 504 switch (uo->getOpcode()) { 505 case clang::UO_PostDec: 506 case clang::UO_PostInc: 507 case clang::UO_PreDec: 508 case clang::UO_PreInc: { 509 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 510 if (const VarDecl *vd = res.getDecl()) { 511 assert(res.getDeclRefExpr() == lastDR); 512 // We null out lastDR to indicate we have fully processed it 513 // and we don't want the auto-value setting in Visit(). 514 lastDR = 0; 515 516 ValueVector::reference val = vals[vd]; 517 if (isUninitialized(val)) 518 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 519 } 520 break; 521 } 522 default: 523 break; 524 } 525} 526 527void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 528 if (ce->getCastKind() == CK_LValueToRValue) { 529 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 530 if (const VarDecl *vd = res.getDecl()) { 531 assert(res.getDeclRefExpr() == lastDR); 532 if (isUninitialized(vals[vd])) { 533 // Record this load of an uninitialized value. 
Normally this 534 // results in a warning, but we delay reporting the issue 535 // in case it is wrapped in a void cast, etc. 536 lastLoad = ce; 537 } 538 } 539 } 540 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) { 541 if (cse->getType()->isVoidType()) { 542 // e.g. (void) x; 543 if (lastLoad == cse->getSubExpr()) { 544 // Squelch any detected load of an uninitialized value if 545 // we cast it to void. 546 lastLoad = 0; 547 lastDR = 0; 548 } 549 } 550 } 551} 552 553void TransferFunctions::Visit(clang::Stmt *s) { 554 StmtVisitor<TransferFunctions>::Visit(s); 555 ProcessUses(s); 556} 557 558void TransferFunctions::ProcessUses(Stmt *s) { 559 // This method is typically called after visiting a CFGElement statement 560 // in the CFG. We delay processing of reporting many loads of uninitialized 561 // values until here. 562 if (lastLoad) { 563 // If we just visited the lvalue-to-rvalue cast, there is nothing 564 // left to do. 565 if (lastLoad == s) 566 return; 567 568 // If we reach here, we have seen a load of an uninitialized value 569 // and it hasn't been casted to void or otherwise handled. In this 570 // situation, report the incident. 571 DeclRefExpr *DR = cast<DeclRefExpr>(lastLoad->getSubExpr()->IgnoreParens()); 572 VarDecl *VD = cast<VarDecl>(DR->getDecl()); 573 reportUninit(DR, VD, isAlwaysUninit(vals[VD])); 574 lastLoad = 0; 575 576 if (DR == lastDR) { 577 lastDR = 0; 578 return; 579 } 580 } 581 582 // Any other uses of 'lastDR' involve taking an lvalue of variable. 583 // In this case, it "escapes" the analysis. 584 if (lastDR && lastDR != s) { 585 vals[cast<VarDecl>(lastDR->getDecl())] = Initialized; 586 lastDR = 0; 587 } 588} 589 590//------------------------------------------------------------------------====// 591// High-level "driver" logic for uninitialized values analysis. 
//====------------------------------------------------------------------------//

/// Process a single CFG block: merge the dataflow values of its
/// predecessors into the scratch vector, run the transfer function over
/// the block's statements, and write the result back.  Returns true if
/// the block's values changed.  When 'handler' is non-null, uses of
/// uninitialized variables are reported as they are found (this is only
/// done during the final reporting pass).
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  wasAnalyzed[block->getBlockID()] = true;

  // Blocks that are part of a '&&'/'||' chain get special handling so the
  // true and false successor edges can carry different value vectors.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(cs->getStmt());
    }
  }
  // Flush any deferred use that is still pending at the end of the block.
  tf.ProcessUses();
  return vals.updateValueVectorWithScratch(block);
}

/// Entry point of the analysis.  Seeds all tracked variables as
/// Uninitialized on the entry block's successor edges, iterates the
/// worklist to a dataflow fixed point, then makes one more pass over the
/// analyzed blocks to report problems through 'handler'.  Statistics are
/// accumulated into 'stats'.
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    ++stats.NumBlockVisits;
    // Re-enqueue successors if the values changed, or if this block has
    // never been visited before.
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    if (wasAnalyzed[(*BI)->getBlockID()]) {
      runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler,
                 /* flagBlockUses */ true);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}