// UninitializedValues.cpp — revision 4ddb3871307376d27d0f276c9da0ecce0384f01f
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements uninitialized values analysis for source-level CFGs. 11// 12//===----------------------------------------------------------------------===// 13 14#include <utility> 15#include "llvm/ADT/Optional.h" 16#include "llvm/ADT/SmallVector.h" 17#include "llvm/ADT/BitVector.h" 18#include "llvm/ADT/DenseMap.h" 19#include "clang/AST/Decl.h" 20#include "clang/Analysis/CFG.h" 21#include "clang/Analysis/AnalysisContext.h" 22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h" 23#include "clang/Analysis/Analyses/UninitializedValues.h" 24#include "clang/Analysis/Support/SaveAndRestore.h" 25 26using namespace clang; 27 28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) { 29 return vd->isLocalVarDecl() && !vd->hasGlobalStorage() && 30 vd->getType()->isScalarType() && 31 vd->getDeclContext() == dc; 32} 33 34//------------------------------------------------------------------------====// 35// DeclToIndex: a mapping from Decls we track to value indices. 36//====------------------------------------------------------------------------// 37 38namespace { 39class DeclToIndex { 40 llvm::DenseMap<const VarDecl *, unsigned> map; 41public: 42 DeclToIndex() {} 43 44 /// Compute the actual mapping from declarations to bits. 45 void computeMap(const DeclContext &dc); 46 47 /// Return the number of declarations in the map. 48 unsigned size() const { return map.size(); } 49 50 /// Returns the bit vector index for a given declaration. 
51 llvm::Optional<unsigned> getValueIndex(const VarDecl *d); 52}; 53} 54 55void DeclToIndex::computeMap(const DeclContext &dc) { 56 unsigned count = 0; 57 DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()), 58 E(dc.decls_end()); 59 for ( ; I != E; ++I) { 60 const VarDecl *vd = *I; 61 if (isTrackedVar(vd, &dc)) 62 map[vd] = count++; 63 } 64} 65 66llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) { 67 llvm::DenseMap<const VarDecl *, unsigned>::iterator I = map.find(d); 68 if (I == map.end()) 69 return llvm::Optional<unsigned>(); 70 return I->second; 71} 72 73//------------------------------------------------------------------------====// 74// CFGBlockValues: dataflow values for CFG blocks. 75//====------------------------------------------------------------------------// 76 77// These values are defined in such a way that a merge can be done using 78// a bitwise OR. 79enum Value { Unknown = 0x0, /* 00 */ 80 Initialized = 0x1, /* 01 */ 81 Uninitialized = 0x2, /* 10 */ 82 MayUninitialized = 0x3 /* 11 */ }; 83 84static bool isUninitialized(const Value v) { 85 return v >= Uninitialized; 86} 87static bool isAlwaysUninit(const Value v) { 88 return v == Uninitialized; 89} 90 91class ValueVector { 92 llvm::BitVector vec; 93public: 94 ValueVector() {} 95 ValueVector(unsigned size) : vec(size << 1) {} 96 void resize(unsigned n) { vec.resize(n << 1); } 97 void merge(const ValueVector &rhs) { vec |= rhs.vec; } 98 bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; } 99 void reset() { vec.reset(); } 100 101 class reference { 102 ValueVector &vv; 103 const unsigned idx; 104 105 reference(); // Undefined 106 public: 107 reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {} 108 ~reference() {} 109 110 reference &operator=(Value v) { 111 vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false; 112 vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? 
true : false; 113 return *this; 114 } 115 operator Value() { 116 unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0); 117 return (Value) x; 118 } 119 }; 120 121 reference operator[](unsigned idx) { return reference(*this, idx); } 122}; 123 124typedef std::pair<ValueVector *, ValueVector *> BVPair; 125 126namespace { 127class CFGBlockValues { 128 const CFG &cfg; 129 BVPair *vals; 130 ValueVector scratch; 131 DeclToIndex declToIndex; 132 133 ValueVector &lazyCreate(ValueVector *&bv); 134public: 135 CFGBlockValues(const CFG &cfg); 136 ~CFGBlockValues(); 137 138 void computeSetOfDeclarations(const DeclContext &dc); 139 ValueVector &getValueVector(const CFGBlock *block, 140 const CFGBlock *dstBlock); 141 142 BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate); 143 144 void mergeIntoScratch(ValueVector const &source, bool isFirst); 145 bool updateValueVectorWithScratch(const CFGBlock *block); 146 bool updateValueVectors(const CFGBlock *block, const BVPair &newVals); 147 148 bool hasNoDeclarations() const { 149 return declToIndex.size() == 0; 150 } 151 152 void resetScratch(); 153 ValueVector &getScratch() { return scratch; } 154 155 ValueVector::reference operator[](const VarDecl *vd); 156}; 157} 158 159CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) { 160 unsigned n = cfg.getNumBlockIDs(); 161 if (!n) 162 return; 163 vals = new std::pair<ValueVector*, ValueVector*>[n]; 164 memset(vals, 0, sizeof(*vals) * n); 165} 166 167CFGBlockValues::~CFGBlockValues() { 168 unsigned n = cfg.getNumBlockIDs(); 169 if (n == 0) 170 return; 171 for (unsigned i = 0; i < n; ++i) { 172 delete vals[i].first; 173 delete vals[i].second; 174 } 175 delete [] vals; 176} 177 178void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) { 179 declToIndex.computeMap(dc); 180 scratch.resize(declToIndex.size()); 181} 182 183ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) { 184 if (!bv) 185 bv = new 
ValueVector(declToIndex.size()); 186 return *bv; 187} 188 189/// This function pattern matches for a '&&' or '||' that appears at 190/// the beginning of a CFGBlock that also (1) has a terminator and 191/// (2) has no other elements. If such an expression is found, it is returned. 192static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) { 193 if (block->empty()) 194 return 0; 195 196 const CFGStmt *cstmt = block->front().getAs<CFGStmt>(); 197 if (!cstmt) 198 return 0; 199 200 BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt()); 201 202 if (!b || !b->isLogicalOp()) 203 return 0; 204 205 if (block->pred_size() == 2 && 206 ((block->succ_size() == 2 && block->getTerminatorCondition() == b) || 207 block->size() == 1)) 208 return b; 209 210 return 0; 211} 212 213ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block, 214 const CFGBlock *dstBlock) { 215 unsigned idx = block->getBlockID(); 216 if (dstBlock && getLogicalOperatorInChain(block)) { 217 if (*block->succ_begin() == dstBlock) 218 return lazyCreate(vals[idx].first); 219 assert(*(block->succ_begin()+1) == dstBlock); 220 return lazyCreate(vals[idx].second); 221 } 222 223 assert(vals[idx].second == 0); 224 return lazyCreate(vals[idx].first); 225} 226 227BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block, 228 bool shouldLazyCreate) { 229 unsigned idx = block->getBlockID(); 230 lazyCreate(vals[idx].first); 231 if (shouldLazyCreate) 232 lazyCreate(vals[idx].second); 233 return vals[idx]; 234} 235 236void CFGBlockValues::mergeIntoScratch(ValueVector const &source, 237 bool isFirst) { 238 if (isFirst) 239 scratch = source; 240 else 241 scratch.merge(source); 242} 243#if 0 244static void printVector(const CFGBlock *block, ValueVector &bv, 245 unsigned num) { 246 247 llvm::errs() << block->getBlockID() << " :"; 248 for (unsigned i = 0; i < bv.size(); ++i) { 249 llvm::errs() << ' ' << bv[i]; 250 } 251 llvm::errs() << " : " << num << '\n'; 252} 253#endif 
254 255bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) { 256 ValueVector &dst = getValueVector(block, 0); 257 bool changed = (dst != scratch); 258 if (changed) 259 dst = scratch; 260#if 0 261 printVector(block, scratch, 0); 262#endif 263 return changed; 264} 265 266bool CFGBlockValues::updateValueVectors(const CFGBlock *block, 267 const BVPair &newVals) { 268 BVPair &vals = getValueVectors(block, true); 269 bool changed = *newVals.first != *vals.first || 270 *newVals.second != *vals.second; 271 *vals.first = *newVals.first; 272 *vals.second = *newVals.second; 273#if 0 274 printVector(block, *vals.first, 1); 275 printVector(block, *vals.second, 2); 276#endif 277 return changed; 278} 279 280void CFGBlockValues::resetScratch() { 281 scratch.reset(); 282} 283 284ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) { 285 const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd); 286 assert(idx.hasValue()); 287 return scratch[idx.getValue()]; 288} 289 290//------------------------------------------------------------------------====// 291// Worklist: worklist for dataflow analysis. 
292//====------------------------------------------------------------------------// 293 294namespace { 295class DataflowWorklist { 296 llvm::SmallVector<const CFGBlock *, 20> worklist; 297 llvm::BitVector enqueuedBlocks; 298public: 299 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 300 301 void enqueue(const CFGBlock *block); 302 void enqueueSuccessors(const CFGBlock *block); 303 const CFGBlock *dequeue(); 304 305}; 306} 307 308void DataflowWorklist::enqueue(const CFGBlock *block) { 309 if (!block) 310 return; 311 unsigned idx = block->getBlockID(); 312 if (enqueuedBlocks[idx]) 313 return; 314 worklist.push_back(block); 315 enqueuedBlocks[idx] = true; 316} 317 318void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 319 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 320 E = block->succ_end(); I != E; ++I) { 321 enqueue(*I); 322 } 323} 324 325const CFGBlock *DataflowWorklist::dequeue() { 326 if (worklist.empty()) 327 return 0; 328 const CFGBlock *b = worklist.back(); 329 worklist.pop_back(); 330 enqueuedBlocks[b->getBlockID()] = false; 331 return b; 332} 333 334//------------------------------------------------------------------------====// 335// Transfer function for uninitialized values analysis. 
336//====------------------------------------------------------------------------// 337 338namespace { 339class FindVarResult { 340 const VarDecl *vd; 341 const DeclRefExpr *dr; 342public: 343 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 344 345 const DeclRefExpr *getDeclRefExpr() const { return dr; } 346 const VarDecl *getDecl() const { return vd; } 347}; 348 349class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> { 350 CFGBlockValues &vals; 351 const CFG &cfg; 352 AnalysisContext ∾ 353 UninitVariablesHandler *handler; 354 const DeclRefExpr *currentDR; 355 const Expr *currentVoidCast; 356 const bool flagBlockUses; 357public: 358 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 359 AnalysisContext &ac, 360 UninitVariablesHandler *handler, 361 bool flagBlockUses) 362 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0), 363 currentVoidCast(0), flagBlockUses(flagBlockUses) {} 364 365 const CFG &getCFG() { return cfg; } 366 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 367 bool isAlwaysUninit); 368 369 void VisitBlockExpr(BlockExpr *be); 370 void VisitDeclStmt(DeclStmt *ds); 371 void VisitDeclRefExpr(DeclRefExpr *dr); 372 void VisitUnaryOperator(UnaryOperator *uo); 373 void VisitBinaryOperator(BinaryOperator *bo); 374 void VisitCastExpr(CastExpr *ce); 375 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se); 376 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 377 378 bool isTrackedVar(const VarDecl *vd) { 379 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 380 } 381 382 FindVarResult findBlockVarDecl(Expr *ex); 383}; 384} 385 386void TransferFunctions::reportUninit(const DeclRefExpr *ex, 387 const VarDecl *vd, bool isAlwaysUnit) { 388 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 389} 390 391FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 392 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 393 if 
(VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 394 if (isTrackedVar(vd)) 395 return FindVarResult(vd, dr); 396 return FindVarResult(0, 0); 397} 398 399void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt( 400 ObjCForCollectionStmt *fs) { 401 402 Visit(fs->getCollection()); 403 404 // This represents an initialization of the 'element' value. 405 Stmt *element = fs->getElement(); 406 const VarDecl* vd = 0; 407 408 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 409 vd = cast<VarDecl>(ds->getSingleDecl()); 410 if (!isTrackedVar(vd)) 411 vd = 0; 412 } 413 else { 414 // Initialize the value of the reference variable. 415 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 416 vd = res.getDecl(); 417 if (!vd) { 418 Visit(element); 419 return; 420 } 421 } 422 423 if (vd) 424 vals[vd] = Initialized; 425} 426 427void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 428 if (!flagBlockUses || !handler) 429 return; 430 AnalysisContext::referenced_decls_iterator i, e; 431 llvm::tie(i, e) = ac.getReferencedBlockVars(be->getBlockDecl()); 432 for ( ; i != e; ++i) { 433 const VarDecl *vd = *i; 434 if (vd->getAttr<BlocksAttr>() || !vd->hasLocalStorage() || 435 !isTrackedVar(vd)) 436 continue; 437 Value v = vals[vd]; 438 if (isUninitialized(v)) 439 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 440 } 441} 442 443void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 444 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 445 DI != DE; ++DI) { 446 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 447 if (isTrackedVar(vd)) { 448 vals[vd] = Uninitialized; 449 if (Stmt *init = vd->getInit()) { 450 Visit(init); 451 vals[vd] = Initialized; 452 } 453 } 454 else if (Stmt *init = vd->getInit()) { 455 Visit(init); 456 } 457 } 458 } 459} 460 461void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 462 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 463 // cannot be block-level expressions. 
Therefore, we determine if 464 // a DeclRefExpr is involved in a "load" by comparing it to the current 465 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 466 // If a DeclRefExpr is not involved in a load, we are essentially computing 467 // its address, either for assignment to a reference or via the '&' operator. 468 // In such cases, treat the variable as being initialized, since this 469 // analysis isn't powerful enough to do alias tracking. 470 if (dr != currentDR) 471 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 472 if (isTrackedVar(vd)) 473 vals[vd] = Initialized; 474} 475 476void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 477 if (bo->isAssignmentOp()) { 478 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 479 if (const VarDecl* vd = res.getDecl()) { 480 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment" 481 // cannot be block-level expressions. Therefore, we determine if 482 // a DeclRefExpr is involved in a "load" by comparing it to the current 483 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
484 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 485 res.getDeclRefExpr()); 486 Visit(bo->getRHS()); 487 Visit(bo->getLHS()); 488 489 ValueVector::reference val = vals[vd]; 490 if (isUninitialized(val)) { 491 if (bo->getOpcode() != BO_Assign) 492 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 493 val = Initialized; 494 } 495 return; 496 } 497 } 498 Visit(bo->getRHS()); 499 Visit(bo->getLHS()); 500} 501 502void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 503 switch (uo->getOpcode()) { 504 case clang::UO_PostDec: 505 case clang::UO_PostInc: 506 case clang::UO_PreDec: 507 case clang::UO_PreInc: { 508 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 509 if (const VarDecl *vd = res.getDecl()) { 510 // We assume that DeclRefExprs wrapped in a unary operator ++/-- 511 // cannot be block-level expressions. Therefore, we determine if 512 // a DeclRefExpr is involved in a "load" by comparing it to the current 513 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 514 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 515 res.getDeclRefExpr()); 516 Visit(uo->getSubExpr()); 517 518 ValueVector::reference val = vals[vd]; 519 if (isUninitialized(val)) { 520 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 521 // Don't cascade warnings. 522 val = Initialized; 523 } 524 return; 525 } 526 break; 527 } 528 default: 529 break; 530 } 531 Visit(uo->getSubExpr()); 532} 533 534void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 535 if (ce->getCastKind() == CK_LValueToRValue) { 536 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 537 if (const VarDecl *vd = res.getDecl()) { 538 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 539 // cannot be block-level expressions. Therefore, we determine if 540 // a DeclRefExpr is involved in a "load" by comparing it to the current 541 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
542 // Here we update 'currentDR' to be the one associated with this 543 // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we 544 // will know that we are not computing its lvalue for other purposes 545 // than to perform a load. 546 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 547 res.getDeclRefExpr()); 548 Visit(ce->getSubExpr()); 549 if (currentVoidCast != ce) { 550 Value val = vals[vd]; 551 if (isUninitialized(val)) { 552 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 553 // Don't cascade warnings. 554 vals[vd] = Initialized; 555 } 556 } 557 return; 558 } 559 } 560 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) { 561 if (cse->getType()->isVoidType()) { 562 // e.g. (void) x; 563 SaveAndRestore<const Expr *> 564 lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens()); 565 Visit(cse->getSubExpr()); 566 return; 567 } 568 } 569 Visit(ce->getSubExpr()); 570} 571 572void TransferFunctions::VisitUnaryExprOrTypeTraitExpr( 573 UnaryExprOrTypeTraitExpr *se) { 574 if (se->getKind() == UETT_SizeOf) { 575 if (se->getType()->isConstantSizeType()) 576 return; 577 // Handle VLAs. 578 Visit(se->getArgumentExpr()); 579 } 580} 581 582//------------------------------------------------------------------------====// 583// High-level "driver" logic for uninitialized values analysis. 584//====------------------------------------------------------------------------// 585 586static bool runOnBlock(const CFGBlock *block, const CFG &cfg, 587 AnalysisContext &ac, CFGBlockValues &vals, 588 UninitVariablesHandler *handler = 0, 589 bool flagBlockUses = false) { 590 591 if (const BinaryOperator *b = getLogicalOperatorInChain(block)) { 592 CFGBlock::const_pred_iterator itr = block->pred_begin(); 593 BVPair vA = vals.getValueVectors(*itr, false); 594 ++itr; 595 BVPair vB = vals.getValueVectors(*itr, false); 596 597 BVPair valsAB; 598 599 if (b->getOpcode() == BO_LAnd) { 600 // Merge the 'F' bits from the first and second. 
601 vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true); 602 vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false); 603 valsAB.first = vA.first; 604 valsAB.second = &vals.getScratch(); 605 } 606 else { 607 // Merge the 'T' bits from the first and second. 608 assert(b->getOpcode() == BO_LOr); 609 vals.mergeIntoScratch(*vA.first, true); 610 vals.mergeIntoScratch(*vB.first, false); 611 valsAB.first = &vals.getScratch(); 612 valsAB.second = vA.second ? vA.second : vA.first; 613 } 614 return vals.updateValueVectors(block, valsAB); 615 } 616 617 // Default behavior: merge in values of predecessor blocks. 618 vals.resetScratch(); 619 bool isFirst = true; 620 for (CFGBlock::const_pred_iterator I = block->pred_begin(), 621 E = block->pred_end(); I != E; ++I) { 622 vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst); 623 isFirst = false; 624 } 625 // Apply the transfer function. 626 TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses); 627 for (CFGBlock::const_iterator I = block->begin(), E = block->end(); 628 I != E; ++I) { 629 if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) { 630 tf.BlockStmt_Visit(cs->getStmt()); 631 } 632 } 633 return vals.updateValueVectorWithScratch(block); 634} 635 636void clang::runUninitializedVariablesAnalysis(const DeclContext &dc, 637 const CFG &cfg, 638 AnalysisContext &ac, 639 UninitVariablesHandler &handler) { 640 CFGBlockValues vals(cfg); 641 vals.computeSetOfDeclarations(dc); 642 if (vals.hasNoDeclarations()) 643 return; 644 DataflowWorklist worklist(cfg); 645 llvm::BitVector previouslyVisited(cfg.getNumBlockIDs()); 646 647 worklist.enqueueSuccessors(&cfg.getEntry()); 648 649 while (const CFGBlock *block = worklist.dequeue()) { 650 // Did the block change? 
651 bool changed = runOnBlock(block, cfg, ac, vals); 652 if (changed || !previouslyVisited[block->getBlockID()]) 653 worklist.enqueueSuccessors(block); 654 previouslyVisited[block->getBlockID()] = true; 655 } 656 657 // Run through the blocks one more time, and report uninitialized variabes. 658 for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) { 659 runOnBlock(*BI, cfg, ac, vals, &handler, /* flagBlockUses */ true); 660 } 661} 662 663UninitVariablesHandler::~UninitVariablesHandler() {} 664 665