// UninitializedValues.cpp revision 5d98994c7749312a43ce6adf45537979a98e7afd
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements uninitialized values analysis for source-level CFGs. 11// 12//===----------------------------------------------------------------------===// 13 14#include <utility> 15#include "llvm/ADT/Optional.h" 16#include "llvm/ADT/SmallVector.h" 17#include "llvm/ADT/PackedVector.h" 18#include "llvm/ADT/DenseMap.h" 19#include "clang/AST/Decl.h" 20#include "clang/Analysis/CFG.h" 21#include "clang/Analysis/AnalysisContext.h" 22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h" 23#include "clang/Analysis/Analyses/UninitializedValues.h" 24#include "clang/Analysis/Support/SaveAndRestore.h" 25 26using namespace clang; 27 28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) { 29 if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() && 30 !vd->isExceptionVariable() && 31 vd->getDeclContext() == dc) { 32 QualType ty = vd->getType(); 33 return ty->isScalarType() || ty->isVectorType(); 34 } 35 return false; 36} 37 38//------------------------------------------------------------------------====// 39// DeclToIndex: a mapping from Decls we track to value indices. 40//====------------------------------------------------------------------------// 41 42namespace { 43class DeclToIndex { 44 llvm::DenseMap<const VarDecl *, unsigned> map; 45public: 46 DeclToIndex() {} 47 48 /// Compute the actual mapping from declarations to bits. 49 void computeMap(const DeclContext &dc); 50 51 /// Return the number of declarations in the map. 52 unsigned size() const { return map.size(); } 53 54 /// Returns the bit vector index for a given declaration. 
55 llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const; 56}; 57} 58 59void DeclToIndex::computeMap(const DeclContext &dc) { 60 unsigned count = 0; 61 DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()), 62 E(dc.decls_end()); 63 for ( ; I != E; ++I) { 64 const VarDecl *vd = *I; 65 if (isTrackedVar(vd, &dc)) 66 map[vd] = count++; 67 } 68} 69 70llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const { 71 llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d); 72 if (I == map.end()) 73 return llvm::Optional<unsigned>(); 74 return I->second; 75} 76 77//------------------------------------------------------------------------====// 78// CFGBlockValues: dataflow values for CFG blocks. 79//====------------------------------------------------------------------------// 80 81// These values are defined in such a way that a merge can be done using 82// a bitwise OR. 83enum Value { Unknown = 0x0, /* 00 */ 84 Initialized = 0x1, /* 01 */ 85 Uninitialized = 0x2, /* 10 */ 86 MayUninitialized = 0x3 /* 11 */ }; 87 88static bool isUninitialized(const Value v) { 89 return v >= Uninitialized; 90} 91static bool isAlwaysUninit(const Value v) { 92 return v == Uninitialized; 93} 94 95namespace { 96 97typedef llvm::PackedVector<Value, 2> ValueVector; 98typedef std::pair<ValueVector *, ValueVector *> BVPair; 99 100class CFGBlockValues { 101 const CFG &cfg; 102 BVPair *vals; 103 ValueVector scratch; 104 DeclToIndex declToIndex; 105 106 ValueVector &lazyCreate(ValueVector *&bv); 107public: 108 CFGBlockValues(const CFG &cfg); 109 ~CFGBlockValues(); 110 111 unsigned getNumEntries() const { return declToIndex.size(); } 112 113 void computeSetOfDeclarations(const DeclContext &dc); 114 ValueVector &getValueVector(const CFGBlock *block, 115 const CFGBlock *dstBlock); 116 117 BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate); 118 119 void mergeIntoScratch(ValueVector const &source, bool isFirst); 120 bool 
updateValueVectorWithScratch(const CFGBlock *block); 121 bool updateValueVectors(const CFGBlock *block, const BVPair &newVals); 122 123 bool hasNoDeclarations() const { 124 return declToIndex.size() == 0; 125 } 126 127 bool hasEntry(const VarDecl *vd) const { 128 return declToIndex.getValueIndex(vd).hasValue(); 129 } 130 131 bool hasValues(const CFGBlock *block); 132 133 void resetScratch(); 134 ValueVector &getScratch() { return scratch; } 135 136 ValueVector::reference operator[](const VarDecl *vd); 137}; 138} // end anonymous namespace 139 140CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) { 141 unsigned n = cfg.getNumBlockIDs(); 142 if (!n) 143 return; 144 vals = new std::pair<ValueVector*, ValueVector*>[n]; 145 memset((void*)vals, 0, sizeof(*vals) * n); 146} 147 148CFGBlockValues::~CFGBlockValues() { 149 unsigned n = cfg.getNumBlockIDs(); 150 if (n == 0) 151 return; 152 for (unsigned i = 0; i < n; ++i) { 153 delete vals[i].first; 154 delete vals[i].second; 155 } 156 delete [] vals; 157} 158 159void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) { 160 declToIndex.computeMap(dc); 161 scratch.resize(declToIndex.size()); 162} 163 164ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) { 165 if (!bv) 166 bv = new ValueVector(declToIndex.size()); 167 return *bv; 168} 169 170/// This function pattern matches for a '&&' or '||' that appears at 171/// the beginning of a CFGBlock that also (1) has a terminator and 172/// (2) has no other elements. If such an expression is found, it is returned. 
173static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) { 174 if (block->empty()) 175 return 0; 176 177 const CFGStmt *cstmt = block->front().getAs<CFGStmt>(); 178 if (!cstmt) 179 return 0; 180 181 BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt()); 182 183 if (!b || !b->isLogicalOp()) 184 return 0; 185 186 if (block->pred_size() == 2) { 187 if (block->getTerminatorCondition() == b) { 188 if (block->succ_size() == 2) 189 return b; 190 } 191 else if (block->size() == 1) 192 return b; 193 } 194 195 return 0; 196} 197 198ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block, 199 const CFGBlock *dstBlock) { 200 unsigned idx = block->getBlockID(); 201 if (dstBlock && getLogicalOperatorInChain(block)) { 202 if (*block->succ_begin() == dstBlock) 203 return lazyCreate(vals[idx].first); 204 assert(*(block->succ_begin()+1) == dstBlock); 205 return lazyCreate(vals[idx].second); 206 } 207 208 assert(vals[idx].second == 0); 209 return lazyCreate(vals[idx].first); 210} 211 212bool CFGBlockValues::hasValues(const CFGBlock *block) { 213 unsigned idx = block->getBlockID(); 214 return vals[idx].second != 0; 215} 216 217BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block, 218 bool shouldLazyCreate) { 219 unsigned idx = block->getBlockID(); 220 lazyCreate(vals[idx].first); 221 if (shouldLazyCreate) 222 lazyCreate(vals[idx].second); 223 return vals[idx]; 224} 225 226void CFGBlockValues::mergeIntoScratch(ValueVector const &source, 227 bool isFirst) { 228 if (isFirst) 229 scratch = source; 230 else 231 scratch |= source; 232} 233#if 0 234static void printVector(const CFGBlock *block, ValueVector &bv, 235 unsigned num) { 236 237 llvm::errs() << block->getBlockID() << " :"; 238 for (unsigned i = 0; i < bv.size(); ++i) { 239 llvm::errs() << ' ' << bv[i]; 240 } 241 llvm::errs() << " : " << num << '\n'; 242} 243#endif 244 245bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) { 246 ValueVector &dst 
= getValueVector(block, 0); 247 bool changed = (dst != scratch); 248 if (changed) 249 dst = scratch; 250#if 0 251 printVector(block, scratch, 0); 252#endif 253 return changed; 254} 255 256bool CFGBlockValues::updateValueVectors(const CFGBlock *block, 257 const BVPair &newVals) { 258 BVPair &vals = getValueVectors(block, true); 259 bool changed = *newVals.first != *vals.first || 260 *newVals.second != *vals.second; 261 *vals.first = *newVals.first; 262 *vals.second = *newVals.second; 263#if 0 264 printVector(block, *vals.first, 1); 265 printVector(block, *vals.second, 2); 266#endif 267 return changed; 268} 269 270void CFGBlockValues::resetScratch() { 271 scratch.reset(); 272} 273 274ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) { 275 const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd); 276 assert(idx.hasValue()); 277 return scratch[idx.getValue()]; 278} 279 280//------------------------------------------------------------------------====// 281// Worklist: worklist for dataflow analysis. 
282//====------------------------------------------------------------------------// 283 284namespace { 285class DataflowWorklist { 286 llvm::SmallVector<const CFGBlock *, 20> worklist; 287 llvm::BitVector enqueuedBlocks; 288public: 289 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 290 291 void enqueue(const CFGBlock *block); 292 void enqueueSuccessors(const CFGBlock *block); 293 const CFGBlock *dequeue(); 294 295}; 296} 297 298void DataflowWorklist::enqueue(const CFGBlock *block) { 299 if (!block) 300 return; 301 unsigned idx = block->getBlockID(); 302 if (enqueuedBlocks[idx]) 303 return; 304 worklist.push_back(block); 305 enqueuedBlocks[idx] = true; 306} 307 308void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 309 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 310 E = block->succ_end(); I != E; ++I) { 311 enqueue(*I); 312 } 313} 314 315const CFGBlock *DataflowWorklist::dequeue() { 316 if (worklist.empty()) 317 return 0; 318 const CFGBlock *b = worklist.back(); 319 worklist.pop_back(); 320 enqueuedBlocks[b->getBlockID()] = false; 321 return b; 322} 323 324//------------------------------------------------------------------------====// 325// Transfer function for uninitialized values analysis. 
326//====------------------------------------------------------------------------// 327 328namespace { 329class FindVarResult { 330 const VarDecl *vd; 331 const DeclRefExpr *dr; 332public: 333 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 334 335 const DeclRefExpr *getDeclRefExpr() const { return dr; } 336 const VarDecl *getDecl() const { return vd; } 337}; 338 339class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> { 340 CFGBlockValues &vals; 341 const CFG &cfg; 342 AnalysisContext ∾ 343 UninitVariablesHandler *handler; 344 const DeclRefExpr *currentDR; 345 const Expr *currentVoidCast; 346 const bool flagBlockUses; 347public: 348 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 349 AnalysisContext &ac, 350 UninitVariablesHandler *handler, 351 bool flagBlockUses) 352 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0), 353 currentVoidCast(0), flagBlockUses(flagBlockUses) {} 354 355 const CFG &getCFG() { return cfg; } 356 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 357 bool isAlwaysUninit); 358 359 void VisitBlockExpr(BlockExpr *be); 360 void VisitDeclStmt(DeclStmt *ds); 361 void VisitDeclRefExpr(DeclRefExpr *dr); 362 void VisitUnaryOperator(UnaryOperator *uo); 363 void VisitBinaryOperator(BinaryOperator *bo); 364 void VisitCastExpr(CastExpr *ce); 365 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se); 366 void VisitCXXTypeidExpr(CXXTypeidExpr *E); 367 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 368 369 bool isTrackedVar(const VarDecl *vd) { 370 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 371 } 372 373 FindVarResult findBlockVarDecl(Expr *ex); 374}; 375} 376 377void TransferFunctions::reportUninit(const DeclRefExpr *ex, 378 const VarDecl *vd, bool isAlwaysUnit) { 379 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 380} 381 382FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 383 if (DeclRefExpr* dr = 
dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 384 if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 385 if (isTrackedVar(vd)) 386 return FindVarResult(vd, dr); 387 return FindVarResult(0, 0); 388} 389 390void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt( 391 ObjCForCollectionStmt *fs) { 392 393 Visit(fs->getCollection()); 394 395 // This represents an initialization of the 'element' value. 396 Stmt *element = fs->getElement(); 397 const VarDecl* vd = 0; 398 399 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 400 vd = cast<VarDecl>(ds->getSingleDecl()); 401 if (!isTrackedVar(vd)) 402 vd = 0; 403 } 404 else { 405 // Initialize the value of the reference variable. 406 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 407 vd = res.getDecl(); 408 if (!vd) { 409 Visit(element); 410 return; 411 } 412 } 413 414 if (vd) 415 vals[vd] = Initialized; 416} 417 418void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 419 if (!flagBlockUses || !handler) 420 return; 421 const BlockDecl *bd = be->getBlockDecl(); 422 for (BlockDecl::capture_const_iterator i = bd->capture_begin(), 423 e = bd->capture_end() ; i != e; ++i) { 424 const VarDecl *vd = i->getVariable(); 425 if (!vd->hasLocalStorage()) 426 continue; 427 if (!isTrackedVar(vd)) 428 continue; 429 if (i->isByRef()) { 430 vals[vd] = Initialized; 431 continue; 432 } 433 Value v = vals[vd]; 434 if (isUninitialized(v)) 435 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 436 } 437} 438 439void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 440 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 441 DI != DE; ++DI) { 442 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 443 if (isTrackedVar(vd)) { 444 if (Expr *init = vd->getInit()) { 445 Visit(init); 446 447 // If the initializer consists solely of a reference to itself, we 448 // explicitly mark the variable as uninitialized. 
This allows code 449 // like the following: 450 // 451 // int x = x; 452 // 453 // to deliberately leave a variable uninitialized. Different analysis 454 // clients can detect this pattern and adjust their reporting 455 // appropriately, but we need to continue to analyze subsequent uses 456 // of the variable. 457 DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(init->IgnoreParenImpCasts()); 458 vals[vd] = (DRE && DRE->getDecl() == vd) ? Uninitialized 459 : Initialized; 460 } 461 } else if (Stmt *init = vd->getInit()) { 462 Visit(init); 463 } 464 } 465 } 466} 467 468void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 469 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 470 // cannot be block-level expressions. Therefore, we determine if 471 // a DeclRefExpr is involved in a "load" by comparing it to the current 472 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 473 // If a DeclRefExpr is not involved in a load, we are essentially computing 474 // its address, either for assignment to a reference or via the '&' operator. 475 // In such cases, treat the variable as being initialized, since this 476 // analysis isn't powerful enough to do alias tracking. 477 if (dr != currentDR) 478 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 479 if (isTrackedVar(vd)) 480 vals[vd] = Initialized; 481} 482 483void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 484 if (bo->isAssignmentOp()) { 485 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 486 if (const VarDecl* vd = res.getDecl()) { 487 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment" 488 // cannot be block-level expressions. Therefore, we determine if 489 // a DeclRefExpr is involved in a "load" by comparing it to the current 490 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
491 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 492 res.getDeclRefExpr()); 493 Visit(bo->getRHS()); 494 Visit(bo->getLHS()); 495 496 ValueVector::reference val = vals[vd]; 497 if (isUninitialized(val)) { 498 if (bo->getOpcode() != BO_Assign) 499 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 500 val = Initialized; 501 } 502 return; 503 } 504 } 505 Visit(bo->getRHS()); 506 Visit(bo->getLHS()); 507} 508 509void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 510 switch (uo->getOpcode()) { 511 case clang::UO_PostDec: 512 case clang::UO_PostInc: 513 case clang::UO_PreDec: 514 case clang::UO_PreInc: { 515 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 516 if (const VarDecl *vd = res.getDecl()) { 517 // We assume that DeclRefExprs wrapped in a unary operator ++/-- 518 // cannot be block-level expressions. Therefore, we determine if 519 // a DeclRefExpr is involved in a "load" by comparing it to the current 520 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 521 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 522 res.getDeclRefExpr()); 523 Visit(uo->getSubExpr()); 524 525 ValueVector::reference val = vals[vd]; 526 if (isUninitialized(val)) { 527 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 528 // Don't cascade warnings. 529 val = Initialized; 530 } 531 return; 532 } 533 break; 534 } 535 default: 536 break; 537 } 538 Visit(uo->getSubExpr()); 539} 540 541void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 542 if (ce->getCastKind() == CK_LValueToRValue) { 543 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 544 if (const VarDecl *vd = res.getDecl()) { 545 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 546 // cannot be block-level expressions. Therefore, we determine if 547 // a DeclRefExpr is involved in a "load" by comparing it to the current 548 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
549 // Here we update 'currentDR' to be the one associated with this 550 // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we 551 // will know that we are not computing its lvalue for other purposes 552 // than to perform a load. 553 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 554 res.getDeclRefExpr()); 555 Visit(ce->getSubExpr()); 556 if (currentVoidCast != ce) { 557 Value val = vals[vd]; 558 if (isUninitialized(val)) { 559 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 560 // Don't cascade warnings. 561 vals[vd] = Initialized; 562 } 563 } 564 return; 565 } 566 } 567 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) { 568 if (cse->getType()->isVoidType()) { 569 // e.g. (void) x; 570 SaveAndRestore<const Expr *> 571 lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens()); 572 Visit(cse->getSubExpr()); 573 return; 574 } 575 } 576 Visit(ce->getSubExpr()); 577} 578 579void TransferFunctions::VisitUnaryExprOrTypeTraitExpr( 580 UnaryExprOrTypeTraitExpr *se) { 581 if (se->getKind() == UETT_SizeOf) { 582 if (se->getType()->isConstantSizeType()) 583 return; 584 // Handle VLAs. 585 Visit(se->getArgumentExpr()); 586 } 587} 588 589void TransferFunctions::VisitCXXTypeidExpr(CXXTypeidExpr *E) { 590 // typeid(expression) is potentially evaluated when the argument is 591 // a glvalue of polymorphic type. (C++ 5.2.8p2-3) 592 if (!E->isTypeOperand() && E->Classify(ac.getASTContext()).isGLValue()) { 593 QualType SubExprTy = E->getExprOperand()->getType(); 594 if (const RecordType *Record = SubExprTy->getAs<RecordType>()) 595 if (cast<CXXRecordDecl>(Record->getDecl())->isPolymorphic()) 596 Visit(E->getExprOperand()); 597 } 598} 599 600//------------------------------------------------------------------------====// 601// High-level "driver" logic for uninitialized values analysis. 
602//====------------------------------------------------------------------------// 603 604static bool runOnBlock(const CFGBlock *block, const CFG &cfg, 605 AnalysisContext &ac, CFGBlockValues &vals, 606 llvm::BitVector &wasAnalyzed, 607 UninitVariablesHandler *handler = 0, 608 bool flagBlockUses = false) { 609 610 wasAnalyzed[block->getBlockID()] = true; 611 612 if (const BinaryOperator *b = getLogicalOperatorInChain(block)) { 613 CFGBlock::const_pred_iterator itr = block->pred_begin(); 614 BVPair vA = vals.getValueVectors(*itr, false); 615 ++itr; 616 BVPair vB = vals.getValueVectors(*itr, false); 617 618 BVPair valsAB; 619 620 if (b->getOpcode() == BO_LAnd) { 621 // Merge the 'F' bits from the first and second. 622 vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true); 623 vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false); 624 valsAB.first = vA.first; 625 valsAB.second = &vals.getScratch(); 626 } 627 else { 628 // Merge the 'T' bits from the first and second. 629 assert(b->getOpcode() == BO_LOr); 630 vals.mergeIntoScratch(*vA.first, true); 631 vals.mergeIntoScratch(*vB.first, false); 632 valsAB.first = &vals.getScratch(); 633 valsAB.second = vA.second ? vA.second : vA.first; 634 } 635 return vals.updateValueVectors(block, valsAB); 636 } 637 638 // Default behavior: merge in values of predecessor blocks. 639 vals.resetScratch(); 640 bool isFirst = true; 641 for (CFGBlock::const_pred_iterator I = block->pred_begin(), 642 E = block->pred_end(); I != E; ++I) { 643 vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst); 644 isFirst = false; 645 } 646 // Apply the transfer function. 
647 TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses); 648 for (CFGBlock::const_iterator I = block->begin(), E = block->end(); 649 I != E; ++I) { 650 if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) { 651 tf.BlockStmt_Visit(cs->getStmt()); 652 } 653 } 654 return vals.updateValueVectorWithScratch(block); 655} 656 657void clang::runUninitializedVariablesAnalysis( 658 const DeclContext &dc, 659 const CFG &cfg, 660 AnalysisContext &ac, 661 UninitVariablesHandler &handler, 662 UninitVariablesAnalysisStats &stats) { 663 CFGBlockValues vals(cfg); 664 vals.computeSetOfDeclarations(dc); 665 if (vals.hasNoDeclarations()) 666 return; 667 668 stats.NumVariablesAnalyzed = vals.getNumEntries(); 669 670 // Mark all variables uninitialized at the entry. 671 const CFGBlock &entry = cfg.getEntry(); 672 for (CFGBlock::const_succ_iterator i = entry.succ_begin(), 673 e = entry.succ_end(); i != e; ++i) { 674 if (const CFGBlock *succ = *i) { 675 ValueVector &vec = vals.getValueVector(&entry, succ); 676 const unsigned n = vals.getNumEntries(); 677 for (unsigned j = 0; j < n ; ++j) { 678 vec[j] = Uninitialized; 679 } 680 } 681 } 682 683 // Proceed with the workist. 684 DataflowWorklist worklist(cfg); 685 llvm::BitVector previouslyVisited(cfg.getNumBlockIDs()); 686 worklist.enqueueSuccessors(&cfg.getEntry()); 687 llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false); 688 689 while (const CFGBlock *block = worklist.dequeue()) { 690 // Did the block change? 691 bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed); 692 ++stats.NumBlockVisits; 693 if (changed || !previouslyVisited[block->getBlockID()]) 694 worklist.enqueueSuccessors(block); 695 previouslyVisited[block->getBlockID()] = true; 696 } 697 698 // Run through the blocks one more time, and report uninitialized variabes. 
699 for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) { 700 if (wasAnalyzed[(*BI)->getBlockID()]) { 701 runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler, 702 /* flagBlockUses */ true); 703 ++stats.NumBlockVisits; 704 } 705 } 706} 707 708UninitVariablesHandler::~UninitVariablesHandler() {} 709