UninitializedValues.cpp revision b88fb027bfe2f85da3a341f42549900bd658ac8b
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements uninitialized values analysis for source-level CFGs. 11// 12//===----------------------------------------------------------------------===// 13 14#include <utility> 15#include "llvm/ADT/Optional.h" 16#include "llvm/ADT/SmallVector.h" 17#include "llvm/ADT/BitVector.h" 18#include "llvm/ADT/DenseMap.h" 19#include "clang/AST/Decl.h" 20#include "clang/Analysis/CFG.h" 21#include "clang/Analysis/AnalysisContext.h" 22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h" 23#include "clang/Analysis/Analyses/UninitializedValues.h" 24#include "clang/Analysis/Support/SaveAndRestore.h" 25 26using namespace clang; 27 28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) { 29 if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() && 30 vd->getDeclContext() == dc) { 31 QualType ty = vd->getType(); 32 return ty->isScalarType() || ty->isVectorType(); 33 } 34 return false; 35} 36 37//------------------------------------------------------------------------====// 38// DeclToIndex: a mapping from Decls we track to value indices. 39//====------------------------------------------------------------------------// 40 41namespace { 42class DeclToIndex { 43 llvm::DenseMap<const VarDecl *, unsigned> map; 44public: 45 DeclToIndex() {} 46 47 /// Compute the actual mapping from declarations to bits. 48 void computeMap(const DeclContext &dc); 49 50 /// Return the number of declarations in the map. 51 unsigned size() const { return map.size(); } 52 53 /// Returns the bit vector index for a given declaration. 
54 llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const; 55}; 56} 57 58void DeclToIndex::computeMap(const DeclContext &dc) { 59 unsigned count = 0; 60 DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()), 61 E(dc.decls_end()); 62 for ( ; I != E; ++I) { 63 const VarDecl *vd = *I; 64 if (isTrackedVar(vd, &dc)) 65 map[vd] = count++; 66 } 67} 68 69llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const { 70 llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d); 71 if (I == map.end()) 72 return llvm::Optional<unsigned>(); 73 return I->second; 74} 75 76//------------------------------------------------------------------------====// 77// CFGBlockValues: dataflow values for CFG blocks. 78//====------------------------------------------------------------------------// 79 80// These values are defined in such a way that a merge can be done using 81// a bitwise OR. 82enum Value { Unknown = 0x0, /* 00 */ 83 Initialized = 0x1, /* 01 */ 84 Uninitialized = 0x2, /* 10 */ 85 MayUninitialized = 0x3 /* 11 */ }; 86 87static bool isUninitialized(const Value v) { 88 return v >= Uninitialized; 89} 90static bool isAlwaysUninit(const Value v) { 91 return v == Uninitialized; 92} 93 94namespace { 95class ValueVector { 96 llvm::BitVector vec; 97public: 98 ValueVector() {} 99 ValueVector(unsigned size) : vec(size << 1) {} 100 void resize(unsigned n) { vec.resize(n << 1); } 101 void merge(const ValueVector &rhs) { vec |= rhs.vec; } 102 bool operator!=(const ValueVector &rhs) const { return vec != rhs.vec; } 103 void reset() { vec.reset(); } 104 105 class reference { 106 ValueVector &vv; 107 const unsigned idx; 108 109 reference(); // Undefined 110 public: 111 reference(ValueVector &vv, unsigned idx) : vv(vv), idx(idx) {} 112 ~reference() {} 113 114 reference &operator=(Value v) { 115 vv.vec[idx << 1] = (((unsigned) v) & 0x1) ? true : false; 116 vv.vec[(idx << 1) | 1] = (((unsigned) v) & 0x2) ? 
true : false; 117 return *this; 118 } 119 operator Value() { 120 unsigned x = (vv.vec[idx << 1] ? 1 : 0) | (vv.vec[(idx << 1) | 1] ? 2 :0); 121 return (Value) x; 122 } 123 }; 124 125 reference operator[](unsigned idx) { return reference(*this, idx); } 126}; 127 128typedef std::pair<ValueVector *, ValueVector *> BVPair; 129 130class CFGBlockValues { 131 const CFG &cfg; 132 BVPair *vals; 133 ValueVector scratch; 134 DeclToIndex declToIndex; 135 136 ValueVector &lazyCreate(ValueVector *&bv); 137public: 138 CFGBlockValues(const CFG &cfg); 139 ~CFGBlockValues(); 140 141 unsigned getNumEntries() const { return declToIndex.size(); } 142 143 void computeSetOfDeclarations(const DeclContext &dc); 144 ValueVector &getValueVector(const CFGBlock *block, 145 const CFGBlock *dstBlock); 146 147 BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate); 148 149 void mergeIntoScratch(ValueVector const &source, bool isFirst); 150 bool updateValueVectorWithScratch(const CFGBlock *block); 151 bool updateValueVectors(const CFGBlock *block, const BVPair &newVals); 152 153 bool hasNoDeclarations() const { 154 return declToIndex.size() == 0; 155 } 156 157 bool hasEntry(const VarDecl *vd) const { 158 return declToIndex.getValueIndex(vd).hasValue(); 159 } 160 161 bool hasValues(const CFGBlock *block); 162 163 void resetScratch(); 164 ValueVector &getScratch() { return scratch; } 165 166 ValueVector::reference operator[](const VarDecl *vd); 167}; 168} // end anonymous namespace 169 170CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) { 171 unsigned n = cfg.getNumBlockIDs(); 172 if (!n) 173 return; 174 vals = new std::pair<ValueVector*, ValueVector*>[n]; 175 memset(vals, 0, sizeof(*vals) * n); 176} 177 178CFGBlockValues::~CFGBlockValues() { 179 unsigned n = cfg.getNumBlockIDs(); 180 if (n == 0) 181 return; 182 for (unsigned i = 0; i < n; ++i) { 183 delete vals[i].first; 184 delete vals[i].second; 185 } 186 delete [] vals; 187} 188 189void 
CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) { 190 declToIndex.computeMap(dc); 191 scratch.resize(declToIndex.size()); 192} 193 194ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) { 195 if (!bv) 196 bv = new ValueVector(declToIndex.size()); 197 return *bv; 198} 199 200/// This function pattern matches for a '&&' or '||' that appears at 201/// the beginning of a CFGBlock that also (1) has a terminator and 202/// (2) has no other elements. If such an expression is found, it is returned. 203static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) { 204 if (block->empty()) 205 return 0; 206 207 const CFGStmt *cstmt = block->front().getAs<CFGStmt>(); 208 if (!cstmt) 209 return 0; 210 211 BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt()); 212 213 if (!b || !b->isLogicalOp()) 214 return 0; 215 216 if (block->pred_size() == 2 && 217 ((block->succ_size() == 2 && block->getTerminatorCondition() == b) || 218 block->size() == 1)) 219 return b; 220 221 return 0; 222} 223 224ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block, 225 const CFGBlock *dstBlock) { 226 unsigned idx = block->getBlockID(); 227 if (dstBlock && getLogicalOperatorInChain(block)) { 228 if (*block->succ_begin() == dstBlock) 229 return lazyCreate(vals[idx].first); 230 assert(*(block->succ_begin()+1) == dstBlock); 231 return lazyCreate(vals[idx].second); 232 } 233 234 assert(vals[idx].second == 0); 235 return lazyCreate(vals[idx].first); 236} 237 238bool CFGBlockValues::hasValues(const CFGBlock *block) { 239 unsigned idx = block->getBlockID(); 240 return vals[idx].second != 0; 241} 242 243BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block, 244 bool shouldLazyCreate) { 245 unsigned idx = block->getBlockID(); 246 lazyCreate(vals[idx].first); 247 if (shouldLazyCreate) 248 lazyCreate(vals[idx].second); 249 return vals[idx]; 250} 251 252void CFGBlockValues::mergeIntoScratch(ValueVector const &source, 253 bool 
isFirst) { 254 if (isFirst) 255 scratch = source; 256 else 257 scratch.merge(source); 258} 259#if 0 260static void printVector(const CFGBlock *block, ValueVector &bv, 261 unsigned num) { 262 263 llvm::errs() << block->getBlockID() << " :"; 264 for (unsigned i = 0; i < bv.size(); ++i) { 265 llvm::errs() << ' ' << bv[i]; 266 } 267 llvm::errs() << " : " << num << '\n'; 268} 269#endif 270 271bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) { 272 ValueVector &dst = getValueVector(block, 0); 273 bool changed = (dst != scratch); 274 if (changed) 275 dst = scratch; 276#if 0 277 printVector(block, scratch, 0); 278#endif 279 return changed; 280} 281 282bool CFGBlockValues::updateValueVectors(const CFGBlock *block, 283 const BVPair &newVals) { 284 BVPair &vals = getValueVectors(block, true); 285 bool changed = *newVals.first != *vals.first || 286 *newVals.second != *vals.second; 287 *vals.first = *newVals.first; 288 *vals.second = *newVals.second; 289#if 0 290 printVector(block, *vals.first, 1); 291 printVector(block, *vals.second, 2); 292#endif 293 return changed; 294} 295 296void CFGBlockValues::resetScratch() { 297 scratch.reset(); 298} 299 300ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) { 301 const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd); 302 assert(idx.hasValue()); 303 return scratch[idx.getValue()]; 304} 305 306//------------------------------------------------------------------------====// 307// Worklist: worklist for dataflow analysis. 
308//====------------------------------------------------------------------------// 309 310namespace { 311class DataflowWorklist { 312 llvm::SmallVector<const CFGBlock *, 20> worklist; 313 llvm::BitVector enqueuedBlocks; 314public: 315 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 316 317 void enqueue(const CFGBlock *block); 318 void enqueueSuccessors(const CFGBlock *block); 319 const CFGBlock *dequeue(); 320 321}; 322} 323 324void DataflowWorklist::enqueue(const CFGBlock *block) { 325 if (!block) 326 return; 327 unsigned idx = block->getBlockID(); 328 if (enqueuedBlocks[idx]) 329 return; 330 worklist.push_back(block); 331 enqueuedBlocks[idx] = true; 332} 333 334void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 335 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 336 E = block->succ_end(); I != E; ++I) { 337 enqueue(*I); 338 } 339} 340 341const CFGBlock *DataflowWorklist::dequeue() { 342 if (worklist.empty()) 343 return 0; 344 const CFGBlock *b = worklist.back(); 345 worklist.pop_back(); 346 enqueuedBlocks[b->getBlockID()] = false; 347 return b; 348} 349 350//------------------------------------------------------------------------====// 351// Transfer function for uninitialized values analysis. 
352//====------------------------------------------------------------------------// 353 354namespace { 355class FindVarResult { 356 const VarDecl *vd; 357 const DeclRefExpr *dr; 358public: 359 FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {} 360 361 const DeclRefExpr *getDeclRefExpr() const { return dr; } 362 const VarDecl *getDecl() const { return vd; } 363}; 364 365class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> { 366 CFGBlockValues &vals; 367 const CFG &cfg; 368 AnalysisContext ∾ 369 UninitVariablesHandler *handler; 370 const DeclRefExpr *currentDR; 371 const Expr *currentVoidCast; 372 const bool flagBlockUses; 373public: 374 TransferFunctions(CFGBlockValues &vals, const CFG &cfg, 375 AnalysisContext &ac, 376 UninitVariablesHandler *handler, 377 bool flagBlockUses) 378 : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0), 379 currentVoidCast(0), flagBlockUses(flagBlockUses) {} 380 381 const CFG &getCFG() { return cfg; } 382 void reportUninit(const DeclRefExpr *ex, const VarDecl *vd, 383 bool isAlwaysUninit); 384 385 void VisitBlockExpr(BlockExpr *be); 386 void VisitDeclStmt(DeclStmt *ds); 387 void VisitDeclRefExpr(DeclRefExpr *dr); 388 void VisitUnaryOperator(UnaryOperator *uo); 389 void VisitBinaryOperator(BinaryOperator *bo); 390 void VisitCastExpr(CastExpr *ce); 391 void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se); 392 void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs); 393 394 bool isTrackedVar(const VarDecl *vd) { 395#if 1 396 // FIXME: This is a temporary workaround to deal with the fact 397 // that DeclContext's do not always contain all of their variables! 
398 return vals.hasEntry(vd); 399#else 400 return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl())); 401#endif 402 } 403 404 FindVarResult findBlockVarDecl(Expr *ex); 405}; 406} 407 408void TransferFunctions::reportUninit(const DeclRefExpr *ex, 409 const VarDecl *vd, bool isAlwaysUnit) { 410 if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUnit); 411} 412 413FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) { 414 if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts())) 415 if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 416 if (isTrackedVar(vd)) 417 return FindVarResult(vd, dr); 418 return FindVarResult(0, 0); 419} 420 421void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt( 422 ObjCForCollectionStmt *fs) { 423 424 Visit(fs->getCollection()); 425 426 // This represents an initialization of the 'element' value. 427 Stmt *element = fs->getElement(); 428 const VarDecl* vd = 0; 429 430 if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) { 431 vd = cast<VarDecl>(ds->getSingleDecl()); 432 if (!isTrackedVar(vd)) 433 vd = 0; 434 } 435 else { 436 // Initialize the value of the reference variable. 
437 const FindVarResult &res = findBlockVarDecl(cast<Expr>(element)); 438 vd = res.getDecl(); 439 if (!vd) { 440 Visit(element); 441 return; 442 } 443 } 444 445 if (vd) 446 vals[vd] = Initialized; 447} 448 449void TransferFunctions::VisitBlockExpr(BlockExpr *be) { 450 if (!flagBlockUses || !handler) 451 return; 452 const BlockDecl *bd = be->getBlockDecl(); 453 for (BlockDecl::capture_const_iterator i = bd->capture_begin(), 454 e = bd->capture_end() ; i != e; ++i) { 455 const VarDecl *vd = i->getVariable(); 456 if (!vd->hasLocalStorage()) 457 continue; 458 if (!isTrackedVar(vd)) 459 continue; 460 if (i->isByRef()) { 461 vals[vd] = Initialized; 462 continue; 463 } 464 Value v = vals[vd]; 465 if (isUninitialized(v)) 466 handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v)); 467 } 468} 469 470void TransferFunctions::VisitDeclStmt(DeclStmt *ds) { 471 for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end(); 472 DI != DE; ++DI) { 473 if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) { 474 if (isTrackedVar(vd)) { 475 if (Expr *init = vd->getInit()) { 476 Visit(init); 477 478 // If the initializer consists solely of a reference to itself, we 479 // explicitly mark the variable as uninitialized. This allows code 480 // like the following: 481 // 482 // int x = x; 483 // 484 // to deliberately leave a variable uninitialized. Different analysis 485 // clients can detect this pattern and adjust their reporting 486 // appropriately, but we need to continue to analyze subsequent uses 487 // of the variable. 488 DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(init->IgnoreParenImpCasts()); 489 vals[vd] = (DRE && DRE->getDecl() == vd) ? Uninitialized 490 : Initialized; 491 } 492 } else if (Stmt *init = vd->getInit()) { 493 Visit(init); 494 } 495 } 496 } 497} 498 499void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) { 500 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 501 // cannot be block-level expressions. 
Therefore, we determine if 502 // a DeclRefExpr is involved in a "load" by comparing it to the current 503 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 504 // If a DeclRefExpr is not involved in a load, we are essentially computing 505 // its address, either for assignment to a reference or via the '&' operator. 506 // In such cases, treat the variable as being initialized, since this 507 // analysis isn't powerful enough to do alias tracking. 508 if (dr != currentDR) 509 if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl())) 510 if (isTrackedVar(vd)) 511 vals[vd] = Initialized; 512} 513 514void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) { 515 if (bo->isAssignmentOp()) { 516 const FindVarResult &res = findBlockVarDecl(bo->getLHS()); 517 if (const VarDecl* vd = res.getDecl()) { 518 // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment" 519 // cannot be block-level expressions. Therefore, we determine if 520 // a DeclRefExpr is involved in a "load" by comparing it to the current 521 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
522 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 523 res.getDeclRefExpr()); 524 Visit(bo->getRHS()); 525 Visit(bo->getLHS()); 526 527 ValueVector::reference val = vals[vd]; 528 if (isUninitialized(val)) { 529 if (bo->getOpcode() != BO_Assign) 530 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 531 val = Initialized; 532 } 533 return; 534 } 535 } 536 Visit(bo->getRHS()); 537 Visit(bo->getLHS()); 538} 539 540void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) { 541 switch (uo->getOpcode()) { 542 case clang::UO_PostDec: 543 case clang::UO_PostInc: 544 case clang::UO_PreDec: 545 case clang::UO_PreInc: { 546 const FindVarResult &res = findBlockVarDecl(uo->getSubExpr()); 547 if (const VarDecl *vd = res.getDecl()) { 548 // We assume that DeclRefExprs wrapped in a unary operator ++/-- 549 // cannot be block-level expressions. Therefore, we determine if 550 // a DeclRefExpr is involved in a "load" by comparing it to the current 551 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 552 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 553 res.getDeclRefExpr()); 554 Visit(uo->getSubExpr()); 555 556 ValueVector::reference val = vals[vd]; 557 if (isUninitialized(val)) { 558 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 559 // Don't cascade warnings. 560 val = Initialized; 561 } 562 return; 563 } 564 break; 565 } 566 default: 567 break; 568 } 569 Visit(uo->getSubExpr()); 570} 571 572void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) { 573 if (ce->getCastKind() == CK_LValueToRValue) { 574 const FindVarResult &res = findBlockVarDecl(ce->getSubExpr()); 575 if (const VarDecl *vd = res.getDecl()) { 576 // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast 577 // cannot be block-level expressions. Therefore, we determine if 578 // a DeclRefExpr is involved in a "load" by comparing it to the current 579 // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr. 
580 // Here we update 'currentDR' to be the one associated with this 581 // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we 582 // will know that we are not computing its lvalue for other purposes 583 // than to perform a load. 584 SaveAndRestore<const DeclRefExpr*> lastDR(currentDR, 585 res.getDeclRefExpr()); 586 Visit(ce->getSubExpr()); 587 if (currentVoidCast != ce) { 588 Value val = vals[vd]; 589 if (isUninitialized(val)) { 590 reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val)); 591 // Don't cascade warnings. 592 vals[vd] = Initialized; 593 } 594 } 595 return; 596 } 597 } 598 else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) { 599 if (cse->getType()->isVoidType()) { 600 // e.g. (void) x; 601 SaveAndRestore<const Expr *> 602 lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens()); 603 Visit(cse->getSubExpr()); 604 return; 605 } 606 } 607 Visit(ce->getSubExpr()); 608} 609 610void TransferFunctions::VisitUnaryExprOrTypeTraitExpr( 611 UnaryExprOrTypeTraitExpr *se) { 612 if (se->getKind() == UETT_SizeOf) { 613 if (se->getType()->isConstantSizeType()) 614 return; 615 // Handle VLAs. 616 Visit(se->getArgumentExpr()); 617 } 618} 619 620//------------------------------------------------------------------------====// 621// High-level "driver" logic for uninitialized values analysis. 
//====------------------------------------------------------------------------//

// Recompute the dataflow values for a single block: merge predecessor
// values into scratch, run the transfer function over the block's
// statements, and write the result back.  Returns true if the block's
// values changed.  Short-circuit ('&&'/'||') blocks are handled
// specially so that each successor edge carries its own value vector.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  wasAnalyzed[block->getBlockID()] = true;

  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    // The two predecessors carry the values flowing in from each branch
    // of the short-circuit chain.
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getValueVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateValueVectorWithScratch(block);
}

// Entry point: run the analysis over 'cfg' to a fixed point, then make
// one final reporting pass with 'handler' attached.
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    // Propagate to successors on change, or on the first visit so that
    // every reachable block is analyzed at least once.
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    if (wasAnalyzed[(*BI)->getBlockID()])
      runOnBlock(*BI, cfg, ac, vals, wasAnalyzed, &handler,
                 /* flagBlockUses */ true);
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}