UninitializedValues.cpp revision 6f34213f8d6ae8c77685b53664527e39bfaaca3b
1//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file implements uninitialized values analysis for source-level CFGs. 11// 12//===----------------------------------------------------------------------===// 13 14#include <utility> 15#include "llvm/ADT/Optional.h" 16#include "llvm/ADT/SmallVector.h" 17#include "llvm/ADT/BitVector.h" 18#include "llvm/ADT/DenseMap.h" 19#include "clang/AST/Decl.h" 20#include "clang/Analysis/CFG.h" 21#include "clang/Analysis/AnalysisContext.h" 22#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h" 23#include "clang/Analysis/Analyses/UninitializedValues.h" 24#include "clang/Analysis/Support/SaveAndRestore.h" 25 26using namespace clang; 27 28static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) { 29 return vd->isLocalVarDecl() && !vd->hasGlobalStorage() && 30 vd->getType()->isScalarType() && 31 vd->getDeclContext() == dc; 32} 33 34//------------------------------------------------------------------------====// 35// DeclToBit: a mapping from Decls we track to bitvector indices. 36//====------------------------------------------------------------------------// 37 38namespace { 39class DeclToBit { 40 llvm::DenseMap<const VarDecl *, unsigned> map; 41public: 42 DeclToBit() {} 43 44 /// Compute the actual mapping from declarations to bits. 45 void computeMap(const DeclContext &dc); 46 47 /// Return the number of declarations in the map. 48 unsigned size() const { return map.size(); } 49 50 /// Returns the bit vector index for a given declaration. 
51 llvm::Optional<unsigned> getBitVectorIndex(const VarDecl *d); 52}; 53} 54 55void DeclToBit::computeMap(const DeclContext &dc) { 56 unsigned count = 0; 57 DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()), 58 E(dc.decls_end()); 59 for ( ; I != E; ++I) { 60 const VarDecl *vd = *I; 61 if (isTrackedVar(vd, &dc)) 62 map[vd] = count++; 63 } 64} 65 66llvm::Optional<unsigned> DeclToBit::getBitVectorIndex(const VarDecl *d) { 67 llvm::DenseMap<const VarDecl *, unsigned>::iterator I = map.find(d); 68 if (I == map.end()) 69 return llvm::Optional<unsigned>(); 70 return I->second; 71} 72 73//------------------------------------------------------------------------====// 74// CFGBlockValues: dataflow values for CFG blocks. 75//====------------------------------------------------------------------------// 76 77typedef std::pair<llvm::BitVector *, llvm::BitVector *> BVPair; 78 79namespace { 80class CFGBlockValues { 81 const CFG &cfg; 82 BVPair *vals; 83 llvm::BitVector scratch; 84 DeclToBit declToBit; 85 86 llvm::BitVector &lazyCreate(llvm::BitVector *&bv); 87public: 88 CFGBlockValues(const CFG &cfg); 89 ~CFGBlockValues(); 90 91 void computeSetOfDeclarations(const DeclContext &dc); 92 llvm::BitVector &getBitVector(const CFGBlock *block, 93 const CFGBlock *dstBlock); 94 95 BVPair &getBitVectors(const CFGBlock *block, bool shouldLazyCreate); 96 97 void mergeIntoScratch(llvm::BitVector const &source, bool isFirst); 98 bool updateBitVectorWithScratch(const CFGBlock *block); 99 bool updateBitVectors(const CFGBlock *block, const BVPair &newVals); 100 101 bool hasNoDeclarations() const { 102 return declToBit.size() == 0; 103 } 104 105 void resetScratch(); 106 llvm::BitVector &getScratch() { return scratch; } 107 108 llvm::BitVector::reference operator[](const VarDecl *vd); 109}; 110} 111 112CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) { 113 unsigned n = cfg.getNumBlockIDs(); 114 if (!n) 115 return; 116 vals = new std::pair<llvm::BitVector*, 
llvm::BitVector*>[n]; 117 memset(vals, 0, sizeof(*vals) * n); 118} 119 120CFGBlockValues::~CFGBlockValues() { 121 unsigned n = cfg.getNumBlockIDs(); 122 if (n == 0) 123 return; 124 for (unsigned i = 0; i < n; ++i) { 125 delete vals[i].first; 126 delete vals[i].second; 127 } 128 delete [] vals; 129} 130 131void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) { 132 declToBit.computeMap(dc); 133 scratch.resize(declToBit.size()); 134} 135 136llvm::BitVector &CFGBlockValues::lazyCreate(llvm::BitVector *&bv) { 137 if (!bv) 138 bv = new llvm::BitVector(declToBit.size()); 139 return *bv; 140} 141 142/// This function pattern matches for a '&&' or '||' that appears at 143/// the beginning of a CFGBlock that also (1) has a terminator and 144/// (2) has no other elements. If such an expression is found, it is returned. 145static BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) { 146 if (block->empty()) 147 return 0; 148 149 const CFGStmt *cstmt = block->front().getAs<CFGStmt>(); 150 BinaryOperator *b = llvm::dyn_cast_or_null<BinaryOperator>(cstmt->getStmt()); 151 152 if (!b || !b->isLogicalOp()) 153 return 0; 154 155 if (block->pred_size() == 2 && 156 ((block->succ_size() == 2 && block->getTerminatorCondition() == b) || 157 block->size() == 1)) 158 return b; 159 160 return 0; 161} 162 163llvm::BitVector &CFGBlockValues::getBitVector(const CFGBlock *block, 164 const CFGBlock *dstBlock) { 165 unsigned idx = block->getBlockID(); 166 if (dstBlock && getLogicalOperatorInChain(block)) { 167 if (*block->succ_begin() == dstBlock) 168 return lazyCreate(vals[idx].first); 169 assert(*(block->succ_begin()+1) == dstBlock); 170 return lazyCreate(vals[idx].second); 171 } 172 173 assert(vals[idx].second == 0); 174 return lazyCreate(vals[idx].first); 175} 176 177BVPair &CFGBlockValues::getBitVectors(const clang::CFGBlock *block, 178 bool shouldLazyCreate) { 179 unsigned idx = block->getBlockID(); 180 lazyCreate(vals[idx].first); 181 if (shouldLazyCreate) 182 
lazyCreate(vals[idx].second); 183 return vals[idx]; 184} 185 186void CFGBlockValues::mergeIntoScratch(llvm::BitVector const &source, 187 bool isFirst) { 188 if (isFirst) 189 scratch = source; 190 else 191 scratch |= source; 192} 193#if 0 194static void printVector(const CFGBlock *block, llvm::BitVector &bv, 195 unsigned num) { 196 197 llvm::errs() << block->getBlockID() << " :"; 198 for (unsigned i = 0; i < bv.size(); ++i) { 199 llvm::errs() << ' ' << bv[i]; 200 } 201 llvm::errs() << " : " << num << '\n'; 202} 203#endif 204 205bool CFGBlockValues::updateBitVectorWithScratch(const CFGBlock *block) { 206 llvm::BitVector &dst = getBitVector(block, 0); 207 bool changed = (dst != scratch); 208 if (changed) 209 dst = scratch; 210#if 0 211 printVector(block, scratch, 0); 212#endif 213 return changed; 214} 215 216bool CFGBlockValues::updateBitVectors(const CFGBlock *block, 217 const BVPair &newVals) { 218 BVPair &vals = getBitVectors(block, true); 219 bool changed = *newVals.first != *vals.first || 220 *newVals.second != *vals.second; 221 *vals.first = *newVals.first; 222 *vals.second = *newVals.second; 223#if 0 224 printVector(block, *vals.first, 1); 225 printVector(block, *vals.second, 2); 226#endif 227 return changed; 228} 229 230void CFGBlockValues::resetScratch() { 231 scratch.reset(); 232} 233 234llvm::BitVector::reference CFGBlockValues::operator[](const VarDecl *vd) { 235 const llvm::Optional<unsigned> &idx = declToBit.getBitVectorIndex(vd); 236 assert(idx.hasValue()); 237 return scratch[idx.getValue()]; 238} 239 240//------------------------------------------------------------------------====// 241// Worklist: worklist for dataflow analysis. 
242//====------------------------------------------------------------------------// 243 244namespace { 245class DataflowWorklist { 246 llvm::SmallVector<const CFGBlock *, 20> worklist; 247 llvm::BitVector enqueuedBlocks; 248public: 249 DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {} 250 251 void enqueue(const CFGBlock *block); 252 void enqueueSuccessors(const CFGBlock *block); 253 const CFGBlock *dequeue(); 254 255}; 256} 257 258void DataflowWorklist::enqueue(const CFGBlock *block) { 259 if (!block) 260 return; 261 unsigned idx = block->getBlockID(); 262 if (enqueuedBlocks[idx]) 263 return; 264 worklist.push_back(block); 265 enqueuedBlocks[idx] = true; 266} 267 268void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) { 269 for (CFGBlock::const_succ_iterator I = block->succ_begin(), 270 E = block->succ_end(); I != E; ++I) { 271 enqueue(*I); 272 } 273} 274 275const CFGBlock *DataflowWorklist::dequeue() { 276 if (worklist.empty()) 277 return 0; 278 const CFGBlock *b = worklist.back(); 279 worklist.pop_back(); 280 enqueuedBlocks[b->getBlockID()] = false; 281 return b; 282} 283 284//------------------------------------------------------------------------====// 285// Transfer function for uninitialized values analysis. 
//====------------------------------------------------------------------------//

// Dataflow lattice values: a set bit means "possibly uninitialized".
static const bool Initialized = false;
static const bool Uninitialized = true;

namespace {
/// Result of looking through an expression for a tracked block-level
/// variable: the variable and the DeclRefExpr that names it (both null
/// when no tracked variable was found).
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

/// Statement visitor implementing the transfer function of the analysis:
/// visiting a statement updates the scratch bit vector in 'vals' and
/// reports uses of uninitialized variables through 'handler'.
class TransferFunctions : public CFGRecStmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  AnalysisContext &ac;
  UninitVariablesHandler *handler;    // may be null (pure dataflow pass)
  const DeclRefExpr *currentDR;       // DeclRefExpr of the innermost load/store
  const Expr *currentVoidCast;        // operand of an enclosing (void) cast
  const bool flagBlockUses;           // report uses captured by blocks?
public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    AnalysisContext &ac,
                    UninitVariablesHandler *handler,
                    bool flagBlockUses)
    : vals(vals), cfg(cfg), ac(ac), handler(handler), currentDR(0),
      currentVoidCast(0), flagBlockUses(flagBlockUses) {}

  const CFG &getCFG() { return cfg; }
  void reportUninit(const DeclRefExpr *ex, const VarDecl *vd);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitUnaryExprOrTypeTraitExpr(UnaryExprOrTypeTraitExpr *se);
  void BlockStmt_VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findBlockVarDecl(Expr *ex);
};
}

/// Forward an uninitialized-use diagnostic to the handler, if any.
void TransferFunctions::reportUninit(const DeclRefExpr *ex,
                                     const VarDecl *vd) {
  if (handler) handler->handleUseOfUninitVariable(ex, vd);
}

/// Strip parens/casts from 'ex' and return the tracked variable it names,
/// if any.
FindVarResult TransferFunctions::findBlockVarDecl(Expr* ex) {
  if (DeclRefExpr* dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
      if (isTrackedVar(vd))
        return FindVarResult(vd, dr);
  return FindVarResult(0, 0);
}

void TransferFunctions::BlockStmt_VisitObjCForCollectionStmt(
    ObjCForCollectionStmt *fs) {

  Visit(fs->getCollection());

  // This represents an initialization of the 'element' value.
  Stmt *element = fs->getElement();
  const VarDecl* vd = 0;

  if (DeclStmt* ds = dyn_cast<DeclStmt>(element)) {
    // 'for (id x in collection)': the loop declares the element variable.
    vd = cast<VarDecl>(ds->getSingleDecl());
    if (!isTrackedVar(vd))
      vd = 0;
  }
  else {
    // Initialize the value of the reference variable.
    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
    vd = res.getDecl();
    if (!vd) {
      // Not a tracked variable; just analyze the element expression.
      Visit(element);
      return;
    }
  }

  if (vd)
    vals[vd] = Initialized;
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  if (!flagBlockUses || !handler)
    return;
  // Any still-uninitialized local captured (by copy) by the block is
  // reported at the point of the block literal.
  AnalysisContext::referenced_decls_iterator i, e;
  llvm::tie(i, e) = ac.getReferencedBlockVars(be->getBlockDecl());
  for ( ; i != e; ++i) {
    const VarDecl *vd = *i;
    // __block variables and non-locals are captured by reference and are
    // not tracked by this analysis.
    if (vd->getAttr<BlocksAttr>() || !vd->hasLocalStorage() ||
        !isTrackedVar(vd))
      continue;
    if (vals[vd] == Uninitialized)
      handler->handleUseOfUninitVariable(be, vd);
  }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
       DI != DE; ++DI) {
    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
      if (isTrackedVar(vd)) {
        // Mark Uninitialized first so a self-referential initializer
        // (e.g. 'int x = x;') is caught while visiting 'init'.
        vals[vd] = Uninitialized;
        if (Stmt *init = vd->getInit()) {
          Visit(init);
          vals[vd] = Initialized;
        }
      }
      else if (Stmt *init = vd->getInit()) {
        Visit(init);
      }
    }
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
  // cannot be block-level expressions. Therefore, we determine if
  // a DeclRefExpr is involved in a "load" by comparing it to the current
  // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
  // If a DeclRefExpr is not involved in a load, we are essentially computing
  // its address, either for assignment to a reference or via the '&' operator.
  // In such cases, treat the variable as being initialized, since this
  // analysis isn't powerful enough to do alias tracking.
  if (dr != currentDR)
    if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
      if (isTrackedVar(vd))
        vals[vd] = Initialized;
}

void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
  if (bo->isAssignmentOp()) {
    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
    if (const VarDecl* vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in a BinaryOperator "assignment"
      // cannot be block-level expressions. Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(bo->getRHS());
      Visit(bo->getLHS());

      llvm::BitVector::reference bit = vals[vd];
      if (bit == Uninitialized) {
        // Compound assignments (+=, etc.) read the LHS, so they are a use
        // of an uninitialized value; plain '=' is not.
        if (bo->getOpcode() != BO_Assign)
          reportUninit(res.getDeclRefExpr(), vd);
        // Either way, the variable is initialized afterwards.
        bit = Initialized;
      }
      return;
    }
  }
  Visit(bo->getRHS());
  Visit(bo->getLHS());
}

void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
  switch (uo->getOpcode()) {
    case clang::UO_PostDec:
    case clang::UO_PostInc:
    case clang::UO_PreDec:
    case clang::UO_PreInc: {
      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
      if (const VarDecl *vd = res.getDecl()) {
        // We assume that DeclRefExprs wrapped in a unary operator ++/--
        // cannot be block-level expressions. Therefore, we determine if
        // a DeclRefExpr is involved in a "load" by comparing it to the current
        // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
        SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                  res.getDeclRefExpr());
        Visit(uo->getSubExpr());

        // ++/-- both read and write the variable: report a use if it was
        // uninitialized, then mark it initialized to avoid cascades.
        llvm::BitVector::reference bit = vals[vd];
        if (bit == Uninitialized) {
          reportUninit(res.getDeclRefExpr(), vd);
          bit = Initialized;
        }
        return;
      }
      break;
    }
    default:
      break;
  }
  Visit(uo->getSubExpr());
}

void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (const VarDecl *vd = res.getDecl()) {
      // We assume that DeclRefExprs wrapped in an lvalue-to-rvalue cast
      // cannot be block-level expressions. Therefore, we determine if
      // a DeclRefExpr is involved in a "load" by comparing it to the current
      // DeclRefExpr found when analyzing the last lvalue-to-rvalue CastExpr.
      // Here we update 'currentDR' to be the one associated with this
      // lvalue-to-rvalue cast. Then, when we analyze the DeclRefExpr, we
      // will know that we are not computing its lvalue for other purposes
      // than to perform a load.
      SaveAndRestore<const DeclRefExpr*> lastDR(currentDR,
                                                res.getDeclRefExpr());
      Visit(ce->getSubExpr());
      // Loads inside a '(void)' cast are deliberate discards; don't warn.
      if (currentVoidCast != ce && vals[vd] == Uninitialized) {
        reportUninit(res.getDeclRefExpr(), vd);
        // Don't cascade warnings.
        vals[vd] = Initialized;
      }
      return;
    }
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;  -- remember the operand so the load it contains
      // is not reported as a use.
      SaveAndRestore<const Expr *>
        lastVoidCast(currentVoidCast, cse->getSubExpr()->IgnoreParens());
      Visit(cse->getSubExpr());
      return;
    }
  }
  Visit(ce->getSubExpr());
}

void TransferFunctions::VisitUnaryExprOrTypeTraitExpr(
                                          UnaryExprOrTypeTraitExpr *se) {
  if (se->getKind() == UETT_SizeOf) {
    // sizeof of a constant-size type evaluates nothing at runtime.
    if (se->getType()->isConstantSizeType())
      return;
    // Handle VLAs.
    Visit(se->getArgumentExpr());
  }
}

//------------------------------------------------------------------------====//
// High-level "driver" logic for uninitialized values analysis.
//====------------------------------------------------------------------------//

/// Recompute the dataflow values for one block: merge the values of its
/// predecessors and run the transfer function over its statements.  Returns
/// true if the block's stored values changed (i.e. a fixed point has not yet
/// been reached).  When 'handler' is non-null, uninitialized uses are
/// reported as a side effect.
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       UninitVariablesHandler *handler = 0,
                       bool flagBlockUses = false) {

  // Blocks in a '&&'/'||' chain propagate separate true/false vectors
  // rather than running the normal transfer function.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getBitVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getBitVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    }
    else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateBitVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    vals.mergeIntoScratch(vals.getBitVector(*I, block), isFirst);
    isFirst = false;
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler, flagBlockUses);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.BlockStmt_Visit(cs->getStmt());
    }
  }
  return vals.updateBitVectorWithScratch(block);
}

/// Entry point: run the uninitialized-values analysis over 'cfg' and report
/// findings through 'handler'.  First iterates to a dataflow fixed point
/// (without reporting), then makes one final reporting pass over all blocks.
void clang::runUninitializedVariablesAnalysis(const DeclContext &dc,
                                              const CFG &cfg,
                                              AnalysisContext &ac,
                                              UninitVariablesHandler &handler) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());

  worklist.enqueueSuccessors(&cfg.getEntry());

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals);
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    runOnBlock(*BI, cfg, ac, vals, &handler, /* flagBlockUses */ true);
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}