AnalysisBasedWarnings.cpp revision df8327c28d293cf7c6952b86dba26863235dcc0f
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines analysis_warnings::[Policy,Executor]. 11// Together they are used by Sema to issue warnings based on inexpensive 12// static analysis algorithms in libAnalysis. 13// 14//===----------------------------------------------------------------------===// 15 16#include "clang/Sema/AnalysisBasedWarnings.h" 17#include "clang/Sema/SemaInternal.h" 18#include "clang/Sema/ScopeInfo.h" 19#include "clang/Basic/SourceManager.h" 20#include "clang/Basic/SourceLocation.h" 21#include "clang/Lex/Preprocessor.h" 22#include "clang/AST/DeclObjC.h" 23#include "clang/AST/DeclCXX.h" 24#include "clang/AST/ExprObjC.h" 25#include "clang/AST/ExprCXX.h" 26#include "clang/AST/StmtObjC.h" 27#include "clang/AST/StmtCXX.h" 28#include "clang/AST/EvaluatedExprVisitor.h" 29#include "clang/AST/StmtVisitor.h" 30#include "clang/Analysis/AnalysisContext.h" 31#include "clang/Analysis/CFG.h" 32#include "clang/Analysis/Analyses/ReachableCode.h" 33#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h" 34#include "clang/Analysis/Analyses/ThreadSafety.h" 35#include "clang/Analysis/CFGStmtMap.h" 36#include "clang/Analysis/Analyses/UninitializedValues.h" 37#include "llvm/ADT/BitVector.h" 38#include "llvm/ADT/FoldingSet.h" 39#include "llvm/ADT/ImmutableMap.h" 40#include "llvm/ADT/PostOrderIterator.h" 41#include "llvm/ADT/SmallVector.h" 42#include "llvm/ADT/StringRef.h" 43#include "llvm/Support/Casting.h" 44#include <algorithm> 45#include <vector> 46 47using namespace clang; 48 49//===----------------------------------------------------------------------===// 50// Unreachable code analysis. 
//===----------------------------------------------------------------------===//

namespace {
  /// Adapts the reachable-code analysis callback interface to Sema's
  /// diagnostic machinery: each unreachable region found is reported as a
  /// -Wunreachable-code warning at the given location/ranges.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

// Classification of how control can leave the end of a function body.
enum ControlFlowKind {
  UnknownFallThrough,       // No CFG available; we can't tell.
  NeverFallThrough,         // Never falls off the end, but may return.
  MaybeFallThrough,         // Might or might not fall off the end.
  AlwaysFallThrough,        // Always falls off the end.
  NeverFallThroughOrReturn  // Neither falls off the end nor returns.
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      if (AS->isMSAsm()) {
        // MS-style inline assembly may transfer control arbitrarily; treat
        // it as both a return and a fake edge so no warning fires.
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {

// Bundles the diagnostic IDs (and enable/disable logic) used when reporting
// fall-off-the-end problems, parameterized over whether we are checking a
// function/method ("funMode") or a block literal.
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  bool funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    if (!isVirtualMethod)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = true;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = false;
    return D;
  }

  // Returns true when every diagnostic this struct could emit is disabled
  // for the declaration at FuncLoc, so the (expensive) CFG-based check can
  // be skipped entirely.
  bool checkDiagnostics(Diagnostic &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == Diagnostic::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == Diagnostic::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == Diagnostic::Ignored);
    }

    // For blocks.
    return ReturnsVoid && !HasNoReturn
      && (!ReturnsVoid ||
          D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
            == Diagnostic::Ignored);
  }
};

}

/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
// Computes ReturnsVoid/HasNoReturn for the declaration (function, ObjC
// method, or block), then classifies control flow with CheckFallThrough and
// emits the diagnostic selected by CD.  Diagnostics are skipped up front when
// CD.checkDiagnostics says they are all disabled.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    // 'noreturn' may come from the attribute or from the function type.
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
      FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, derive return/noreturn from the block pointer's
    // underlying function type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  Diagnostic &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // diag_NeverFallThroughOrReturn is 0 for virtual methods, which
        // suppresses the "consider marking noreturn" suggestion.
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
379class ContainsReference : public EvaluatedExprVisitor<ContainsReference> { 380 bool FoundReference; 381 const DeclRefExpr *Needle; 382 383public: 384 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle) 385 : EvaluatedExprVisitor<ContainsReference>(Context), 386 FoundReference(false), Needle(Needle) {} 387 388 void VisitExpr(Expr *E) { 389 // Stop evaluating if we already have a reference. 390 if (FoundReference) 391 return; 392 393 EvaluatedExprVisitor<ContainsReference>::VisitExpr(E); 394 } 395 396 void VisitDeclRefExpr(DeclRefExpr *E) { 397 if (E == Needle) 398 FoundReference = true; 399 else 400 EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E); 401 } 402 403 bool doesContainReference() const { return FoundReference; } 404}; 405} 406 407static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) { 408 // Don't issue a fixit if there is already an initializer. 409 if (VD->getInit()) 410 return false; 411 412 // Suggest possible initialization (if any). 413 const char *initialization = 0; 414 QualType VariableTy = VD->getType().getCanonicalType(); 415 416 if (VariableTy->isObjCObjectPointerType() || 417 VariableTy->isBlockPointerType()) { 418 // Check if 'nil' is defined. 419 if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("nil"))) 420 initialization = " = nil"; 421 else 422 initialization = " = 0"; 423 } 424 else if (VariableTy->isRealFloatingType()) 425 initialization = " = 0.0"; 426 else if (VariableTy->isBooleanType() && S.Context.getLangOptions().CPlusPlus) 427 initialization = " = false"; 428 else if (VariableTy->isEnumeralType()) 429 return false; 430 else if (VariableTy->isPointerType() || VariableTy->isMemberPointerType()) { 431 if (S.Context.getLangOptions().CPlusPlus0x) 432 initialization = " = nullptr"; 433 // Check if 'NULL' is defined. 
434 else if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("NULL"))) 435 initialization = " = NULL"; 436 else 437 initialization = " = 0"; 438 } 439 else if (VariableTy->isScalarType()) 440 initialization = " = 0"; 441 442 if (initialization) { 443 SourceLocation loc = S.PP.getLocForEndOfToken(VD->getLocEnd()); 444 S.Diag(loc, diag::note_var_fixit_add_initialization) << VD->getDeclName() 445 << FixItHint::CreateInsertion(loc, initialization); 446 return true; 447 } 448 return false; 449} 450 451/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an 452/// uninitialized variable. This manages the different forms of diagnostic 453/// emitted for particular types of uses. Returns true if the use was diagnosed 454/// as a warning. If a pariticular use is one we omit warnings for, returns 455/// false. 456static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD, 457 const Expr *E, bool isAlwaysUninit) { 458 bool isSelfInit = false; 459 460 if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) { 461 if (isAlwaysUninit) { 462 // Inspect the initializer of the variable declaration which is 463 // being referenced prior to its initialization. We emit 464 // specialized diagnostics for self-initialization, and we 465 // specifically avoid warning about self references which take the 466 // form of: 467 // 468 // int x = x; 469 // 470 // This is used to indicate to GCC that 'x' is intentionally left 471 // uninitialized. Proven code paths which access 'x' in 472 // an uninitialized state after this will still warn. 473 // 474 // TODO: Should we suppress maybe-uninitialized warnings for 475 // variables initialized in this way? 
476 if (const Expr *Initializer = VD->getInit()) { 477 if (DRE == Initializer->IgnoreParenImpCasts()) 478 return false; 479 480 ContainsReference CR(S.Context, DRE); 481 CR.Visit(const_cast<Expr*>(Initializer)); 482 isSelfInit = CR.doesContainReference(); 483 } 484 if (isSelfInit) { 485 S.Diag(DRE->getLocStart(), 486 diag::warn_uninit_self_reference_in_init) 487 << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange(); 488 } else { 489 S.Diag(DRE->getLocStart(), diag::warn_uninit_var) 490 << VD->getDeclName() << DRE->getSourceRange(); 491 } 492 } else { 493 S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var) 494 << VD->getDeclName() << DRE->getSourceRange(); 495 } 496 } else { 497 const BlockExpr *BE = cast<BlockExpr>(E); 498 S.Diag(BE->getLocStart(), 499 isAlwaysUninit ? diag::warn_uninit_var_captured_by_block 500 : diag::warn_maybe_uninit_var_captured_by_block) 501 << VD->getDeclName(); 502 } 503 504 // Report where the variable was declared when the use wasn't within 505 // the initializer of that declaration & we didn't already suggest 506 // an initialization fixit. 
507 if (!isSelfInit && !SuggestInitializationFixit(S, VD)) 508 S.Diag(VD->getLocStart(), diag::note_uninit_var_def) 509 << VD->getDeclName(); 510 511 return true; 512} 513 514typedef std::pair<const Expr*, bool> UninitUse; 515 516namespace { 517struct SLocSort { 518 bool operator()(const UninitUse &a, const UninitUse &b) { 519 SourceLocation aLoc = a.first->getLocStart(); 520 SourceLocation bLoc = b.first->getLocStart(); 521 return aLoc.getRawEncoding() < bLoc.getRawEncoding(); 522 } 523}; 524 525class UninitValsDiagReporter : public UninitVariablesHandler { 526 Sema &S; 527 typedef SmallVector<UninitUse, 2> UsesVec; 528 typedef llvm::DenseMap<const VarDecl *, UsesVec*> UsesMap; 529 UsesMap *uses; 530 531public: 532 UninitValsDiagReporter(Sema &S) : S(S), uses(0) {} 533 ~UninitValsDiagReporter() { 534 flushDiagnostics(); 535 } 536 537 void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd, 538 bool isAlwaysUninit) { 539 if (!uses) 540 uses = new UsesMap(); 541 542 UsesVec *&vec = (*uses)[vd]; 543 if (!vec) 544 vec = new UsesVec(); 545 546 vec->push_back(std::make_pair(ex, isAlwaysUninit)); 547 } 548 549 void flushDiagnostics() { 550 if (!uses) 551 return; 552 553 for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) { 554 const VarDecl *vd = i->first; 555 UsesVec *vec = i->second; 556 557 // Sort the uses by their SourceLocations. While not strictly 558 // guaranteed to produce them in line/column order, this will provide 559 // a stable ordering. 560 std::sort(vec->begin(), vec->end(), SLocSort()); 561 562 for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve; 563 ++vi) { 564 if (DiagnoseUninitializedUse(S, vd, vi->first, 565 /*isAlwaysUninit=*/vi->second)) 566 // Skip further diagnostics for this variable. We try to warn only on 567 // the first point at which a variable is used uninitialized. 
568 break; 569 } 570 571 delete vec; 572 } 573 delete uses; 574 } 575}; 576} 577 578 579//===----------------------------------------------------------------------===// 580// -Wthread-safety 581//===----------------------------------------------------------------------===// 582namespace clang { 583namespace thread_safety { 584typedef std::pair<SourceLocation, PartialDiagnostic> DelayedDiag; 585typedef llvm::SmallVector<DelayedDiag, 4> DiagList; 586 587struct SortDiagBySourceLocation { 588 Sema &S; 589 SortDiagBySourceLocation(Sema &S) : S(S) {} 590 591 bool operator()(const DelayedDiag &left, const DelayedDiag &right) { 592 // Although this call will be slow, this is only called when outputting 593 // multiple warnings. 594 return S.getSourceManager().isBeforeInTranslationUnit(left.first, 595 right.first); 596 } 597}; 598 599class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler { 600 Sema &S; 601 DiagList Warnings; 602 603 // Helper functions 604 void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) { 605 PartialDiagnostic Warning = S.PDiag(DiagID) << LockName; 606 Warnings.push_back(DelayedDiag(Loc, Warning)); 607 } 608 609 public: 610 ThreadSafetyReporter(Sema &S) : S(S) {} 611 612 /// \brief Emit all buffered diagnostics in order of sourcelocation. 613 /// We need to output diagnostics produced while iterating through 614 /// the lockset in deterministic order, so this function orders diagnostics 615 /// and outputs them. 
616 void emitDiagnostics() { 617 SortDiagBySourceLocation SortDiagBySL(S); 618 sort(Warnings.begin(), Warnings.end(), SortDiagBySL); 619 for (DiagList::iterator I = Warnings.begin(), E = Warnings.end(); 620 I != E; ++I) 621 S.Diag(I->first, I->second); 622 } 623 624 void handleInvalidLockExp(SourceLocation Loc) { 625 PartialDiagnostic Warning = S.PDiag(diag::warn_cannot_resolve_lock) << Loc; 626 Warnings.push_back(DelayedDiag(Loc, Warning)); 627 } 628 void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) { 629 warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc); 630 } 631 632 void handleDoubleLock(Name LockName, SourceLocation Loc) { 633 warnLockMismatch(diag::warn_double_lock, LockName, Loc); 634 } 635 636 void handleMutexHeldEndOfScope(Name LockName, SourceLocation Loc){ 637 warnLockMismatch(diag::warn_lock_at_end_of_scope, LockName, Loc); 638 } 639 640 void handleNoLockLoopEntry(Name LockName, SourceLocation Loc) { 641 warnLockMismatch(diag::warn_expecting_lock_held_on_loop, LockName, Loc); 642 } 643 644 void handleNoUnlock(Name LockName, llvm::StringRef FunName, 645 SourceLocation Loc) { 646 PartialDiagnostic Warning = 647 S.PDiag(diag::warn_no_unlock) << LockName << FunName; 648 Warnings.push_back(DelayedDiag(Loc, Warning)); 649 } 650 651 void handleExclusiveAndShared(Name LockName, SourceLocation Loc1, 652 SourceLocation Loc2) { 653 PartialDiagnostic Warning = 654 S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName; 655 PartialDiagnostic Note = 656 S.PDiag(diag::note_lock_exclusive_and_shared) << LockName; 657 Warnings.push_back(DelayedDiag(Loc1, Warning)); 658 Warnings.push_back(DelayedDiag(Loc2, Note)); 659 } 660 661 void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, 662 AccessKind AK, SourceLocation Loc) { 663 assert((POK == POK_VarAccess || POK == POK_VarDereference) 664 && "Only works for variables"); 665 unsigned DiagID = POK == POK_VarAccess? 
666 diag::warn_variable_requires_any_lock: 667 diag::warn_var_deref_requires_any_lock; 668 PartialDiagnostic Warning = S.PDiag(DiagID) 669 << D->getName() << getLockKindFromAccessKind(AK); 670 Warnings.push_back(DelayedDiag(Loc, Warning)); 671 } 672 673 void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK, 674 Name LockName, LockKind LK, SourceLocation Loc) { 675 unsigned DiagID = 0; 676 switch (POK) { 677 case POK_VarAccess: 678 DiagID = diag::warn_variable_requires_lock; 679 break; 680 case POK_VarDereference: 681 DiagID = diag::warn_var_deref_requires_lock; 682 break; 683 case POK_FunctionCall: 684 DiagID = diag::warn_fun_requires_lock; 685 break; 686 } 687 PartialDiagnostic Warning = S.PDiag(DiagID) 688 << D->getName() << LockName << LK; 689 Warnings.push_back(DelayedDiag(Loc, Warning)); 690 } 691 692 void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) { 693 PartialDiagnostic Warning = 694 S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName; 695 Warnings.push_back(DelayedDiag(Loc, Warning)); 696 } 697}; 698} 699} 700 701//===----------------------------------------------------------------------===// 702// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based 703// warnings on a function, method, or block. 
704//===----------------------------------------------------------------------===// 705 706clang::sema::AnalysisBasedWarnings::Policy::Policy() { 707 enableCheckFallThrough = 1; 708 enableCheckUnreachable = 0; 709 enableThreadSafetyAnalysis = 0; 710} 711 712clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s) 713 : S(s), 714 NumFunctionsAnalyzed(0), 715 NumFunctionsWithBadCFGs(0), 716 NumCFGBlocks(0), 717 MaxCFGBlocksPerFunction(0), 718 NumUninitAnalysisFunctions(0), 719 NumUninitAnalysisVariables(0), 720 MaxUninitAnalysisVariablesPerFunction(0), 721 NumUninitAnalysisBlockVisits(0), 722 MaxUninitAnalysisBlockVisitsPerFunction(0) { 723 Diagnostic &D = S.getDiagnostics(); 724 DefaultPolicy.enableCheckUnreachable = (unsigned) 725 (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) != 726 Diagnostic::Ignored); 727 DefaultPolicy.enableThreadSafetyAnalysis = (unsigned) 728 (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) != 729 Diagnostic::Ignored); 730 731} 732 733static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) { 734 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 735 i = fscope->PossiblyUnreachableDiags.begin(), 736 e = fscope->PossiblyUnreachableDiags.end(); 737 i != e; ++i) { 738 const sema::PossiblyUnreachableDiag &D = *i; 739 S.Diag(D.Loc, D.PD); 740 } 741} 742 743void clang::sema:: 744AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P, 745 sema::FunctionScopeInfo *fscope, 746 const Decl *D, const BlockExpr *blkExpr) { 747 748 // We avoid doing analysis-based warnings when there are errors for 749 // two reasons: 750 // (1) The CFGs often can't be constructed (if the body is invalid), so 751 // don't bother trying. 752 // (2) The code already has problems; running the analysis just takes more 753 // time. 754 Diagnostic &Diags = S.getDiagnostics(); 755 756 // Do not do any analysis for declarations in system headers if we are 757 // going to just ignore them. 
758 if (Diags.getSuppressSystemWarnings() && 759 S.SourceMgr.isInSystemHeader(D->getLocation())) 760 return; 761 762 // For code in dependent contexts, we'll do this at instantiation time. 763 if (cast<DeclContext>(D)->isDependentContext()) 764 return; 765 766 if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) { 767 // Flush out any possibly unreachable diagnostics. 768 flushDiagnostics(S, fscope); 769 return; 770 } 771 772 const Stmt *Body = D->getBody(); 773 assert(Body); 774 775 AnalysisContext AC(D, 0); 776 777 // Don't generate EH edges for CallExprs as we'd like to avoid the n^2 778 // explosion for destrutors that can result and the compile time hit. 779 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true; 780 AC.getCFGBuildOptions().AddEHEdges = false; 781 AC.getCFGBuildOptions().AddInitializers = true; 782 AC.getCFGBuildOptions().AddImplicitDtors = true; 783 784 // Force that certain expressions appear as CFGElements in the CFG. This 785 // is used to speed up various analyses. 786 // FIXME: This isn't the right factoring. This is here for initial 787 // prototyping, but we need a way for analyses to say what expressions they 788 // expect to always be CFGElements and then fill in the BuildOptions 789 // appropriately. This is essentially a layering violation. 790 if (P.enableCheckUnreachable) { 791 // Unreachable code analysis requires a linearized CFG. 792 AC.getCFGBuildOptions().setAllAlwaysAdd(); 793 } 794 else { 795 AC.getCFGBuildOptions() 796 .setAlwaysAdd(Stmt::BinaryOperatorClass) 797 .setAlwaysAdd(Stmt::BlockExprClass) 798 .setAlwaysAdd(Stmt::CStyleCastExprClass) 799 .setAlwaysAdd(Stmt::DeclRefExprClass) 800 .setAlwaysAdd(Stmt::ImplicitCastExprClass) 801 .setAlwaysAdd(Stmt::UnaryOperatorClass); 802 } 803 804 // Construct the analysis context with the specified CFG build options. 805 806 // Emit delayed diagnostics. 
807 if (!fscope->PossiblyUnreachableDiags.empty()) { 808 bool analyzed = false; 809 810 // Register the expressions with the CFGBuilder. 811 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 812 i = fscope->PossiblyUnreachableDiags.begin(), 813 e = fscope->PossiblyUnreachableDiags.end(); 814 i != e; ++i) { 815 if (const Stmt *stmt = i->stmt) 816 AC.registerForcedBlockExpression(stmt); 817 } 818 819 if (AC.getCFG()) { 820 analyzed = true; 821 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 822 i = fscope->PossiblyUnreachableDiags.begin(), 823 e = fscope->PossiblyUnreachableDiags.end(); 824 i != e; ++i) 825 { 826 const sema::PossiblyUnreachableDiag &D = *i; 827 bool processed = false; 828 if (const Stmt *stmt = i->stmt) { 829 const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt); 830 assert(block); 831 if (CFGReverseBlockReachabilityAnalysis *cra = AC.getCFGReachablityAnalysis()) { 832 // Can this block be reached from the entrance? 833 if (cra->isReachable(&AC.getCFG()->getEntry(), block)) 834 S.Diag(D.Loc, D.PD); 835 processed = true; 836 } 837 } 838 if (!processed) { 839 // Emit the warning anyway if we cannot map to a basic block. 840 S.Diag(D.Loc, D.PD); 841 } 842 } 843 } 844 845 if (!analyzed) 846 flushDiagnostics(S, fscope); 847 } 848 849 850 // Warning: check missing 'return' 851 if (P.enableCheckFallThrough) { 852 const CheckFallThroughDiagnostics &CD = 853 (isa<BlockDecl>(D) ? 
CheckFallThroughDiagnostics::MakeForBlock() 854 : CheckFallThroughDiagnostics::MakeForFunction(D)); 855 CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC); 856 } 857 858 // Warning: check for unreachable code 859 if (P.enableCheckUnreachable) 860 CheckUnreachable(S, AC); 861 862 // Check for thread safety violations 863 if (P.enableThreadSafetyAnalysis) { 864 thread_safety::ThreadSafetyReporter Reporter(S); 865 thread_safety::runThreadSafetyAnalysis(AC, Reporter); 866 Reporter.emitDiagnostics(); 867 } 868 869 if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart()) 870 != Diagnostic::Ignored || 871 Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart()) 872 != Diagnostic::Ignored) { 873 if (CFG *cfg = AC.getCFG()) { 874 UninitValsDiagReporter reporter(S); 875 UninitVariablesAnalysisStats stats; 876 std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats)); 877 runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC, 878 reporter, stats); 879 880 if (S.CollectStats && stats.NumVariablesAnalyzed > 0) { 881 ++NumUninitAnalysisFunctions; 882 NumUninitAnalysisVariables += stats.NumVariablesAnalyzed; 883 NumUninitAnalysisBlockVisits += stats.NumBlockVisits; 884 MaxUninitAnalysisVariablesPerFunction = 885 std::max(MaxUninitAnalysisVariablesPerFunction, 886 stats.NumVariablesAnalyzed); 887 MaxUninitAnalysisBlockVisitsPerFunction = 888 std::max(MaxUninitAnalysisBlockVisitsPerFunction, 889 stats.NumBlockVisits); 890 } 891 } 892 } 893 894 // Collect statistics about the CFG if it was built. 895 if (S.CollectStats && AC.isCFGBuilt()) { 896 ++NumFunctionsAnalyzed; 897 if (CFG *cfg = AC.getCFG()) { 898 // If we successfully built a CFG for this context, record some more 899 // detail information about it. 
900 NumCFGBlocks += cfg->getNumBlockIDs(); 901 MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction, 902 cfg->getNumBlockIDs()); 903 } else { 904 ++NumFunctionsWithBadCFGs; 905 } 906 } 907} 908 909void clang::sema::AnalysisBasedWarnings::PrintStats() const { 910 llvm::errs() << "\n*** Analysis Based Warnings Stats:\n"; 911 912 unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs; 913 unsigned AvgCFGBlocksPerFunction = 914 !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt; 915 llvm::errs() << NumFunctionsAnalyzed << " functions analyzed (" 916 << NumFunctionsWithBadCFGs << " w/o CFGs).\n" 917 << " " << NumCFGBlocks << " CFG blocks built.\n" 918 << " " << AvgCFGBlocksPerFunction 919 << " average CFG blocks per function.\n" 920 << " " << MaxCFGBlocksPerFunction 921 << " max CFG blocks per function.\n"; 922 923 unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0 924 : NumUninitAnalysisVariables/NumUninitAnalysisFunctions; 925 unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0 926 : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions; 927 llvm::errs() << NumUninitAnalysisFunctions 928 << " functions analyzed for uninitialiazed variables\n" 929 << " " << NumUninitAnalysisVariables << " variables analyzed.\n" 930 << " " << AvgUninitVariablesPerFunction 931 << " average variables per function.\n" 932 << " " << MaxUninitAnalysisVariablesPerFunction 933 << " max variables per function.\n" 934 << " " << NumUninitAnalysisBlockVisits << " block visits.\n" 935 << " " << AvgUninitBlockVisitsPerFunction 936 << " average block visits per function.\n" 937 << " " << MaxUninitAnalysisBlockVisitsPerFunction 938 << " max block visits per function.\n"; 939} 940