AnalysisBasedWarnings.cpp revision 039970aae2b7e59ac4de5f147dfbc3c91b275b9e
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines analysis_warnings::[Policy,Executor]. 11// Together they are used by Sema to issue warnings based on inexpensive 12// static analysis algorithms in libAnalysis. 13// 14//===----------------------------------------------------------------------===// 15 16#include "clang/Sema/AnalysisBasedWarnings.h" 17#include "clang/AST/DeclCXX.h" 18#include "clang/AST/DeclObjC.h" 19#include "clang/AST/EvaluatedExprVisitor.h" 20#include "clang/AST/ExprCXX.h" 21#include "clang/AST/ExprObjC.h" 22#include "clang/AST/ParentMap.h" 23#include "clang/AST/RecursiveASTVisitor.h" 24#include "clang/AST/StmtCXX.h" 25#include "clang/AST/StmtObjC.h" 26#include "clang/AST/StmtVisitor.h" 27#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h" 28#include "clang/Analysis/Analyses/Consumed.h" 29#include "clang/Analysis/Analyses/ReachableCode.h" 30#include "clang/Analysis/Analyses/ThreadSafety.h" 31#include "clang/Analysis/Analyses/UninitializedValues.h" 32#include "clang/Analysis/AnalysisContext.h" 33#include "clang/Analysis/CFG.h" 34#include "clang/Analysis/CFGStmtMap.h" 35#include "clang/Basic/SourceLocation.h" 36#include "clang/Basic/SourceManager.h" 37#include "clang/Lex/Lexer.h" 38#include "clang/Lex/Preprocessor.h" 39#include "clang/Sema/ScopeInfo.h" 40#include "clang/Sema/SemaInternal.h" 41#include "llvm/ADT/ArrayRef.h" 42#include "llvm/ADT/BitVector.h" 43#include "llvm/ADT/FoldingSet.h" 44#include "llvm/ADT/ImmutableMap.h" 45#include "llvm/ADT/MapVector.h" 46#include "llvm/ADT/PostOrderIterator.h" 47#include "llvm/ADT/SmallString.h" 48#include "llvm/ADT/SmallVector.h" 49#include "llvm/ADT/StringRef.h" 50#include "llvm/Support/Casting.h" 51#include <algorithm> 52#include <deque> 53#include <iterator> 54#include <vector> 55 56using namespace clang; 57 58//===----------------------------------------------------------------------===// 59// Unreachable code analysis. 60//===----------------------------------------------------------------------===// 61 62namespace { 63 class UnreachableCodeHandler : public reachable_code::Callback { 64 Sema &S; 65 public: 66 UnreachableCodeHandler(Sema &s) : S(s) {} 67 68 void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) { 69 S.Diag(L, diag::warn_unreachable) << R1 << R2; 70 } 71 }; 72} 73 74/// CheckUnreachable - Check for unreachable code. 75static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) { 76 UnreachableCodeHandler UC(S); 77 reachable_code::FindUnreachableCode(AC, UC); 78} 79 80//===----------------------------------------------------------------------===// 81// Check for missing return value. 82//===----------------------------------------------------------------------===// 83 84enum ControlFlowKind { 85 UnknownFallThrough, 86 NeverFallThrough, 87 MaybeFallThrough, 88 AlwaysFallThrough, 89 NeverFallThroughOrReturn 90}; 91 92/// CheckFallThrough - Check that we don't fall off the end of a 93/// Statement that should return a value. 94/// 95/// \returns AlwaysFallThrough iff we always fall off the end of the statement, 96/// MaybeFallThrough iff we might or might not fall off the end, 97/// NeverFallThroughOrReturn iff we never fall off the end of the statement or 98/// return. 
We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves dead code in, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now that we know what is live, we check the live predecessors of the exit
  // block and look for fall-through paths, being careful to ignore normal
  // returns and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return.  They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
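      // Illustrative sketch (assumes MSVC-style inline asm): the return value
      // can be produced implicitly, e.g.
      //
      //   int f() {
      //     __asm mov eax, 42   // value "returned" in EAX, no return stmt
      //   }
      //
      // so the conservative choice here is to treat the block both as a fake
      // edge and as a live return, which keeps us from issuing a hard
      // missing-return warning on such functions.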
192 HasFakeEdge = true; 193 HasLiveReturn = true; 194 continue; 195 } 196 if (isa<CXXTryStmt>(S)) { 197 HasAbnormalEdge = true; 198 continue; 199 } 200 if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit()) 201 == B.succ_end()) { 202 HasAbnormalEdge = true; 203 continue; 204 } 205 206 HasPlainEdge = true; 207 } 208 if (!HasPlainEdge) { 209 if (HasLiveReturn) 210 return NeverFallThrough; 211 return NeverFallThroughOrReturn; 212 } 213 if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn) 214 return MaybeFallThrough; 215 // This says AlwaysFallThrough for calls to functions that are not marked 216 // noreturn, that don't return. If people would like this warning to be more 217 // accurate, such functions should be marked as noreturn. 218 return AlwaysFallThrough; 219} 220 221namespace { 222 223struct CheckFallThroughDiagnostics { 224 unsigned diag_MaybeFallThrough_HasNoReturn; 225 unsigned diag_MaybeFallThrough_ReturnsNonVoid; 226 unsigned diag_AlwaysFallThrough_HasNoReturn; 227 unsigned diag_AlwaysFallThrough_ReturnsNonVoid; 228 unsigned diag_NeverFallThroughOrReturn; 229 enum { Function, Block, Lambda } funMode; 230 SourceLocation FuncLoc; 231 232 static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) { 233 CheckFallThroughDiagnostics D; 234 D.FuncLoc = Func->getLocation(); 235 D.diag_MaybeFallThrough_HasNoReturn = 236 diag::warn_falloff_noreturn_function; 237 D.diag_MaybeFallThrough_ReturnsNonVoid = 238 diag::warn_maybe_falloff_nonvoid_function; 239 D.diag_AlwaysFallThrough_HasNoReturn = 240 diag::warn_falloff_noreturn_function; 241 D.diag_AlwaysFallThrough_ReturnsNonVoid = 242 diag::warn_falloff_nonvoid_function; 243 244 // Don't suggest that virtual functions be marked "noreturn", since they 245 // might be overridden by non-noreturn functions. 
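    // Illustrative sketch: the suggestion could be wrong for overriders, e.g.
    //
    //   struct Base    { virtual int run() { abort(); } };       // never returns
    //   struct Derived : Base { int run() override { return 0; } };
    //
    // Derived::run() does return, so suggesting "noreturn" for the virtual
    // Base::run() would be misleading; the suggestion is suppressed below.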
246 bool isVirtualMethod = false; 247 if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func)) 248 isVirtualMethod = Method->isVirtual(); 249 250 // Don't suggest that template instantiations be marked "noreturn" 251 bool isTemplateInstantiation = false; 252 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func)) 253 isTemplateInstantiation = Function->isTemplateInstantiation(); 254 255 if (!isVirtualMethod && !isTemplateInstantiation) 256 D.diag_NeverFallThroughOrReturn = 257 diag::warn_suggest_noreturn_function; 258 else 259 D.diag_NeverFallThroughOrReturn = 0; 260 261 D.funMode = Function; 262 return D; 263 } 264 265 static CheckFallThroughDiagnostics MakeForBlock() { 266 CheckFallThroughDiagnostics D; 267 D.diag_MaybeFallThrough_HasNoReturn = 268 diag::err_noreturn_block_has_return_expr; 269 D.diag_MaybeFallThrough_ReturnsNonVoid = 270 diag::err_maybe_falloff_nonvoid_block; 271 D.diag_AlwaysFallThrough_HasNoReturn = 272 diag::err_noreturn_block_has_return_expr; 273 D.diag_AlwaysFallThrough_ReturnsNonVoid = 274 diag::err_falloff_nonvoid_block; 275 D.diag_NeverFallThroughOrReturn = 276 diag::warn_suggest_noreturn_block; 277 D.funMode = Block; 278 return D; 279 } 280 281 static CheckFallThroughDiagnostics MakeForLambda() { 282 CheckFallThroughDiagnostics D; 283 D.diag_MaybeFallThrough_HasNoReturn = 284 diag::err_noreturn_lambda_has_return_expr; 285 D.diag_MaybeFallThrough_ReturnsNonVoid = 286 diag::warn_maybe_falloff_nonvoid_lambda; 287 D.diag_AlwaysFallThrough_HasNoReturn = 288 diag::err_noreturn_lambda_has_return_expr; 289 D.diag_AlwaysFallThrough_ReturnsNonVoid = 290 diag::warn_falloff_nonvoid_lambda; 291 D.diag_NeverFallThroughOrReturn = 0; 292 D.funMode = Lambda; 293 return D; 294 } 295 296 bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid, 297 bool HasNoReturn) const { 298 if (funMode == Function) { 299 return (ReturnsVoid || 300 D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function, 301 FuncLoc) == DiagnosticsEngine::Ignored) 302 && (!HasNoReturn || 303 D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr, 304 FuncLoc) == DiagnosticsEngine::Ignored) 305 && (!ReturnsVoid || 306 D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc) 307 == DiagnosticsEngine::Ignored); 308 } 309 310 // For blocks / lambdas. 311 return ReturnsVoid && !HasNoReturn 312 && ((funMode == Lambda) || 313 D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc) 314 == DiagnosticsEngine::Ignored); 315 } 316}; 317 318} 319 320/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a 321/// function that should return a value. Check that we don't fall off the end 322/// of a noreturn function. We assume that functions and blocks not marked 323/// noreturn will return. 
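// Illustrative sketch of the source patterns this check flags (diagnostic
// wording approximate):
//
//   int f(bool b) {
//     if (b) return 1;
//   }              // warn_maybe_falloff_nonvoid_function:
//                  //   "control may reach end of non-void function"
//
//   int g() {}     // warn_falloff_nonvoid_function:
//                  //   "control reaches end of non-void function"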
324static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body, 325 const BlockExpr *blkExpr, 326 const CheckFallThroughDiagnostics& CD, 327 AnalysisDeclContext &AC) { 328 329 bool ReturnsVoid = false; 330 bool HasNoReturn = false; 331 332 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 333 ReturnsVoid = FD->getResultType()->isVoidType(); 334 HasNoReturn = FD->isNoReturn(); 335 } 336 else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) { 337 ReturnsVoid = MD->getResultType()->isVoidType(); 338 HasNoReturn = MD->hasAttr<NoReturnAttr>(); 339 } 340 else if (isa<BlockDecl>(D)) { 341 QualType BlockTy = blkExpr->getType(); 342 if (const FunctionType *FT = 343 BlockTy->getPointeeType()->getAs<FunctionType>()) { 344 if (FT->getResultType()->isVoidType()) 345 ReturnsVoid = true; 346 if (FT->getNoReturnAttr()) 347 HasNoReturn = true; 348 } 349 } 350 351 DiagnosticsEngine &Diags = S.getDiagnostics(); 352 353 // Short circuit for compilation speed. 354 if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn)) 355 return; 356 357 // FIXME: Function try block 358 if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) { 359 switch (CheckFallThrough(AC)) { 360 case UnknownFallThrough: 361 break; 362 363 case MaybeFallThrough: 364 if (HasNoReturn) 365 S.Diag(Compound->getRBracLoc(), 366 CD.diag_MaybeFallThrough_HasNoReturn); 367 else if (!ReturnsVoid) 368 S.Diag(Compound->getRBracLoc(), 369 CD.diag_MaybeFallThrough_ReturnsNonVoid); 370 break; 371 case AlwaysFallThrough: 372 if (HasNoReturn) 373 S.Diag(Compound->getRBracLoc(), 374 CD.diag_AlwaysFallThrough_HasNoReturn); 375 else if (!ReturnsVoid) 376 S.Diag(Compound->getRBracLoc(), 377 CD.diag_AlwaysFallThrough_ReturnsNonVoid); 378 break; 379 case NeverFallThroughOrReturn: 380 if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) { 381 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 382 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn) 383 << 0 << FD; 384 } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) { 385 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn) 386 << 1 << MD; 387 } else { 388 S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn); 389 } 390 } 391 break; 392 case NeverFallThrough: 393 break; 394 } 395 } 396} 397 398//===----------------------------------------------------------------------===// 399// -Wuninitialized 400//===----------------------------------------------------------------------===// 401 402namespace { 403/// ContainsReference - A visitor class to search for references to 404/// a particular declaration (the needle) within any evaluated component of an 405/// expression (recursively). 406class ContainsReference : public EvaluatedExprVisitor<ContainsReference> { 407 bool FoundReference; 408 const DeclRefExpr *Needle; 409 410public: 411 ContainsReference(ASTContext &Context, const DeclRefExpr *Needle) 412 : EvaluatedExprVisitor<ContainsReference>(Context), 413 FoundReference(false), Needle(Needle) {} 414 415 void VisitExpr(Expr *E) { 416 // Stop evaluating if we already have a reference. 
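    // For example, once the reference to 'x' in 'int x = f(x, x);' has been
    // found, there is no need to keep walking the rest of the initializer.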
417 if (FoundReference) 418 return; 419 420 EvaluatedExprVisitor<ContainsReference>::VisitExpr(E); 421 } 422 423 void VisitDeclRefExpr(DeclRefExpr *E) { 424 if (E == Needle) 425 FoundReference = true; 426 else 427 EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E); 428 } 429 430 bool doesContainReference() const { return FoundReference; } 431}; 432} 433 434static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) { 435 QualType VariableTy = VD->getType().getCanonicalType(); 436 if (VariableTy->isBlockPointerType() && 437 !VD->hasAttr<BlocksAttr>()) { 438 S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName() 439 << FixItHint::CreateInsertion(VD->getLocation(), "__block "); 440 return true; 441 } 442 443 // Don't issue a fixit if there is already an initializer. 444 if (VD->getInit()) 445 return false; 446 447 // Suggest possible initialization (if any). 448 std::string Init = S.getFixItZeroInitializerForType(VariableTy); 449 if (Init.empty()) 450 return false; 451 452 // Don't suggest a fixit inside macros. 453 if (VD->getLocEnd().isMacroID()) 454 return false; 455 456 SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd()); 457 458 S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName() 459 << FixItHint::CreateInsertion(Loc, Init); 460 return true; 461} 462 463/// Create a fixit to remove an if-like statement, on the assumption that its 464/// condition is CondVal. 465static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then, 466 const Stmt *Else, bool CondVal, 467 FixItHint &Fixit1, FixItHint &Fixit2) { 468 if (CondVal) { 469 // If condition is always true, remove all but the 'then'. 470 Fixit1 = FixItHint::CreateRemoval( 471 CharSourceRange::getCharRange(If->getLocStart(), 472 Then->getLocStart())); 473 if (Else) { 474 SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken( 475 Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts()); 476 Fixit2 = FixItHint::CreateRemoval( 477 SourceRange(ElseKwLoc, Else->getLocEnd())); 478 } 479 } else { 480 // If condition is always false, remove all but the 'else'. 481 if (Else) 482 Fixit1 = FixItHint::CreateRemoval( 483 CharSourceRange::getCharRange(If->getLocStart(), 484 Else->getLocStart())); 485 else 486 Fixit1 = FixItHint::CreateRemoval(If->getSourceRange()); 487 } 488} 489 490/// DiagUninitUse -- Helper function to produce a diagnostic for an 491/// uninitialized use of a variable. 492static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use, 493 bool IsCapturedByBlock) { 494 bool Diagnosed = false; 495 496 // Diagnose each branch which leads to a sometimes-uninitialized use. 497 for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end(); 498 I != E; ++I) { 499 assert(Use.getKind() == UninitUse::Sometimes); 500 501 const Expr *User = Use.getUser(); 502 const Stmt *Term = I->Terminator; 503 504 // Information used when building the diagnostic. 505 unsigned DiagKind; 506 StringRef Str; 507 SourceRange Range; 508 509 // FixIts to suppress the diagnostic by removing the dead condition. 510 // For all binary terminators, branch 0 is taken if the condition is true, 511 // and branch 1 is taken if the condition is false. 512 int RemoveDiagKind = -1; 513 const char *FixitStr = 514 S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false") 515 : (I->Output ? "1" : "0"); 516 FixItHint Fixit1, Fixit2; 517 518 switch (Term->getStmtClass()) { 519 default: 520 // Don't know how to report this. 
Just fall back to 'may be used 521 // uninitialized'. This happens for range-based for, which the user 522 // can't explicitly fix. 523 // FIXME: This also happens if the first use of a variable is always 524 // uninitialized, eg "for (int n; n < 10; ++n)". We should report that 525 // with the 'is uninitialized' diagnostic. 526 continue; 527 528 // "condition is true / condition is false". 529 case Stmt::IfStmtClass: { 530 const IfStmt *IS = cast<IfStmt>(Term); 531 DiagKind = 0; 532 Str = "if"; 533 Range = IS->getCond()->getSourceRange(); 534 RemoveDiagKind = 0; 535 CreateIfFixit(S, IS, IS->getThen(), IS->getElse(), 536 I->Output, Fixit1, Fixit2); 537 break; 538 } 539 case Stmt::ConditionalOperatorClass: { 540 const ConditionalOperator *CO = cast<ConditionalOperator>(Term); 541 DiagKind = 0; 542 Str = "?:"; 543 Range = CO->getCond()->getSourceRange(); 544 RemoveDiagKind = 0; 545 CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(), 546 I->Output, Fixit1, Fixit2); 547 break; 548 } 549 case Stmt::BinaryOperatorClass: { 550 const BinaryOperator *BO = cast<BinaryOperator>(Term); 551 if (!BO->isLogicalOp()) 552 continue; 553 DiagKind = 0; 554 Str = BO->getOpcodeStr(); 555 Range = BO->getLHS()->getSourceRange(); 556 RemoveDiagKind = 0; 557 if ((BO->getOpcode() == BO_LAnd && I->Output) || 558 (BO->getOpcode() == BO_LOr && !I->Output)) 559 // true && y -> y, false || y -> y. 560 Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(), 561 BO->getOperatorLoc())); 562 else 563 // false && y -> false, true || y -> true. 564 Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr); 565 break; 566 } 567 568 // "loop is entered / loop is exited". 569 case Stmt::WhileStmtClass: 570 DiagKind = 1; 571 Str = "while"; 572 Range = cast<WhileStmt>(Term)->getCond()->getSourceRange(); 573 RemoveDiagKind = 1; 574 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr); 575 break; 576 case Stmt::ForStmtClass: 577 DiagKind = 1; 578 Str = "for"; 579 Range = cast<ForStmt>(Term)->getCond()->getSourceRange(); 580 RemoveDiagKind = 1; 581 if (I->Output) 582 Fixit1 = FixItHint::CreateRemoval(Range); 583 else 584 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr); 585 break; 586 587 // "condition is true / loop is exited". 588 case Stmt::DoStmtClass: 589 DiagKind = 2; 590 Str = "do"; 591 Range = cast<DoStmt>(Term)->getCond()->getSourceRange(); 592 RemoveDiagKind = 1; 593 Fixit1 = FixItHint::CreateReplacement(Range, FixitStr); 594 break; 595 596 // "switch case is taken". 597 case Stmt::CaseStmtClass: 598 DiagKind = 3; 599 Str = "case"; 600 Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange(); 601 break; 602 case Stmt::DefaultStmtClass: 603 DiagKind = 3; 604 Str = "default"; 605 Range = cast<DefaultStmt>(Term)->getDefaultLoc(); 606 break; 607 } 608 609 S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var) 610 << VD->getDeclName() << IsCapturedByBlock << DiagKind 611 << Str << I->Output << Range; 612 S.Diag(User->getLocStart(), diag::note_uninit_var_use) 613 << IsCapturedByBlock << User->getSourceRange(); 614 if (RemoveDiagKind != -1) 615 S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond) 616 << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2; 617 618 Diagnosed = true; 619 } 620 621 if (!Diagnosed) 622 S.Diag(Use.getUser()->getLocStart(), 623 Use.getKind() == UninitUse::Always ? 
diag::warn_uninit_var 624 : diag::warn_maybe_uninit_var) 625 << VD->getDeclName() << IsCapturedByBlock 626 << Use.getUser()->getSourceRange(); 627} 628 629/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an 630/// uninitialized variable. This manages the different forms of diagnostic 631/// emitted for particular types of uses. Returns true if the use was diagnosed 632/// as a warning. If a particular use is one we omit warnings for, returns 633/// false. 634static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD, 635 const UninitUse &Use, 636 bool alwaysReportSelfInit = false) { 637 638 if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) { 639 // Inspect the initializer of the variable declaration which is 640 // being referenced prior to its initialization. We emit 641 // specialized diagnostics for self-initialization, and we 642 // specifically avoid warning about self references which take the 643 // form of: 644 // 645 // int x = x; 646 // 647 // This is used to indicate to GCC that 'x' is intentionally left 648 // uninitialized. Proven code paths which access 'x' in 649 // an uninitialized state after this will still warn. 650 if (const Expr *Initializer = VD->getInit()) { 651 if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts()) 652 return false; 653 654 ContainsReference CR(S.Context, DRE); 655 CR.Visit(const_cast<Expr*>(Initializer)); 656 if (CR.doesContainReference()) { 657 S.Diag(DRE->getLocStart(), 658 diag::warn_uninit_self_reference_in_init) 659 << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange(); 660 return true; 661 } 662 } 663 664 DiagUninitUse(S, VD, Use, false); 665 } else { 666 const BlockExpr *BE = cast<BlockExpr>(Use.getUser()); 667 if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>()) 668 S.Diag(BE->getLocStart(), 669 diag::warn_uninit_byref_blockvar_captured_by_block) 670 << VD->getDeclName(); 671 else 672 DiagUninitUse(S, VD, Use, true); 673 } 674 675 // Report where the variable was declared when the use wasn't within 676 // the initializer of that declaration & we didn't already suggest 677 // an initialization fixit. 678 if (!SuggestInitializationFixit(S, VD)) 679 S.Diag(VD->getLocStart(), diag::note_uninit_var_def) 680 << VD->getDeclName(); 681 682 return true; 683} 684 685namespace { 686 class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> { 687 public: 688 FallthroughMapper(Sema &S) 689 : FoundSwitchStatements(false), 690 S(S) { 691 } 692 693 bool foundSwitchStatements() const { return FoundSwitchStatements; } 694 695 void markFallthroughVisited(const AttributedStmt *Stmt) { 696 bool Found = FallthroughStmts.erase(Stmt); 697 assert(Found); 698 (void)Found; 699 } 700 701 typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts; 702 703 const AttrStmts &getFallthroughStmts() const { 704 return FallthroughStmts; 705 } 706 707 void fillReachableBlocks(CFG *Cfg) { 708 assert(ReachableBlocks.empty() && "ReachableBlocks already filled"); 709 std::deque<const CFGBlock *> BlockQueue; 710 711 ReachableBlocks.insert(&Cfg->getEntry()); 712 BlockQueue.push_back(&Cfg->getEntry()); 713 // Mark all case blocks reachable to avoid problems with switching on 714 // constants, covered enums, etc. 715 // These blocks can contain fall-through annotations, and we don't want to 716 // issue a warn_fallthrough_attr_unreachable for them. 
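      // Illustrative sketch: when switching on a constant, e.g.
      //
      //   switch (1) {
      //   case 0: f(); [[clang::fallthrough]];   // edge into 'case 0' pruned
      //   case 1: g(); break;
      //   }
      //
      // the CFG prunes the edge into 'case 0', so without seeding the case
      // blocks here its annotation would be reported as unreachable.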
717 for (CFG::iterator I = Cfg->begin(), E = Cfg->end(); I != E; ++I) { 718 const CFGBlock *B = *I; 719 const Stmt *L = B->getLabel(); 720 if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B)) 721 BlockQueue.push_back(B); 722 } 723 724 while (!BlockQueue.empty()) { 725 const CFGBlock *P = BlockQueue.front(); 726 BlockQueue.pop_front(); 727 for (CFGBlock::const_succ_iterator I = P->succ_begin(), 728 E = P->succ_end(); 729 I != E; ++I) { 730 if (*I && ReachableBlocks.insert(*I)) 731 BlockQueue.push_back(*I); 732 } 733 } 734 } 735 736 bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) { 737 assert(!ReachableBlocks.empty() && "ReachableBlocks empty"); 738 739 int UnannotatedCnt = 0; 740 AnnotatedCnt = 0; 741 742 std::deque<const CFGBlock*> BlockQueue; 743 744 std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue)); 745 746 while (!BlockQueue.empty()) { 747 const CFGBlock *P = BlockQueue.front(); 748 BlockQueue.pop_front(); 749 750 const Stmt *Term = P->getTerminator(); 751 if (Term && isa<SwitchStmt>(Term)) 752 continue; // Switch statement, good. 753 754 const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel()); 755 if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end()) 756 continue; // Previous case label has no statements, good. 757 758 const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel()); 759 if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end()) 760 continue; // Case label is preceded with a normal label, good. 761 762 if (!ReachableBlocks.count(P)) { 763 for (CFGBlock::const_reverse_iterator ElemIt = P->rbegin(), 764 ElemEnd = P->rend(); 765 ElemIt != ElemEnd; ++ElemIt) { 766 if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>()) { 767 if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) { 768 S.Diag(AS->getLocStart(), 769 diag::warn_fallthrough_attr_unreachable); 770 markFallthroughVisited(AS); 771 ++AnnotatedCnt; 772 break; 773 } 774 // Don't care about other unreachable statements. 775 } 776 } 777 // If there are no unreachable statements, this may be a special 778 // case in CFG: 779 // case X: { 780 // A a; // A has a destructor. 781 // break; 782 // } 783 // // <<<< This place is represented by a 'hanging' CFG block. 784 // case Y: 785 continue; 786 } 787 788 const Stmt *LastStmt = getLastStmt(*P); 789 if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) { 790 markFallthroughVisited(AS); 791 ++AnnotatedCnt; 792 continue; // Fallthrough annotation, good. 793 } 794 795 if (!LastStmt) { // This block contains no executable statements. 796 // Traverse its predecessors. 797 std::copy(P->pred_begin(), P->pred_end(), 798 std::back_inserter(BlockQueue)); 799 continue; 800 } 801 802 ++UnannotatedCnt; 803 } 804 return !!UnannotatedCnt; 805 } 806 807 // RecursiveASTVisitor setup. 808 bool shouldWalkTypesOfTypeLocs() const { return false; } 809 810 bool VisitAttributedStmt(AttributedStmt *S) { 811 if (asFallThroughAttr(S)) 812 FallthroughStmts.insert(S); 813 return true; 814 } 815 816 bool VisitSwitchStmt(SwitchStmt *S) { 817 FoundSwitchStatements = true; 818 return true; 819 } 820 821 // We don't want to traverse local type declarations. We analyze their 822 // methods separately. 
bool TraverseDecl(Decl *D) { return true; }

  private:

    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return 0;
    }

    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminator())
        return Term;
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                            ElemEnd = B.rend();
           ElemIt != ElemEnd; ++ElemIt) {
        if (Optional<CFGStmt> CS = ElemIt->getAs<CFGStmt>())
          return CS->getStmt();
      }
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {}  case Y:
      //   case X: ;   case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return 0;
    }

    bool FoundSwitchStatements;
    AttrStmts FallthroughStmts;
    Sema &S;
    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
  };
}

static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Only perform this analysis when using C++11.  There is no good workflow
  // for this warning when not using C++11: there is no good way to silence
  // the warning (no attribute is available) unless we are using C++11's
  // support for generalized attributes.  One could use pragmas to silence the
  // warning, but as a general solution that is gross and not in the spirit of
  // this warning.
  //
  // NOTE: This is an intermediate solution.  There are ongoing discussions on
  // how to properly support this warning outside of C++11 with an annotation.
  if (!AC.getASTContext().getLangOpts().CPlusPlus11)
    return;

  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  if (!FM.foundSwitchStatements())
    return;

  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock *B = *I;
    const Stmt *Label = B->getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus11) {
        const Stmt *Term = B->getTerminator();
        // Skip empty cases.
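        // e.g. for
        //
        //   case 1:            // empty block with a single successor
        //   case 2: break;
        //
        // we walk forward from the empty 'case 1' block before deciding
        // whether suggesting a [[clang::fallthrough]] insertion makes sense.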
914 while (B->empty() && !Term && B->succ_size() == 1) { 915 B = *B->succ_begin(); 916 Term = B->getTerminator(); 917 } 918 if (!(B->empty() && Term && isa<BreakStmt>(Term))) { 919 Preprocessor &PP = S.getPreprocessor(); 920 TokenValue Tokens[] = { 921 tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"), 922 tok::coloncolon, PP.getIdentifierInfo("fallthrough"), 923 tok::r_square, tok::r_square 924 }; 925 StringRef AnnotationSpelling = "[[clang::fallthrough]]"; 926 StringRef MacroName = PP.getLastMacroWithSpelling(L, Tokens); 927 if (!MacroName.empty()) 928 AnnotationSpelling = MacroName; 929 SmallString<64> TextToInsert(AnnotationSpelling); 930 TextToInsert += "; "; 931 S.Diag(L, diag::note_insert_fallthrough_fixit) << 932 AnnotationSpelling << 933 FixItHint::CreateInsertion(L, TextToInsert); 934 } 935 } 936 S.Diag(L, diag::note_insert_break_fixit) << 937 FixItHint::CreateInsertion(L, "break; "); 938 } 939 } 940 941 const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts(); 942 for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(), 943 E = Fallthroughs.end(); 944 I != E; ++I) { 945 S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement); 946 } 947 948} 949 950namespace { 951typedef std::pair<const Stmt *, 952 sema::FunctionScopeInfo::WeakObjectUseMap::const_iterator> 953 StmtUsesPair; 954 955class StmtUseSorter { 956 const SourceManager &SM; 957 958public: 959 explicit StmtUseSorter(const SourceManager &SM) : SM(SM) { } 960 961 bool operator()(const StmtUsesPair &LHS, const StmtUsesPair &RHS) { 962 return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(), 963 RHS.first->getLocStart()); 964 } 965}; 966} 967 968static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM, 969 const Stmt *S) { 970 assert(S); 971 972 do { 973 switch (S->getStmtClass()) { 974 case Stmt::ForStmtClass: 975 case Stmt::WhileStmtClass: 976 case Stmt::CXXForRangeStmtClass: 977 case Stmt::ObjCForCollectionStmtClass: 978 return true; 979 case Stmt::DoStmtClass: { 980 const Expr *Cond = cast<DoStmt>(S)->getCond(); 981 llvm::APSInt Val; 982 if (!Cond->EvaluateAsInt(Val, Ctx)) 983 return true; 984 return Val.getBoolValue(); 985 } 986 default: 987 break; 988 } 989 } while ((S = PM.getParent(S))); 990 991 return false; 992} 993 994 995static void diagnoseRepeatedUseOfWeak(Sema &S, 996 const sema::FunctionScopeInfo *CurFn, 997 const Decl *D, 998 const ParentMap &PM) { 999 typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy; 1000 typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap; 1001 typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector; 1002 1003 ASTContext &Ctx = S.getASTContext(); 1004 1005 const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses(); 1006 1007 // Extract all weak objects that are referenced more than once. 1008 SmallVector<StmtUsesPair, 8> UsesByStmt; 1009 for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end(); 1010 I != E; ++I) { 1011 const WeakUseVector &Uses = I->second; 1012 1013 // Find the first read of the weak object. 1014 WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end(); 1015 for ( ; UI != UE; ++UI) { 1016 if (UI->isUnsafe()) 1017 break; 1018 } 1019 1020 // If there were only writes to this object, don't warn. 1021 if (UI == UE) 1022 continue; 1023 1024 // If there was only one read, followed by any number of writes, and the 1025 // read is not within a loop, don't warn. 
Additionally, don't warn in a 1026 // loop if the base object is a local variable -- local variables are often 1027 // changed in loops. 1028 if (UI == Uses.begin()) { 1029 WeakUseVector::const_iterator UI2 = UI; 1030 for (++UI2; UI2 != UE; ++UI2) 1031 if (UI2->isUnsafe()) 1032 break; 1033 1034 if (UI2 == UE) { 1035 if (!isInLoop(Ctx, PM, UI->getUseExpr())) 1036 continue; 1037 1038 const WeakObjectProfileTy &Profile = I->first; 1039 if (!Profile.isExactProfile()) 1040 continue; 1041 1042 const NamedDecl *Base = Profile.getBase(); 1043 if (!Base) 1044 Base = Profile.getProperty(); 1045 assert(Base && "A profile always has a base or property."); 1046 1047 if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base)) 1048 if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base)) 1049 continue; 1050 } 1051 } 1052 1053 UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I)); 1054 } 1055 1056 if (UsesByStmt.empty()) 1057 return; 1058 1059 // Sort by first use so that we emit the warnings in a deterministic order. 1060 std::sort(UsesByStmt.begin(), UsesByStmt.end(), 1061 StmtUseSorter(S.getSourceManager())); 1062 1063 // Classify the current code body for better warning text. 1064 // This enum should stay in sync with the cases in 1065 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak. 1066 // FIXME: Should we use a common classification enum and the same set of 1067 // possibilities all throughout Sema? 1068 enum { 1069 Function, 1070 Method, 1071 Block, 1072 Lambda 1073 } FunctionKind; 1074 1075 if (isa<sema::BlockScopeInfo>(CurFn)) 1076 FunctionKind = Block; 1077 else if (isa<sema::LambdaScopeInfo>(CurFn)) 1078 FunctionKind = Lambda; 1079 else if (isa<ObjCMethodDecl>(D)) 1080 FunctionKind = Method; 1081 else 1082 FunctionKind = Function; 1083 1084 // Iterate through the sorted problems and emit warnings for each. 1085 for (SmallVectorImpl<StmtUsesPair>::const_iterator I = UsesByStmt.begin(), 1086 E = UsesByStmt.end(); 1087 I != E; ++I) { 1088 const Stmt *FirstRead = I->first; 1089 const WeakObjectProfileTy &Key = I->second->first; 1090 const WeakUseVector &Uses = I->second->second; 1091 1092 // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy 1093 // may not contain enough information to determine that these are different 1094 // properties. We can only be 100% sure of a repeated use in certain cases, 1095 // and we adjust the diagnostic kind accordingly so that the less certain 1096 // case can be turned off if it is too noisy. 1097 unsigned DiagKind; 1098 if (Key.isExactProfile()) 1099 DiagKind = diag::warn_arc_repeated_use_of_weak; 1100 else 1101 DiagKind = diag::warn_arc_possible_repeated_use_of_weak; 1102 1103 // Classify the weak object being accessed for better warning text. 1104 // This enum should stay in sync with the cases in 1105 // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak. 1106 enum { 1107 Variable, 1108 Property, 1109 ImplicitProperty, 1110 Ivar 1111 } ObjectKind; 1112 1113 const NamedDecl *D = Key.getProperty(); 1114 if (isa<VarDecl>(D)) 1115 ObjectKind = Variable; 1116 else if (isa<ObjCPropertyDecl>(D)) 1117 ObjectKind = Property; 1118 else if (isa<ObjCMethodDecl>(D)) 1119 ObjectKind = ImplicitProperty; 1120 else if (isa<ObjCIvarDecl>(D)) 1121 ObjectKind = Ivar; 1122 else 1123 llvm_unreachable("Unexpected weak object kind!"); 1124 1125 // Show the first time the object was read. 
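    // Illustrative sketch (Objective-C, ARC) of the pattern being reported:
    //
    //   if (self.weakDelegate)             // first read of the weak object
    //     [self.weakDelegate didFinish];   // may already be nil by now
    //
    // The notes below point at each additional access; the usual fix is to
    // read the weak object once into a strong local variable.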
1126 S.Diag(FirstRead->getLocStart(), DiagKind) 1127 << int(ObjectKind) << D << int(FunctionKind) 1128 << FirstRead->getSourceRange(); 1129 1130 // Print all the other accesses as notes. 1131 for (WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end(); 1132 UI != UE; ++UI) { 1133 if (UI->getUseExpr() == FirstRead) 1134 continue; 1135 S.Diag(UI->getUseExpr()->getLocStart(), 1136 diag::note_arc_weak_also_accessed_here) 1137 << UI->getUseExpr()->getSourceRange(); 1138 } 1139 } 1140} 1141 1142 1143namespace { 1144struct SLocSort { 1145 bool operator()(const UninitUse &a, const UninitUse &b) { 1146 // Prefer a more confident report over a less confident one. 1147 if (a.getKind() != b.getKind()) 1148 return a.getKind() > b.getKind(); 1149 SourceLocation aLoc = a.getUser()->getLocStart(); 1150 SourceLocation bLoc = b.getUser()->getLocStart(); 1151 return aLoc.getRawEncoding() < bLoc.getRawEncoding(); 1152 } 1153}; 1154 1155class UninitValsDiagReporter : public UninitVariablesHandler { 1156 Sema &S; 1157 typedef SmallVector<UninitUse, 2> UsesVec; 1158 typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType; 1159 // Prefer using MapVector to DenseMap, so that iteration order will be 1160 // the same as insertion order. This is needed to obtain a deterministic 1161 // order of diagnostics when calling flushDiagnostics(). 1162 typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap; 1163 UsesMap *uses; 1164 1165public: 1166 UninitValsDiagReporter(Sema &S) : S(S), uses(0) {} 1167 ~UninitValsDiagReporter() { 1168 flushDiagnostics(); 1169 } 1170 1171 MappedType &getUses(const VarDecl *vd) { 1172 if (!uses) 1173 uses = new UsesMap(); 1174 1175 MappedType &V = (*uses)[vd]; 1176 if (!V.getPointer()) 1177 V.setPointer(new UsesVec()); 1178 1179 return V; 1180 } 1181 1182 void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) { 1183 getUses(vd).getPointer()->push_back(use); 1184 } 1185 1186 void handleSelfInit(const VarDecl *vd) { 1187 getUses(vd).setInt(true); 1188 } 1189 1190 void flushDiagnostics() { 1191 if (!uses) 1192 return; 1193 1194 for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) { 1195 const VarDecl *vd = i->first; 1196 const MappedType &V = i->second; 1197 1198 UsesVec *vec = V.getPointer(); 1199 bool hasSelfInit = V.getInt(); 1200 1201 // Specially handle the case where we have uses of an uninitialized 1202 // variable, but the root cause is an idiomatic self-init. We want 1203 // to report the diagnostic at the self-init since that is the root cause. 1204 if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec)) 1205 DiagnoseUninitializedUse(S, vd, 1206 UninitUse(vd->getInit()->IgnoreParenCasts(), 1207 /* isAlwaysUninit */ true), 1208 /* alwaysReportSelfInit */ true); 1209 else { 1210 // Sort the uses by their SourceLocations. While not strictly 1211 // guaranteed to produce them in line/column order, this will provide 1212 // a stable ordering. 1213 std::sort(vec->begin(), vec->end(), SLocSort()); 1214 1215 for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve; 1216 ++vi) { 1217 // If we have self-init, downgrade all uses to 'may be uninitialized'. 1218 UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi; 1219 1220 if (DiagnoseUninitializedUse(S, vd, Use)) 1221 // Skip further diagnostics for this variable. We try to warn only 1222 // on the first point at which a variable is used uninitialized. 1223 break; 1224 } 1225 } 1226 1227 // Release the uses vector. 
delete vec;
    }
    delete uses;
  }

private:
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
    for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
      if (i->getKind() == UninitUse::Always) {
        return true;
      }
    }
    return false;
  }
};
}

namespace clang {
namespace {
typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
typedef std::list<DelayedDiag> DiagList;

struct SortDiagBySourceLocation {
  SourceManager &SM;
  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}

  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
    // Although this call will be slow, this is only called when outputting
    // multiple warnings.
    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
  }
};
}}

//===----------------------------------------------------------------------===//
// -Wthread-safety
//===----------------------------------------------------------------------===//
namespace clang {
namespace thread_safety {
namespace {
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
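  // Illustrative sketch of code these handlers report on (assuming the usual
  // thread-safety annotation macros, e.g. GUARDED_BY wrapping
  // __attribute__((guarded_by(...)))):
  //
  //   Mutex Mu;
  //   int Data GUARDED_BY(Mu);
  //   void f() { Data = 1; }    // warn_variable_requires_lock
  //   void g() { Mu.Lock(); }   // warn_no_unlock at the end of the function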
1292 void emitDiagnostics() { 1293 Warnings.sort(SortDiagBySourceLocation(S.getSourceManager())); 1294 for (DiagList::iterator I = Warnings.begin(), E = Warnings.end(); 1295 I != E; ++I) { 1296 S.Diag(I->first.first, I->first.second); 1297 const OptionalNotes &Notes = I->second; 1298 for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI) 1299 S.Diag(Notes[NoteI].first, Notes[NoteI].second); 1300 } 1301 } 1302 1303 void handleInvalidLockExp(SourceLocation Loc) { 1304 PartialDiagnosticAt Warning(Loc, 1305 S.PDiag(diag::warn_cannot_resolve_lock) << Loc); 1306 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1307 } 1308 void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) { 1309 warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc); 1310 } 1311 1312 void handleDoubleLock(Name LockName, SourceLocation Loc) { 1313 warnLockMismatch(diag::warn_double_lock, LockName, Loc); 1314 } 1315 1316 void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked, 1317 SourceLocation LocEndOfScope, 1318 LockErrorKind LEK){ 1319 unsigned DiagID = 0; 1320 switch (LEK) { 1321 case LEK_LockedSomePredecessors: 1322 DiagID = diag::warn_lock_some_predecessors; 1323 break; 1324 case LEK_LockedSomeLoopIterations: 1325 DiagID = diag::warn_expecting_lock_held_on_loop; 1326 break; 1327 case LEK_LockedAtEndOfFunction: 1328 DiagID = diag::warn_no_unlock; 1329 break; 1330 case LEK_NotLockedAtEndOfFunction: 1331 DiagID = diag::warn_expecting_locked; 1332 break; 1333 } 1334 if (LocEndOfScope.isInvalid()) 1335 LocEndOfScope = FunEndLocation; 1336 1337 PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName); 1338 if (LocLocked.isValid()) { 1339 PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here)); 1340 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note))); 1341 return; 1342 } 1343 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1344 } 1345 1346 1347 void handleExclusiveAndShared(Name LockName, SourceLocation Loc1, 1348 SourceLocation Loc2) { 1349 PartialDiagnosticAt Warning( 1350 Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName); 1351 PartialDiagnosticAt Note( 1352 Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName); 1353 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note))); 1354 } 1355 1356 void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK, 1357 AccessKind AK, SourceLocation Loc) { 1358 assert((POK == POK_VarAccess || POK == POK_VarDereference) 1359 && "Only works for variables"); 1360 unsigned DiagID = POK == POK_VarAccess? 
1361 diag::warn_variable_requires_any_lock: 1362 diag::warn_var_deref_requires_any_lock; 1363 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) 1364 << D->getNameAsString() << getLockKindFromAccessKind(AK)); 1365 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1366 } 1367 1368 void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK, 1369 Name LockName, LockKind LK, SourceLocation Loc, 1370 Name *PossibleMatch) { 1371 unsigned DiagID = 0; 1372 if (PossibleMatch) { 1373 switch (POK) { 1374 case POK_VarAccess: 1375 DiagID = diag::warn_variable_requires_lock_precise; 1376 break; 1377 case POK_VarDereference: 1378 DiagID = diag::warn_var_deref_requires_lock_precise; 1379 break; 1380 case POK_FunctionCall: 1381 DiagID = diag::warn_fun_requires_lock_precise; 1382 break; 1383 } 1384 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) 1385 << D->getNameAsString() << LockName << LK); 1386 PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match) 1387 << *PossibleMatch); 1388 Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note))); 1389 } else { 1390 switch (POK) { 1391 case POK_VarAccess: 1392 DiagID = diag::warn_variable_requires_lock; 1393 break; 1394 case POK_VarDereference: 1395 DiagID = diag::warn_var_deref_requires_lock; 1396 break; 1397 case POK_FunctionCall: 1398 DiagID = diag::warn_fun_requires_lock; 1399 break; 1400 } 1401 PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) 1402 << D->getNameAsString() << LockName << LK); 1403 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1404 } 1405 } 1406 1407 void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) { 1408 PartialDiagnosticAt Warning(Loc, 1409 S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName); 1410 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1411 } 1412}; 1413} 1414} 1415} 1416 1417//===----------------------------------------------------------------------===// 1418// -Wconsumed 1419//===----------------------------------------------------------------------===// 1420 1421namespace clang { 1422namespace consumed { 1423namespace { 1424class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase { 1425 1426 Sema &S; 1427 DiagList Warnings; 1428 1429public: 1430 1431 ConsumedWarningsHandler(Sema &S) : S(S) {} 1432 1433 void emitDiagnostics() { 1434 Warnings.sort(SortDiagBySourceLocation(S.getSourceManager())); 1435 1436 for (DiagList::iterator I = Warnings.begin(), E = Warnings.end(); 1437 I != E; ++I) { 1438 1439 const OptionalNotes &Notes = I->second; 1440 S.Diag(I->first.first, I->first.second); 1441 1442 for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI) { 1443 S.Diag(Notes[NoteI].first, Notes[NoteI].second); 1444 } 1445 } 1446 } 1447 1448 /// Warn about unnecessary-test errors. 1449 /// \param VariableName -- The name of the variable that holds the unique 1450 /// value. 1451 /// 1452 /// \param Loc -- The SourceLocation of the unnecessary test. 1453 void warnUnnecessaryTest(StringRef VariableName, StringRef VariableState, 1454 SourceLocation Loc) { 1455 1456 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unnecessary_test) << 1457 VariableName << VariableState); 1458 1459 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1460 } 1461 1462 /// Warn about use-while-consumed errors. 1463 /// \param MethodName -- The name of the method that was incorrectly 1464 /// invoked. 1465 /// 1466 /// \param Loc -- The SourceLocation of the method invocation. 
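  // Illustrative sketch of a use-after-consume this handler family reports
  // (attribute spellings are approximate and have varied across Clang
  // versions):
  //
  //   class __attribute__((consumable(unconsumed))) File {
  //   public:
  //     void read() __attribute__((callable_when("unconsumed")));
  //     void close() __attribute__((set_typestate(consumed)));
  //   };
  //   void f(File &F) { F.close(); F.read(); }   // warn_use_while_consumed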
1467 void warnUseOfTempWhileConsumed(StringRef MethodName, SourceLocation Loc) { 1468 1469 PartialDiagnosticAt Warning(Loc, S.PDiag( 1470 diag::warn_use_of_temp_while_consumed) << MethodName); 1471 1472 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1473 } 1474 1475 /// Warn about use-in-unknown-state errors. 1476 /// \param MethodName -- The name of the method that was incorrectly 1477 /// invoked. 1478 /// 1479 /// \param Loc -- The SourceLocation of the method invocation. 1480 void warnUseOfTempInUnknownState(StringRef MethodName, SourceLocation Loc) { 1481 1482 PartialDiagnosticAt Warning(Loc, S.PDiag( 1483 diag::warn_use_of_temp_in_unknown_state) << MethodName); 1484 1485 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1486 } 1487 1488 /// Warn about use-while-consumed errors. 1489 /// \param MethodName -- The name of the method that was incorrectly 1490 /// invoked. 1491 /// 1492 /// \param VariableName -- The name of the variable that holds the unique 1493 /// value. 1494 /// 1495 /// \param Loc -- The SourceLocation of the method invocation. 1496 void warnUseWhileConsumed(StringRef MethodName, StringRef VariableName, 1497 SourceLocation Loc) { 1498 1499 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_while_consumed) << 1500 MethodName << VariableName); 1501 1502 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1503 } 1504 1505 /// Warn about use-in-unknown-state errors. 1506 /// \param MethodName -- The name of the method that was incorrectly 1507 /// invoked. 1508 /// 1509 /// \param VariableName -- The name of the variable that holds the unique 1510 /// value. 1511 /// 1512 /// \param Loc -- The SourceLocation of the method invocation. 1513 void warnUseInUnknownState(StringRef MethodName, StringRef VariableName, 1514 SourceLocation Loc) { 1515 1516 PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_unknown_state) << 1517 MethodName << VariableName); 1518 1519 Warnings.push_back(DelayedDiag(Warning, OptionalNotes())); 1520 } 1521}; 1522}}} 1523 1524//===----------------------------------------------------------------------===// 1525// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based 1526// warnings on a function, method, or block. 
1527//===----------------------------------------------------------------------===// 1528 1529clang::sema::AnalysisBasedWarnings::Policy::Policy() { 1530 enableCheckFallThrough = 1; 1531 enableCheckUnreachable = 0; 1532 enableThreadSafetyAnalysis = 0; 1533 enableConsumedAnalysis = 0; 1534} 1535 1536clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s) 1537 : S(s), 1538 NumFunctionsAnalyzed(0), 1539 NumFunctionsWithBadCFGs(0), 1540 NumCFGBlocks(0), 1541 MaxCFGBlocksPerFunction(0), 1542 NumUninitAnalysisFunctions(0), 1543 NumUninitAnalysisVariables(0), 1544 MaxUninitAnalysisVariablesPerFunction(0), 1545 NumUninitAnalysisBlockVisits(0), 1546 MaxUninitAnalysisBlockVisitsPerFunction(0) { 1547 DiagnosticsEngine &D = S.getDiagnostics(); 1548 DefaultPolicy.enableCheckUnreachable = (unsigned) 1549 (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) != 1550 DiagnosticsEngine::Ignored); 1551 DefaultPolicy.enableThreadSafetyAnalysis = (unsigned) 1552 (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) != 1553 DiagnosticsEngine::Ignored); 1554 DefaultPolicy.enableConsumedAnalysis = 1555 (unsigned)(D.getDiagnosticLevel(diag::warn_use_while_consumed, 1556 SourceLocation()) != 1557 DiagnosticsEngine::Ignored); 1558} 1559 1560static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) { 1561 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 1562 i = fscope->PossiblyUnreachableDiags.begin(), 1563 e = fscope->PossiblyUnreachableDiags.end(); 1564 i != e; ++i) { 1565 const sema::PossiblyUnreachableDiag &D = *i; 1566 S.Diag(D.Loc, D.PD); 1567 } 1568} 1569 1570void clang::sema:: 1571AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P, 1572 sema::FunctionScopeInfo *fscope, 1573 const Decl *D, const BlockExpr *blkExpr) { 1574 1575 // We avoid doing analysis-based warnings when there are errors for 1576 // two reasons: 1577 // (1) The CFGs often can't be constructed (if the body is invalid), so 1578 // don't bother trying. 1579 // (2) The code already has problems; running the analysis just takes more 1580 // time. 1581 DiagnosticsEngine &Diags = S.getDiagnostics(); 1582 1583 // Do not do any analysis for declarations in system headers if we are 1584 // going to just ignore them. 1585 if (Diags.getSuppressSystemWarnings() && 1586 S.SourceMgr.isInSystemHeader(D->getLocation())) 1587 return; 1588 1589 // For code in dependent contexts, we'll do this at instantiation time. 1590 if (cast<DeclContext>(D)->isDependentContext()) 1591 return; 1592 1593 if (Diags.hasUncompilableErrorOccurred() || Diags.hasFatalErrorOccurred()) { 1594 // Flush out any possibly unreachable diagnostics. 1595 flushDiagnostics(S, fscope); 1596 return; 1597 } 1598 1599 const Stmt *Body = D->getBody(); 1600 assert(Body); 1601 1602 AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D); 1603 1604 // Don't generate EH edges for CallExprs as we'd like to avoid the n^2 1605 // explosion for destrutors that can result and the compile time hit. 1606 AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true; 1607 AC.getCFGBuildOptions().AddEHEdges = false; 1608 AC.getCFGBuildOptions().AddInitializers = true; 1609 AC.getCFGBuildOptions().AddImplicitDtors = true; 1610 AC.getCFGBuildOptions().AddTemporaryDtors = true; 1611 1612 // Force that certain expressions appear as CFGElements in the CFG. This 1613 // is used to speed up various analyses. 1614 // FIXME: This isn't the right factoring. 
This is here for initial 1615 // prototyping, but we need a way for analyses to say what expressions they 1616 // expect to always be CFGElements and then fill in the BuildOptions 1617 // appropriately. This is essentially a layering violation. 1618 if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis || 1619 P.enableConsumedAnalysis) { 1620 // Unreachable code analysis and thread safety require a linearized CFG. 1621 AC.getCFGBuildOptions().setAllAlwaysAdd(); 1622 } 1623 else { 1624 AC.getCFGBuildOptions() 1625 .setAlwaysAdd(Stmt::BinaryOperatorClass) 1626 .setAlwaysAdd(Stmt::CompoundAssignOperatorClass) 1627 .setAlwaysAdd(Stmt::BlockExprClass) 1628 .setAlwaysAdd(Stmt::CStyleCastExprClass) 1629 .setAlwaysAdd(Stmt::DeclRefExprClass) 1630 .setAlwaysAdd(Stmt::ImplicitCastExprClass) 1631 .setAlwaysAdd(Stmt::UnaryOperatorClass) 1632 .setAlwaysAdd(Stmt::AttributedStmtClass); 1633 } 1634 1635 // Construct the analysis context with the specified CFG build options. 1636 1637 // Emit delayed diagnostics. 1638 if (!fscope->PossiblyUnreachableDiags.empty()) { 1639 bool analyzed = false; 1640 1641 // Register the expressions with the CFGBuilder. 1642 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 1643 i = fscope->PossiblyUnreachableDiags.begin(), 1644 e = fscope->PossiblyUnreachableDiags.end(); 1645 i != e; ++i) { 1646 if (const Stmt *stmt = i->stmt) 1647 AC.registerForcedBlockExpression(stmt); 1648 } 1649 1650 if (AC.getCFG()) { 1651 analyzed = true; 1652 for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator 1653 i = fscope->PossiblyUnreachableDiags.begin(), 1654 e = fscope->PossiblyUnreachableDiags.end(); 1655 i != e; ++i) 1656 { 1657 const sema::PossiblyUnreachableDiag &D = *i; 1658 bool processed = false; 1659 if (const Stmt *stmt = i->stmt) { 1660 const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt); 1661 CFGReverseBlockReachabilityAnalysis *cra = 1662 AC.getCFGReachablityAnalysis(); 1663 // FIXME: We should be able to assert that block is non-null, but 1664 // the CFG analysis can skip potentially-evaluated expressions in 1665 // edge cases; see test/Sema/vla-2.c. 1666 if (block && cra) { 1667 // Can this block be reached from the entrance? 1668 if (cra->isReachable(&AC.getCFG()->getEntry(), block)) 1669 S.Diag(D.Loc, D.PD); 1670 processed = true; 1671 } 1672 } 1673 if (!processed) { 1674 // Emit the warning anyway if we cannot map to a basic block. 1675 S.Diag(D.Loc, D.PD); 1676 } 1677 } 1678 } 1679 1680 if (!analyzed) 1681 flushDiagnostics(S, fscope); 1682 } 1683 1684 1685 // Warning: check missing 'return' 1686 if (P.enableCheckFallThrough) { 1687 const CheckFallThroughDiagnostics &CD = 1688 (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock() 1689 : (isa<CXXMethodDecl>(D) && 1690 cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call && 1691 cast<CXXMethodDecl>(D)->getParent()->isLambda()) 1692 ? CheckFallThroughDiagnostics::MakeForLambda() 1693 : CheckFallThroughDiagnostics::MakeForFunction(D)); 1694 CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC); 1695 } 1696 1697 // Warning: check for unreachable code 1698 if (P.enableCheckUnreachable) { 1699 // Only check for unreachable code on non-template instantiations. 1700 // Different template instantiations can effectively change the control-flow 1701 // and it is very difficult to prove that a snippet of code in a template 1702 // is unreachable for all instantiations. 
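    // For instance, in
    //
    //   template <class T> int bytes() {
    //     if (sizeof(T) >= 8) return 8;
    //     return sizeof(T);
    //   }
    //
    // the second return is dead only for instantiations with large T, so
    // warning about it from one instantiation would be misleading.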
1703 bool isTemplateInstantiation = false; 1704 if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D)) 1705 isTemplateInstantiation = Function->isTemplateInstantiation(); 1706 if (!isTemplateInstantiation) 1707 CheckUnreachable(S, AC); 1708 } 1709 1710 // Check for thread safety violations 1711 if (P.enableThreadSafetyAnalysis) { 1712 SourceLocation FL = AC.getDecl()->getLocation(); 1713 SourceLocation FEL = AC.getDecl()->getLocEnd(); 1714 thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL); 1715 if (Diags.getDiagnosticLevel(diag::warn_thread_safety_beta,D->getLocStart()) 1716 != DiagnosticsEngine::Ignored) 1717 Reporter.setIssueBetaWarnings(true); 1718 1719 thread_safety::runThreadSafetyAnalysis(AC, Reporter); 1720 Reporter.emitDiagnostics(); 1721 } 1722 1723 // Check for violations of consumed properties. 1724 if (P.enableConsumedAnalysis) { 1725 consumed::ConsumedWarningsHandler WarningHandler(S); 1726 consumed::ConsumedAnalyzer Analyzer(WarningHandler); 1727 Analyzer.run(AC); 1728 } 1729 1730 if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart()) 1731 != DiagnosticsEngine::Ignored || 1732 Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart()) 1733 != DiagnosticsEngine::Ignored || 1734 Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart()) 1735 != DiagnosticsEngine::Ignored) { 1736 if (CFG *cfg = AC.getCFG()) { 1737 UninitValsDiagReporter reporter(S); 1738 UninitVariablesAnalysisStats stats; 1739 std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats)); 1740 runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC, 1741 reporter, stats); 1742 1743 if (S.CollectStats && stats.NumVariablesAnalyzed > 0) { 1744 ++NumUninitAnalysisFunctions; 1745 NumUninitAnalysisVariables += stats.NumVariablesAnalyzed; 1746 NumUninitAnalysisBlockVisits += stats.NumBlockVisits; 1747 MaxUninitAnalysisVariablesPerFunction = 1748 std::max(MaxUninitAnalysisVariablesPerFunction, 1749 stats.NumVariablesAnalyzed); 1750 MaxUninitAnalysisBlockVisitsPerFunction = 1751 std::max(MaxUninitAnalysisBlockVisitsPerFunction, 1752 stats.NumBlockVisits); 1753 } 1754 } 1755 } 1756 1757 bool FallThroughDiagFull = 1758 Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough, 1759 D->getLocStart()) != DiagnosticsEngine::Ignored; 1760 bool FallThroughDiagPerFunction = 1761 Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function, 1762 D->getLocStart()) != DiagnosticsEngine::Ignored; 1763 if (FallThroughDiagFull || FallThroughDiagPerFunction) { 1764 DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull); 1765 } 1766 1767 if (S.getLangOpts().ObjCARCWeak && 1768 Diags.getDiagnosticLevel(diag::warn_arc_repeated_use_of_weak, 1769 D->getLocStart()) != DiagnosticsEngine::Ignored) 1770 diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap()); 1771 1772 // Collect statistics about the CFG if it was built. 1773 if (S.CollectStats && AC.isCFGBuilt()) { 1774 ++NumFunctionsAnalyzed; 1775 if (CFG *cfg = AC.getCFG()) { 1776 // If we successfully built a CFG for this context, record some more 1777 // detail information about it. 
NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
               << "  " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << "  " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables\n"
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << "  " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << "  " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << "  " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
}
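// Sample shape of the PrintStats() output above (numbers are arbitrary; the
// statistics are only gathered when Sema is collecting stats, e.g. under the
// cc1 option -print-stats):
//
//   *** Analysis Based Warnings Stats:
//   42 functions analyzed (3 w/o CFGs).
//     812 CFG blocks built.
//     20 average CFG blocks per function.
//     104 max CFG blocks per function.
//   17 functions analyzed for uninitialized variables
//     63 variables analyzed.
//     3 average variables per function.
//     ...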