AnalysisBasedWarnings.cpp revision 1de85338543dd6228eb518185e385d94d377f4cb
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines analysis_warnings::[Policy,Executor]. 11// Together they are used by Sema to issue warnings based on inexpensive 12// static analysis algorithms in libAnalysis. 13// 14//===----------------------------------------------------------------------===// 15 16#include "clang/Sema/AnalysisBasedWarnings.h" 17#include "clang/Sema/SemaInternal.h" 18#include "clang/Sema/ScopeInfo.h" 19#include "clang/Basic/SourceManager.h" 20#include "clang/Lex/Preprocessor.h" 21#include "clang/AST/DeclObjC.h" 22#include "clang/AST/DeclCXX.h" 23#include "clang/AST/ExprObjC.h" 24#include "clang/AST/ExprCXX.h" 25#include "clang/AST/StmtObjC.h" 26#include "clang/AST/StmtCXX.h" 27#include "clang/AST/EvaluatedExprVisitor.h" 28#include "clang/Analysis/AnalysisContext.h" 29#include "clang/Analysis/CFG.h" 30#include "clang/Analysis/Analyses/ReachableCode.h" 31#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h" 32#include "clang/Analysis/CFGStmtMap.h" 33#include "clang/Analysis/Analyses/UninitializedValues.h" 34#include "llvm/ADT/BitVector.h" 35#include "llvm/Support/Casting.h" 36 37using namespace clang; 38 39//===----------------------------------------------------------------------===// 40// Unreachable code analysis. 
//===----------------------------------------------------------------------===//

namespace {
  // Adapter that forwards unreachable-code reports coming out of the
  // reachable_code analysis in libAnalysis to Sema's diagnostic engine.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    // Called once per unreachable region; emits -Wunreachable-code at L,
    // highlighting the two source ranges reported by the analysis.
    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

// Classification of how control can leave the analyzed body.
enum ControlFlowKind {
  UnknownFallThrough,       // No CFG available; nothing can be concluded.
  NeverFallThrough,         // Never falls off the end, but may return.
  MaybeFallThrough,         // Might or might not fall off the end.
  AlwaysFallThrough,        // Always falls off the end.
  NeverFallThroughOrReturn  // Never falls off the end and never returns.
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;     // Saw a reachable 'return'.
  bool HasFakeEdge = false;       // Edge that cannot actually be taken
                                  // (throw, noreturn call/dtor, MS asm).
  bool HasPlainEdge = false;      // Genuine fall-off-the-end edge.
  bool HasAbnormalEdge = false;   // Exceptional/abnormal path into exit.

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  // Classify each live predecessor edge of the exit block.
  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    bool hasNoReturnDtor = false;

    // Walk the block backwards until we hit a real statement, noting any
    // noreturn implicit destructor encountered on the way.
    for ( ; ri != re ; ++ri) {
      CFGElement CE = *ri;

      // FIXME: The right solution is to just sever the edges in the
      // CFG itself.
      if (const CFGImplicitDtor *iDtor = ri->getAs<CFGImplicitDtor>())
        if (iDtor->isNoReturn(AC.getASTContext())) {
          hasNoReturnDtor = true;
          HasFakeEdge = true;
          break;
        }

      if (isa<CFGStmt>(CE))
        break;
    }

    if (hasNoReturnDtor)
      continue;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last real statement in the predecessor block.
    CFGStmt CS = cast<CFGStmt>(*ri);
    Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      // MS-style inline assembly is opaque: conservatively treat it as if it
      // may both return and not fall through.
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }

    bool NoReturnEdge = false;
    if (CallExpr *C = dyn_cast<CallExpr>(S)) {
      if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
            == B.succ_end()) {
        HasAbnormalEdge = true;
        continue;
      }
      Expr *CEE = C->getCallee()->IgnoreParenCasts();
      QualType calleeType = CEE->getType();
      if (calleeType == AC.getASTContext().BoundMemberTy) {
        calleeType = Expr::findBoundMemberType(CEE);
        assert(!calleeType.isNull() && "analyzing unresolved call?");
      }
      // A call to a noreturn function (by type or by attribute) cannot
      // actually reach the exit block.
      if (getFunctionExtInfo(calleeType).getNoReturn()) {
        NoReturnEdge = true;
        HasFakeEdge = true;
      } else if (DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(CEE)) {
        ValueDecl *VD = DRE->getDecl();
        if (VD->hasAttr<NoReturnAttr>()) {
          NoReturnEdge = true;
          HasFakeEdge = true;
        }
      }
    }
    // FIXME: Add noreturn message sends.
    if (NoReturnEdge == false)
      HasPlainEdge = true;
  }
  // Fold the per-edge observations into a single classification.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {

// Bundles the diagnostic IDs to emit for each fall-through outcome, so the
// same checking logic can issue function-flavored warnings or block-flavored
// errors/warnings depending on what is being analyzed.
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;  // 0 means "don't suggest noreturn".
  bool funMode;                            // true: function/method; false: block.
  SourceLocation FuncLoc;

  // Diagnostic set used for functions and Objective-C methods.
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    if (!isVirtualMethod)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = true;
    return D;
  }

  // Diagnostic set used for blocks; falling off a non-void block is a hard
  // error rather than a warning.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = false;
    return D;
  }

  // Returns true when every diagnostic this checker could possibly emit is
  // ignored at FuncLoc, letting the caller skip the CFG analysis entirely.
  bool checkDiagnostics(Diagnostic &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == Diagnostic::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == Diagnostic::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == Diagnostic::Ignored);
    }

    // For blocks.
    return ReturnsVoid && !HasNoReturn
           && (!ReturnsVoid ||
               D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
                 == Diagnostic::Ignored);
  }
};

}

/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
// Compute ReturnsVoid/HasNoReturn for the declaration being analyzed, run
// CheckFallThrough on its CFG, and emit the diagnostic selected by CD for
// the resulting classification.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    // noreturn can come from the attribute or from the function type itself.
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, the return/noreturn information lives on the block's
    // function type, reached through the BlockExpr's pointee type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  Diagnostic &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // Only suggest adding 'noreturn' when a diagnostic was configured
        // (it is suppressed, e.g., for virtual methods).
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn)
          S.Diag(Compound->getLBracLoc(),
                 CD.diag_NeverFallThroughOrReturn);
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : EvaluatedExprVisitor<ContainsReference>(Context),
      FoundReference(false), Needle(Needle) {}

  void VisitExpr(Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  }

  void VisitDeclRefExpr(DeclRefExpr *E) {
    // Pointer identity: we look for the exact DeclRefExpr node, not just any
    // reference to the same declaration.
    if (E == Needle)
      FoundReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
}

/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable.  This manages the different forms of diagnostic
/// emitted for particular types of uses.  Returns true if the use was
/// diagnosed as a warning.  If a particular use is one we omit warnings for,
/// returns false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const Expr *E, bool isAlwaysUninit) {
  bool isSelfInit = false;

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
    if (isAlwaysUninit) {
      // Inspect the initializer of the variable declaration which is
      // being referenced prior to its initialization.  We emit
      // specialized diagnostics for self-initialization, and we
      // specifically avoid warning about self references which take the
      // form of:
      //
      //   int x = x;
      //
      // This is used to indicate to GCC that 'x' is intentionally left
      // uninitialized.  Proven code paths which access 'x' in
      // an uninitialized state after this will still warn.
      //
      // TODO: Should we suppress maybe-uninitialized warnings for
      // variables initialized in this way?
      if (const Expr *Initializer = VD->getInit()) {
        // Direct 'int x = x;' form: deliberately suppressed, no diagnostic.
        if (DRE == Initializer->IgnoreParenImpCasts())
          return false;

        // Otherwise search the whole initializer for this use of the
        // variable (e.g. 'int x = x + 1;').
        ContainsReference CR(S.Context, DRE);
        CR.Visit(const_cast<Expr*>(Initializer));
        isSelfInit = CR.doesContainReference();
      }
      if (isSelfInit) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
        << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
      } else {
        S.Diag(DRE->getLocStart(), diag::warn_uninit_var)
          << VD->getDeclName() << DRE->getSourceRange();
      }
    } else {
      S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << DRE->getSourceRange();
    }
  } else {
    // Not a DeclRefExpr: the only other use form handled here is a block
    // capturing the variable.
    const BlockExpr *BE = cast<BlockExpr>(E);
    S.Diag(BE->getLocStart(),
           isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                          : diag::warn_maybe_uninit_var_captured_by_block)
      << VD->getDeclName();
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration.
  if (!isSelfInit)
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}

// Attach a fix-it to the uninitialized-variable note suggesting a sensible
// initializer for the variable's type, when one exists.
static void SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return;

  // Suggest possible initialization (if any).
  const char *initialization = 0;
  QualType VariableTy = VD->getType().getCanonicalType();

  if (VariableTy->getAs<ObjCObjectPointerType>()) {
    // Check if 'nil' is defined.
    if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("nil")))
      initialization = " = nil";
    else
      initialization = " = 0";
  }
  else if (VariableTy->isRealFloatingType())
    initialization = " = 0.0";
  else if (VariableTy->isBooleanType() && S.Context.getLangOptions().CPlusPlus)
    initialization = " = false";
  else if (VariableTy->isEnumeralType())
    // No good universal initializer for an enum; suggest nothing.
    return;
  else if (VariableTy->isScalarType())
    initialization = " = 0";

  if (initialization) {
    SourceLocation loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
    S.Diag(loc, diag::note_var_fixit_add_initialization)
      << FixItHint::CreateInsertion(loc, initialization);
  }
}

// A single use of an uninitialized variable: the use expression plus whether
// the variable is *always* (vs. maybe) uninitialized at that use.
typedef std::pair<const Expr*, bool> UninitUse;

namespace {
// Orders uses by the raw encoding of their start SourceLocation, giving a
// stable (roughly source-order) sort.
struct SLocSort {
  bool operator()(const UninitUse &a, const UninitUse &b) {
    SourceLocation aLoc = a.first->getLocStart();
    SourceLocation bLoc = b.first->getLocStart();
    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  }
};

// Collects uninitialized-variable uses reported by the analysis, grouped per
// variable, and emits them (sorted) when flushed or destroyed.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef llvm::SmallVector<UninitUse, 2> UsesVec;
  typedef llvm::DenseMap<const VarDecl *, UsesVec*> UsesMap;
  UsesMap *uses;  // Lazily allocated; owns its UsesVec values.

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    flushDiagnostics();
  }

  // Callback from the analysis: record one use of 'vd' via expression 'ex'.
  void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
                                 bool isAlwaysUninit) {
    if (!uses)
      uses = new UsesMap();

    UsesVec *&vec = (*uses)[vd];
    if (!vec)
      vec = new UsesVec();

    vec->push_back(std::make_pair(ex, isAlwaysUninit));
  }

  // Emit all recorded diagnostics and release the accumulated storage.
  void flushDiagnostics() {
    if (!uses)
      return;

    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      UsesVec *vec = i->second;

      bool fixitIssued = false;

      // Sort the uses by their SourceLocations.  While not strictly
      // guaranteed to produce them in line/column order, this will provide
      // a stable ordering.
      std::sort(vec->begin(), vec->end(), SLocSort());

      for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
           ++vi) {
        if (!DiagnoseUninitializedUse(S, vd, vi->first,
                                      /*isAlwaysUninit=*/vi->second))
          continue;

        // Suggest a fixit hint the first time we diagnose a use of a
        // variable.
        if (!fixitIssued) {
          SuggestInitializationFixit(S, vd);
          fixitIssued = true;
        }
      }

      delete vec;
    }
    delete uses;
  }
};
}

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

// Default policy: fall-through checking on, unreachable-code checking off
// (the latter is enabled from the diagnostic state in the ctor below).
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
}

clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s) : S(s) {
  // Enable the unreachable-code check iff -Wunreachable-code is not ignored.
  Diagnostic &D = S.getDiagnostics();
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     Diagnostic::Ignored);
}

// Emit all delayed "possibly unreachable" diagnostics unconditionally,
// without consulting the CFG.
static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
       i = fscope->PossiblyUnreachableDiags.begin(),
       e = fscope->PossiblyUnreachableDiags.end();
       i != e; ++i) {
    const sema::PossiblyUnreachableDiag &D = *i;
    S.Diag(D.Loc, D.PD);
  }
}

// Main entry point: run the enabled analysis-based checks (delayed
// reachability-gated diagnostics, fall-through, unreachable code,
// -Wuninitialized) over the body of D according to policy P.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  Diagnostic &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AnalysisContext AC(D, 0, /*useUnoptimizedCFG=*/false, /*addehedges=*/false,
                     /*addImplicitDtors=*/true, /*addInitializers=*/true);

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          assert(block);
          if (CFGReverseBlockReachabilityAnalysis *cra =
                AC.getCFGReachablityAnalysis()) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    // If CFG construction failed, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }


  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
                         : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable)
    CheckUnreachable(S, AC);

  // -Wuninitialized: only run the (comparatively expensive) dataflow
  // analysis when at least one of its diagnostics is enabled here.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != Diagnostic::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != Diagnostic::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter);
    }
  }
}