AnalysisBasedWarnings.cpp revision 6f41715df2c6a31c0c3ab3088b8cd18a3c8321b8
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines analysis_warnings::[Policy,Executor]. 11// Together they are used by Sema to issue warnings based on inexpensive 12// static analysis algorithms in libAnalysis. 13// 14//===----------------------------------------------------------------------===// 15 16#include "clang/Sema/AnalysisBasedWarnings.h" 17#include "clang/Sema/SemaInternal.h" 18#include "clang/Sema/ScopeInfo.h" 19#include "clang/Basic/SourceManager.h" 20#include "clang/Lex/Preprocessor.h" 21#include "clang/AST/DeclObjC.h" 22#include "clang/AST/DeclCXX.h" 23#include "clang/AST/ExprObjC.h" 24#include "clang/AST/ExprCXX.h" 25#include "clang/AST/StmtObjC.h" 26#include "clang/AST/StmtCXX.h" 27#include "clang/AST/EvaluatedExprVisitor.h" 28#include "clang/Analysis/AnalysisContext.h" 29#include "clang/Analysis/CFG.h" 30#include "clang/Analysis/Analyses/ReachableCode.h" 31#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h" 32#include "clang/Analysis/CFGStmtMap.h" 33#include "clang/Analysis/Analyses/UninitializedValues.h" 34#include "llvm/ADT/BitVector.h" 35#include "llvm/Support/Casting.h" 36 37using namespace clang; 38 39//===----------------------------------------------------------------------===// 40// Unreachable code analysis. 
//===----------------------------------------------------------------------===//

namespace {
  /// Adapter that receives callbacks from the reachable_code analysis and
  /// forwards each unreachable-code report to Sema as a warn_unreachable
  /// diagnostic.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    // L is the location the warning is attached to; R1/R2 are the source
    // ranges highlighted in the diagnostic output.
    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

// Classification of how control can leave the analyzed body; see the
// CheckFallThrough documentation below for the precise meaning of each value.
enum ControlFlowKind {
  UnknownFallThrough,      // No CFG could be built; nothing can be concluded.
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end.
  NeverFallThroughOrReturn // Neither falls off the end nor returns.
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit block
  // and look for fall through paths, being careful to ignore normal returns,
  // and exceptional paths.
  bool HasLiveReturn = false;   // Some live block ends with a ReturnStmt.
  bool HasFakeEdge = false;     // An edge that cannot actually fall through
                                // (throw, noreturn call/dtor, MS inline asm).
  bool HasPlainEdge = false;    // An ordinary fall-off-the-end edge.
  bool HasAbnormalEdge = false; // An exceptional/try-related edge.

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  // Classify each live predecessor of the exit block by the last relevant
  // CFGElement/terminator it contains.
  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    bool hasNoReturnDtor = false;

    for ( ; ri != re ; ++ri) {
      CFGElement CE = *ri;

      // FIXME: The right solution is to just sever the edges in the
      // CFG itself.
      if (const CFGImplicitDtor *iDtor = ri->getAs<CFGImplicitDtor>())
        if (iDtor->isNoReturn(AC.getASTContext())) {
          hasNoReturnDtor = true;
          HasFakeEdge = true;
          break;
        }

      if (isa<CFGStmt>(CE))
        break;
    }

    // A noreturn destructor on this path means control never actually
    // reaches the exit along it; skip further classification.
    if (hasNoReturnDtor)
      continue;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // 'ri' now points at the last CFGStmt in the block; classify by its
    // statement kind.
    CFGStmt CS = cast<CFGStmt>(*ri);
    Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      // MS-style inline asm is treated as both a fake edge and a live
      // return — presumably because it can transfer control in ways the
      // CFG cannot model (e.g. a 'ret' inside the asm); verify.
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }

    bool NoReturnEdge = false;
    if (CallExpr *C = dyn_cast<CallExpr>(S)) {
      // If this block does not feed directly into the exit block, the call
      // reaches the exit only via an exceptional path.
      if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
            == B.succ_end()) {
        HasAbnormalEdge = true;
        continue;
      }
      Expr *CEE = C->getCallee()->IgnoreParenCasts();
      // A call to a noreturn function (by type, or by attribute on the
      // referenced declaration) cannot fall through.
      if (getFunctionExtInfo(CEE->getType()).getNoReturn()) {
        NoReturnEdge = true;
        HasFakeEdge = true;
      } else if (DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(CEE)) {
        ValueDecl *VD = DRE->getDecl();
        if (VD->hasAttr<NoReturnAttr>()) {
          NoReturnEdge = true;
          HasFakeEdge = true;
        }
      }
    }
    // FIXME: Add noreturn message sends.
    if (NoReturnEdge == false)
      HasPlainEdge = true;
  }
  // Fold the collected edge kinds into the four-way answer documented above.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {

// Bundle of diagnostic IDs to emit for each CheckFallThrough outcome.
// MakeForFunction selects warning diagnostics (funMode == true);
// MakeForBlock selects error diagnostics for blocks (funMode == false).
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  bool funMode;        // true for functions/methods, false for blocks.
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    if (!isVirtualMethod)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;  // 0 suppresses the suggestion.

    D.funMode = true;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = false;
    return D;
  }

  // Returns true when every diagnostic that could be emitted for this
  // (ReturnsVoid, HasNoReturn) combination is ignored, so the caller can
  // skip running the CFG-based analysis entirely.
  bool checkDiagnostics(Diagnostic &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode) {
      // NOTE(review): the last clause consults warn_suggest_noreturn_block
      // rather than warn_suggest_noreturn_function even in function mode —
      // correct only if both diags share a warning group; verify.
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == Diagnostic::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == Diagnostic::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == Diagnostic::Ignored);
    }

    // For blocks.  The err_* block diagnostics are hard errors and cannot
    // be ignored, so the analysis can only be skipped when none of them
    // would apply.
    return  ReturnsVoid && !HasNoReturn
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == Diagnostic::Ignored);
  }
};

}

/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisContext &AC) {

  // Determine the declared return/noreturn properties of the entity whose
  // body we are checking: a function, an Objective-C method, or a block.
  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    // noreturn can come from either the declaration attribute or the
    // function type itself.
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, the return/noreturn information lives on the block
    // expression's (pointer) type, not on the BlockDecl.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  Diagnostic &Diags = S.getDiagnostics();

  // Short circuit for compilation speed: skip the CFG walk entirely when
  // every diagnostic it could produce is ignored.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    // Diagnostics are attached to the closing brace (or the opening brace
    // for the "suggest noreturn" case).
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // A diag ID of 0 means the suggestion was suppressed (e.g. for
        // virtual methods; see MakeForFunction).
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn)
          S.Diag(Compound->getLBracLoc(),
                 CD.diag_NeverFallThroughOrReturn);
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// Visitor that determines whether one specific DeclRefExpr node occurs in
/// the (potentially) evaluated portions of an expression tree.  Used to
/// detect self-initialization such as 'int x = x;'.
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool containsReference;  // Set once the target reference is found.
  const DeclRefExpr *dr;   // The exact node we are searching for.
public:
  ContainsReference(ASTContext &context,
                    const DeclRefExpr *dr) :
    EvaluatedExprVisitor<ContainsReference>(context),
    containsReference(false), dr(dr) {}

  void VisitExpr(Expr *e) {
    // Stop evaluating if we already have a reference.
    if (containsReference)
      return;

    EvaluatedExprVisitor<ContainsReference>::VisitExpr(e);
  }

  void VisitDeclRefExpr(DeclRefExpr *e) {
    // Compare node identity, not the referenced decl: we want this
    // particular use, not any use of the same variable.
    if (e == dr)
      containsReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(e);
  }

  bool doesContainReference() const { return containsReference; }
};
}

/// Returns true if 'dr' (a use of 'vd') appears inside vd's own
/// initializer, i.e. the variable is used to initialize itself.
static bool isSelfInit(ASTContext &context,
                       const VarDecl *vd, const DeclRefExpr *dr) {
  if (const Expr *exp = vd->getInit()) {
    ContainsReference contains(context, dr);
    contains.Visit(const_cast<Expr*>(exp));
    return contains.doesContainReference();
  }
  return false;
}

// A recorded use of an uninitialized variable: the use expression paired
// with whether the variable is *always* (vs. maybe) uninitialized there.
typedef std::pair<const Expr*, bool> UninitUse;

namespace {
/// Strict weak ordering on UninitUse by the raw encoding of the use's start
/// location — stable, though not guaranteed to match line/column order.
struct SLocSort {
  bool operator()(const UninitUse &a, const UninitUse &b) {
    SourceLocation aLoc = a.first->getLocStart();
    SourceLocation bLoc = b.first->getLocStart();
    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  }
};

/// Collects uninitialized-use reports from the analysis and emits the
/// corresponding diagnostics (grouped per variable, sorted by location,
/// with at most one fixit per variable) when flushed or destroyed.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef llvm::SmallVector<UninitUse, 2> UsesVec;
  typedef llvm::DenseMap<const VarDecl *, UsesVec*> UsesMap;
  // Lazily allocated map from variable to its recorded uses; the map and
  // its vectors are manually deleted in flushDiagnostics().
  UsesMap *uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    // Ensure pending diagnostics are emitted even if the caller never
    // flushed explicitly.
    flushDiagnostics();
  }

  // Callback from the uninitialized-values analysis: record one use of
  // 'vd' at expression 'ex'.
  void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
                                 bool isAlwaysUninit) {
    if (!uses)
      uses = new UsesMap();

    UsesVec *&vec = (*uses)[vd];
    if (!vec)
      vec = new UsesVec();

    vec->push_back(std::make_pair(ex, isAlwaysUninit));
  }

  // Emit all collected diagnostics and release the recorded state.
  void flushDiagnostics() {
    if (!uses)
      return;

    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      UsesVec *vec = i->second;

      bool fixitIssued = false;

      // Sort the uses by their SourceLocations.  While not strictly
      // guaranteed to produce them in line/column order, this will provide
      // a stable ordering.
      std::sort(vec->begin(), vec->end(), SLocSort());

      for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve; ++vi)
      {
        const bool isAlwaysUninit = vi->second;
        if (const DeclRefExpr *dr = dyn_cast<DeclRefExpr>(vi->first)) {
          // Direct use: pick between self-reference-in-initializer,
          // definitely-uninitialized, and maybe-uninitialized warnings.
          S.Diag(dr->getLocStart(),
                 isAlwaysUninit ?
                   (isSelfInit(S.Context, vd, dr)
                      ? diag::warn_uninit_self_reference_in_init
                      : diag::warn_uninit_var)
                   : diag::warn_maybe_uninit_var)
            << vd->getDeclName() << dr->getSourceRange();
        }
        else {
          // Otherwise the use must be a capture of the variable by a block.
          const BlockExpr *be = cast<BlockExpr>(vi->first);
          S.Diag(be->getLocStart(),
                 isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                                : diag::warn_maybe_uninit_var_captured_by_block)
            << vd->getDeclName();
        }

        // Report where the variable was declared.
        S.Diag(vd->getLocStart(), diag::note_uninit_var_def)
          << vd->getDeclName();

        // Only report the fixit once.
        if (fixitIssued)
          continue;

        fixitIssued = true;

        // Don't issue a fixit if there is already an initializer.
        if (vd->getInit())
          continue;

        // Suggest possible initialization (if any).
        const char *initialization = 0;
        QualType vdTy = vd->getType().getCanonicalType();

        if (vdTy->getAs<ObjCObjectPointerType>()) {
          // Check if 'nil' is defined.
          if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("nil")))
            initialization = " = nil";
          else
            initialization = " = 0";
        }
        else if (vdTy->isRealFloatingType())
          initialization = " = 0.0";
        else if (vdTy->isBooleanType() && S.Context.getLangOptions().CPlusPlus)
          initialization = " = false";
        else if (vdTy->isEnumeralType())
          // No sensible default value exists for an enum; skip the fixit.
          continue;
        else if (vdTy->isScalarType())
          initialization = " = 0";

        if (initialization) {
          SourceLocation loc = S.PP.getLocForEndOfToken(vd->getLocEnd());
          S.Diag(loc, diag::note_var_fixit_add_initialization)
            << FixItHint::CreateInsertion(loc, initialization);
        }
      }
      delete vec;
    }
    delete uses;
  }
};
}

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
// warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

// Default policy: fall-through checking on, unreachable-code checking off.
clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
}

// Enable the unreachable-code check in the default policy only when
// -Wunreachable-code is not ignored under the current diagnostic settings.
clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s) : S(s) {
  Diagnostic &D = S.getDiagnostics();
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
     Diagnostic::Ignored);
}

// Unconditionally emit every delayed "possibly unreachable" diagnostic
// recorded on the function scope.  Used when the reachability analysis is
// unavailable or would be pointless (e.g. errors already occurred).
static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
       i != e; ++i) {
    const sema::PossiblyUnreachableDiag &D = *i;
    S.Diag(D.Loc, D.PD);
  }
}

// Entry point: run the analysis-based warnings selected by policy P on the
// body of declaration D (blkExpr supplies the block's type when D is a
// BlockDecl).
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  Diagnostic &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AnalysisContext AC(D, 0, /*useUnoptimizedCFG=*/false, /*addehedges=*/false,
                     /*addImplicitDtors=*/true, /*addInitializers=*/true);

  // Emit delayed diagnostics.  Each one is emitted only if the statement it
  // refers to is reachable from the CFG entry (or cannot be mapped to a
  // block at all).
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
           i = fscope->PossiblyUnreachableDiags.begin(),
           e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
             i = fscope->PossiblyUnreachableDiags.begin(),
             e = fscope->PossiblyUnreachableDiags.end();
           i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          assert(block);
          if (CFGReverseBlockReachabilityAnalysis *cra = AC.getCFGReachablityAnalysis()) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    // If the CFG could not be built, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }


  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
                         : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable)
    CheckUnreachable(S, AC);

  // -Wuninitialized: only run the dataflow analysis if at least one of the
  // uninitialized-variable warnings is enabled at this location.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
        != Diagnostic::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
        != Diagnostic::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter);
    }
  }
}