AnalysisBasedWarnings.cpp revision 5d98994c7749312a43ce6adf45537979a98e7afd
//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines analysis_warnings::[Policy,Executor].
// Together they are used by Sema to issue warnings based on inexpensive
// static analysis algorithms in libAnalysis.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/AnalysisBasedWarnings.h"
#include "clang/Sema/SemaInternal.h"
#include "clang/Sema/ScopeInfo.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ExprObjC.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/Analyses/ReachableCode.h"
#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/Support/Casting.h"

using namespace clang;

//===----------------------------------------------------------------------===//
// Unreachable code analysis.
//===----------------------------------------------------------------------===//

namespace {
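  // Callback for the reachable-code analysis: each unreachable region that is
  // found gets reported via diag::warn_unreachable at its starting location.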
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}

/// CheckUnreachable - Check for unreachable code.
static void CheckUnreachable(Sema &S, AnalysisContext &AC) {
  UnreachableCodeHandler UC(S);
  reachable_code::FindUnreachableCode(AC, UC);
}

//===----------------------------------------------------------------------===//
// Check for missing return value.
//===----------------------------------------------------------------------===//

enum ControlFlowKind {
  UnknownFallThrough,
  NeverFallThrough,
  MaybeFallThrough,
  AlwaysFallThrough,
  NeverFallThroughOrReturn
};

/// CheckFallThrough - Check that we don't fall off the end of a
/// statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return, and NeverFallThrough iff we never fall off the end of the statement
/// but might return.  We assume that functions not marked noreturn will
/// return.
static ControlFlowKind CheckFallThrough(AnalysisContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(b, live);
          continue;
        }
      }
    }

  // Now that we know what is live, check the live predecessors of the exit
  // block and look for fall-through paths, being careful to ignore normal
  // returns and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
    bool hasNoReturnDtor = false;

    for ( ; ri != re ; ++ri) {
      CFGElement CE = *ri;

      // FIXME: The right solution is to just sever the edges in the
      // CFG itself.
      if (const CFGImplicitDtor *iDtor = ri->getAs<CFGImplicitDtor>())
        if (iDtor->isNoReturn(AC.getASTContext())) {
          hasNoReturnDtor = true;
          HasFakeEdge = true;
          break;
        }

      if (isa<CFGStmt>(CE))
        break;
    }

    if (hasNoReturnDtor)
      continue;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = cast<CFGStmt>(*ri);
    Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
      if (AS->isMSAsm()) {
        HasFakeEdge = true;
        HasLiveReturn = true;
        continue;
      }
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }

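    // Calls to 'noreturn' functions (detected either from the callee's type
    // or from an attribute on the referenced declaration) produce fake edges
    // rather than genuine fall-through edges.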
    bool NoReturnEdge = false;
    if (CallExpr *C = dyn_cast<CallExpr>(S)) {
      if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
            == B.succ_end()) {
        HasAbnormalEdge = true;
        continue;
      }
      Expr *CEE = C->getCallee()->IgnoreParenCasts();
      QualType calleeType = CEE->getType();
      if (calleeType == AC.getASTContext().BoundMemberTy) {
        calleeType = Expr::findBoundMemberType(CEE);
        assert(!calleeType.isNull() && "analyzing unresolved call?");
      }
      if (getFunctionExtInfo(calleeType).getNoReturn()) {
        NoReturnEdge = true;
        HasFakeEdge = true;
      } else if (DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(CEE)) {
        ValueDecl *VD = DRE->getDecl();
        if (VD->hasAttr<NoReturnAttr>()) {
          NoReturnEdge = true;
          HasFakeEdge = true;
        }
      }
    }
    // FIXME: Add noreturn message sends.
    if (!NoReturnEdge)
      HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that do not return but
  // are not marked noreturn.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}

namespace {

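// Bundles the diagnostic IDs used when control falls off the end of a body,
// with one set (warnings) for functions and methods and another set (mostly
// errors) for blocks.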
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  bool funMode;
  SourceLocation FuncLoc;

  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    if (!isVirtualMethod)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = true;
    return D;
  }

  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = false;
    return D;
  }

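  // Returns true if every diagnostic that could be emitted for this
  // combination of return type and noreturn attribute is currently ignored,
  // letting the caller skip the CFG-based analysis entirely.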
  bool checkDiagnostics(Diagnostic &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == Diagnostic::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == Diagnostic::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == Diagnostic::Ignored);
    }

    // For blocks.
    return ReturnsVoid && !HasNoReturn
           && (!ReturnsVoid ||
               D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
                 == Diagnostic::Ignored);
  }
};

}

/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function body that should return a value.  Check that we don't fall off
/// the end of a noreturn function.  We assume that functions and blocks not
/// marked noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  Diagnostic &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn)
          S.Diag(Compound->getLBracLoc(),
                 CD.diag_NeverFallThroughOrReturn);
        break;
      case NeverFallThrough:
        break;
    }
  }
}

//===----------------------------------------------------------------------===//
// -Wuninitialized
//===----------------------------------------------------------------------===//

namespace {
/// ContainsReference - A visitor class to search for references to
/// a particular declaration (the needle) within any evaluated component of an
/// expression (recursively).
class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
  bool FoundReference;
  const DeclRefExpr *Needle;

public:
  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
    : EvaluatedExprVisitor<ContainsReference>(Context),
      FoundReference(false), Needle(Needle) {}

  void VisitExpr(Expr *E) {
    // Stop evaluating if we already have a reference.
    if (FoundReference)
      return;

    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
  }

  void VisitDeclRefExpr(DeclRefExpr *E) {
    if (E == Needle)
      FoundReference = true;
    else
      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
  }

  bool doesContainReference() const { return FoundReference; }
};
}

/// DiagnoseUninitializedUse - Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const Expr *E, bool isAlwaysUninit) {
  bool isSelfInit = false;

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(E)) {
    if (isAlwaysUninit) {
      // Inspect the initializer of the variable declaration which is
      // being referenced prior to its initialization. We emit
      // specialized diagnostics for self-initialization, and we
      // specifically avoid warning about self references which take the
      // form of:
      //
      //   int x = x;
      //
      // This is used to indicate to GCC that 'x' is intentionally left
      // uninitialized. Proven code paths which access 'x' in
      // an uninitialized state after this will still warn.
      //
      // TODO: Should we suppress maybe-uninitialized warnings for
      // variables initialized in this way?
      if (const Expr *Initializer = VD->getInit()) {
        if (DRE == Initializer->IgnoreParenImpCasts())
          return false;

        ContainsReference CR(S.Context, DRE);
        CR.Visit(const_cast<Expr*>(Initializer));
        isSelfInit = CR.doesContainReference();
      }
      if (isSelfInit) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
      } else {
        S.Diag(DRE->getLocStart(), diag::warn_uninit_var)
          << VD->getDeclName() << DRE->getSourceRange();
      }
    } else {
      S.Diag(DRE->getLocStart(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << DRE->getSourceRange();
    }
  } else {
    const BlockExpr *BE = cast<BlockExpr>(E);
    S.Diag(BE->getLocStart(),
           isAlwaysUninit ? diag::warn_uninit_var_captured_by_block
                          : diag::warn_maybe_uninit_var_captured_by_block)
      << VD->getDeclName();
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration.
  if (!isSelfInit)
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}

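/// SuggestInitializationFixit - Emit a note with a fix-it hint suggesting a
/// type-appropriate initializer (" = nil", " = NULL", " = 0", " = 0.0", or
/// " = false") for a variable that was reported as uninitialized.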
static void SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  // Don't issue a fixit if there is already an initializer.
  if (VD->getInit())
    return;

  // Suggest possible initialization (if any).
  const char *initialization = 0;
  QualType VariableTy = VD->getType().getCanonicalType();

  if (VariableTy->isObjCObjectPointerType() ||
      VariableTy->isBlockPointerType()) {
    // Check if 'nil' is defined.
    if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("nil")))
      initialization = " = nil";
    else
      initialization = " = 0";
  }
  else if (VariableTy->isRealFloatingType())
    initialization = " = 0.0";
  else if (VariableTy->isBooleanType() && S.Context.getLangOptions().CPlusPlus)
    initialization = " = false";
  else if (VariableTy->isEnumeralType())
    return;
  else if (VariableTy->isPointerType() || VariableTy->isMemberPointerType()) {
    // Check if 'NULL' is defined.
    if (S.PP.getMacroInfo(&S.getASTContext().Idents.get("NULL")))
      initialization = " = NULL";
    else
      initialization = " = 0";
  }
  else if (VariableTy->isScalarType())
    initialization = " = 0";

  if (initialization) {
    SourceLocation loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
    S.Diag(loc, diag::note_var_fixit_add_initialization)
      << FixItHint::CreateInsertion(loc, initialization);
  }
}

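// A single use of an uninitialized variable: the expression involved, and
// whether the variable is definitely uninitialized there (true) or only
// possibly uninitialized (false).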
typedef std::pair<const Expr*, bool> UninitUse;

namespace {
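// Orders uses by the raw encoding of their source locations, yielding a
// stable, roughly source-order sequence of diagnostics.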
struct SLocSort {
  bool operator()(const UninitUse &a, const UninitUse &b) {
    SourceLocation aLoc = a.first->getLocStart();
    SourceLocation bLoc = b.first->getLocStart();
    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
  }
};

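// Receives use-of-uninitialized-variable callbacks from the analysis, buffers
// them per variable, and emits the diagnostics (plus a one-time initialization
// fix-it per variable) when flushed.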
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef llvm::SmallVector<UninitUse, 2> UsesVec;
  typedef llvm::DenseMap<const VarDecl *, UsesVec*> UsesMap;
  UsesMap *uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    flushDiagnostics();
  }

  void handleUseOfUninitVariable(const Expr *ex, const VarDecl *vd,
                                 bool isAlwaysUninit) {
    if (!uses)
      uses = new UsesMap();

    UsesVec *&vec = (*uses)[vd];
    if (!vec)
      vec = new UsesVec();

    vec->push_back(std::make_pair(ex, isAlwaysUninit));
  }

  void flushDiagnostics() {
    if (!uses)
      return;

    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      UsesVec *vec = i->second;

      bool fixitIssued = false;

      // Sort the uses by their SourceLocations.  While not strictly
      // guaranteed to produce them in line/column order, this will provide
      // a stable ordering.
      std::sort(vec->begin(), vec->end(), SLocSort());

      for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
           ++vi) {
        if (!DiagnoseUninitializedUse(S, vd, vi->first,
                                      /*isAlwaysUninit=*/vi->second))
          continue;

        // Suggest a fixit hint the first time we diagnose a use of a variable.
        if (!fixitIssued) {
          SuggestInitializationFixit(S, vd);
          fixitIssued = true;
        }
      }

      delete vec;
    }
    delete uses;
  }
};
}

//===----------------------------------------------------------------------===//
// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
//  warnings on a function, method, or block.
//===----------------------------------------------------------------------===//

clang::sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
}

clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
  : S(s),
    NumFunctionsAnalyzed(0),
    NumCFGBlocks(0),
    MaxCFGBlocksPerFunction(0) {
  Diagnostic &D = S.getDiagnostics();
  DefaultPolicy.enableCheckUnreachable = (unsigned)
    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
        Diagnostic::Ignored);
}

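// Emit all diagnostics that were deferred because they might lie in
// unreachable code, without consulting the CFG.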
static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
  for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
       i = fscope->PossiblyUnreachableDiags.begin(),
       e = fscope->PossiblyUnreachableDiags.end();
       i != e; ++i) {
    const sema::PossiblyUnreachableDiag &D = *i;
    S.Diag(D.Loc, D.PD);
  }
}

void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  Diagnostic &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile-time hit.
  AnalysisContext AC(D, 0, /*useUnoptimizedCFG=*/false, /*addehedges=*/false,
                     /*addImplicitDtors=*/true, /*addInitializers=*/true);

  // Emit delayed diagnostics.
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (llvm::SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          assert(block);
          if (CFGReverseBlockReachabilityAnalysis *cra =
                AC.getCFGReachablityAnalysis()) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    if (!analyzed)
      flushDiagnostics(S, fscope);
  }

  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
                         : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable)
    CheckUnreachable(S, AC);

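  // Warning: check for uses of uninitialized variables.  Only run the
  // analysis when at least one of the relevant diagnostics is enabled.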
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != Diagnostic::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != Diagnostic::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats = {};
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detailed information about it.
      unsigned NumBlocks = std::distance(cfg->begin(), cfg->end());
      NumCFGBlocks += NumBlocks;
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         NumBlocks);
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}

void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";

  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  unsigned AvgCFGBlocksPerFunction =
      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
               << "  " << NumCFGBlocks << " CFG blocks built.\n"
               << "  " << AvgCFGBlocksPerFunction
               << " average CFG blocks per function.\n"
               << "  " << MaxCFGBlocksPerFunction
               << " max CFG blocks per function.\n";

  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  llvm::errs() << NumUninitAnalysisFunctions
               << " functions analyzed for uninitialized variables.\n"
               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
               << "  " << AvgUninitVariablesPerFunction
               << " average variables per function.\n"
               << "  " << MaxUninitAnalysisVariablesPerFunction
               << " max variables per function.\n"
               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
               << "  " << AvgUninitBlockVisitsPerFunction
               << " average block visits per function.\n"
               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
               << " max block visits per function.\n";
}
785