AnalysisBasedWarnings.cpp revision bdb97ff687ce85e45cc728b87612ed546f48c1e7
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines analysis_warnings::[Policy,Executor].
11// Together they are used by Sema to issue warnings based on inexpensive
12// static analysis algorithms in libAnalysis.
13//
14//===----------------------------------------------------------------------===//
15
16#include "clang/Sema/AnalysisBasedWarnings.h"
17#include "clang/Sema/SemaInternal.h"
18#include "clang/Sema/ScopeInfo.h"
19#include "clang/Basic/SourceManager.h"
20#include "clang/Basic/SourceLocation.h"
21#include "clang/Lex/Preprocessor.h"
22#include "clang/Lex/Lexer.h"
23#include "clang/AST/DeclObjC.h"
24#include "clang/AST/DeclCXX.h"
25#include "clang/AST/ExprObjC.h"
26#include "clang/AST/ExprCXX.h"
27#include "clang/AST/StmtObjC.h"
28#include "clang/AST/StmtCXX.h"
29#include "clang/AST/EvaluatedExprVisitor.h"
30#include "clang/AST/StmtVisitor.h"
31#include "clang/AST/RecursiveASTVisitor.h"
32#include "clang/Analysis/AnalysisContext.h"
33#include "clang/Analysis/CFG.h"
34#include "clang/Analysis/Analyses/ReachableCode.h"
35#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
36#include "clang/Analysis/Analyses/ThreadSafety.h"
37#include "clang/Analysis/CFGStmtMap.h"
38#include "clang/Analysis/Analyses/UninitializedValues.h"
39#include "llvm/ADT/BitVector.h"
40#include "llvm/ADT/FoldingSet.h"
41#include "llvm/ADT/ImmutableMap.h"
42#include "llvm/ADT/PostOrderIterator.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Casting.h"
46#include <algorithm>
47#include <iterator>
48#include <vector>
49#include <deque>
#include <list>
#include <cstring>
50
51using namespace clang;
52
53//===----------------------------------------------------------------------===//
54// Unreachable code analysis.
55//===----------------------------------------------------------------------===//
56
57namespace {
58  class UnreachableCodeHandler : public reachable_code::Callback {
59    Sema &S;
60  public:
61    UnreachableCodeHandler(Sema &s) : S(s) {}
62
63    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
64      S.Diag(L, diag::warn_unreachable) << R1 << R2;
65    }
66  };
67}
68
69/// CheckUnreachable - Check for unreachable code.
70static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
71  UnreachableCodeHandler UC(S);
72  reachable_code::FindUnreachableCode(AC, UC);
73}
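
// Illustrative example (not part of this file) of the pattern the
// unreachable-code analysis above flags:
//
//   int f() {
//     return 1;
//     foo();      // flagged by -Wunreachable-code: never executed.
//   }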
74
75//===----------------------------------------------------------------------===//
76// Check for missing return value.
77//===----------------------------------------------------------------------===//
78
79enum ControlFlowKind {
80  UnknownFallThrough,
81  NeverFallThrough,
82  MaybeFallThrough,
83  AlwaysFallThrough,
84  NeverFallThroughOrReturn
85};
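
// Illustrative mapping from function bodies to ControlFlowKind values
// (a sketch, not compiled here):
//
//   int  a()       { }                    // AlwaysFallThrough
//   int  b(bool x) { if (x) return 1; }   // MaybeFallThrough
//   int  c()       { return 1; }          // NeverFallThrough
//   void d()       { for (;;) {} }        // NeverFallThroughOrReturn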
86
87/// CheckFallThrough - Check that we don't fall off the end of a
88/// Statement that should return a value.
89///
90/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
91/// MaybeFallThrough iff we might or might not fall off the end,
92/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
93/// return, and NeverFallThrough iff we never fall off the end of the
94/// statement but we may return.  We assume that functions not marked noreturn
95/// will return.
96static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
97  CFG *cfg = AC.getCFG();
98  if (cfg == 0) return UnknownFallThrough;
99
100  // The CFG leaves in dead things, and we don't want the dead code paths to
101  // confuse us, so we mark all live things first.
102  llvm::BitVector live(cfg->getNumBlockIDs());
103  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
104                                                          live);
105
106  bool AddEHEdges = AC.getAddEHEdges();
107  if (!AddEHEdges && count != cfg->getNumBlockIDs())
108    // When there are things remaining dead, and we didn't add EH edges
109    // from CallExprs to the catch clauses, we have to go back and
110    // mark them as live.
111    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
112      CFGBlock &b = **I;
113      if (!live[b.getBlockID()]) {
114        if (b.pred_begin() == b.pred_end()) {
115          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
116            // When not adding EH edges from calls, catch clauses
117            // can otherwise seem dead.  Avoid noting them as dead.
118            count += reachable_code::ScanReachableFromBlock(&b, live);
119          continue;
120        }
121      }
122    }
123
124  // Now that we know what is live, check the live predecessors of the exit
125  // block and look for fall-through paths, being careful to ignore normal
126  // returns and exceptional paths.
127  bool HasLiveReturn = false;
128  bool HasFakeEdge = false;
129  bool HasPlainEdge = false;
130  bool HasAbnormalEdge = false;
131
132  // Ignore default cases that aren't likely to be reachable because all
133  // enumerators in a switch(X) are covered by explicit case statements.
134  CFGBlock::FilterOptions FO;
135  FO.IgnoreDefaultsWithCoveredEnums = 1;
136
137  for (CFGBlock::filtered_pred_iterator
138       I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
139    const CFGBlock& B = **I;
140    if (!live[B.getBlockID()])
141      continue;
142
143    // Skip blocks which contain an element marked as no-return. They don't
144    // represent actually viable edges into the exit block, so mark them as
145    // abnormal.
146    if (B.hasNoReturnElement()) {
147      HasAbnormalEdge = true;
148      continue;
149    }
150
151    // Destructors can appear after the 'return' in the CFG.  This is
152    // normal.  We need to look past the destructors for the return
153    // statement (if it exists).
154    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
155
156    for ( ; ri != re ; ++ri)
157      if (isa<CFGStmt>(*ri))
158        break;
159
160    // No more CFGElements in the block?
161    if (ri == re) {
162      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
163        HasAbnormalEdge = true;
164        continue;
165      }
166      // A labeled empty statement, or the entry block...
167      HasPlainEdge = true;
168      continue;
169    }
170
171    CFGStmt CS = cast<CFGStmt>(*ri);
172    const Stmt *S = CS.getStmt();
173    if (isa<ReturnStmt>(S)) {
174      HasLiveReturn = true;
175      continue;
176    }
177    if (isa<ObjCAtThrowStmt>(S)) {
178      HasFakeEdge = true;
179      continue;
180    }
181    if (isa<CXXThrowExpr>(S)) {
182      HasFakeEdge = true;
183      continue;
184    }
185    if (const AsmStmt *AS = dyn_cast<AsmStmt>(S)) {
186      if (AS->isMSAsm()) {
187        HasFakeEdge = true;
188        HasLiveReturn = true;
189        continue;
190      }
191    }
192    if (isa<CXXTryStmt>(S)) {
193      HasAbnormalEdge = true;
194      continue;
195    }
196    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
197        == B.succ_end()) {
198      HasAbnormalEdge = true;
199      continue;
200    }
201
202    HasPlainEdge = true;
203  }
204  if (!HasPlainEdge) {
205    if (HasLiveReturn)
206      return NeverFallThrough;
207    return NeverFallThroughOrReturn;
208  }
209  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
210    return MaybeFallThrough;
211  // This returns AlwaysFallThrough for calls to functions that don't return
212  // but are not marked noreturn.  If people would like this warning to be
213  // more accurate, such functions should be marked as noreturn.
214  return AlwaysFallThrough;
215}
216
217namespace {
218
219struct CheckFallThroughDiagnostics {
220  unsigned diag_MaybeFallThrough_HasNoReturn;
221  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
222  unsigned diag_AlwaysFallThrough_HasNoReturn;
223  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
224  unsigned diag_NeverFallThroughOrReturn;
225  enum { Function, Block, Lambda } funMode;
226  SourceLocation FuncLoc;
227
228  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
229    CheckFallThroughDiagnostics D;
230    D.FuncLoc = Func->getLocation();
231    D.diag_MaybeFallThrough_HasNoReturn =
232      diag::warn_falloff_noreturn_function;
233    D.diag_MaybeFallThrough_ReturnsNonVoid =
234      diag::warn_maybe_falloff_nonvoid_function;
235    D.diag_AlwaysFallThrough_HasNoReturn =
236      diag::warn_falloff_noreturn_function;
237    D.diag_AlwaysFallThrough_ReturnsNonVoid =
238      diag::warn_falloff_nonvoid_function;
239
240    // Don't suggest that virtual functions be marked "noreturn", since they
241    // might be overridden by non-noreturn functions.
242    bool isVirtualMethod = false;
243    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
244      isVirtualMethod = Method->isVirtual();
245
246    // Don't suggest that template instantiations be marked "noreturn"
247    bool isTemplateInstantiation = false;
248    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
249      isTemplateInstantiation = Function->isTemplateInstantiation();
250
251    if (!isVirtualMethod && !isTemplateInstantiation)
252      D.diag_NeverFallThroughOrReturn =
253        diag::warn_suggest_noreturn_function;
254    else
255      D.diag_NeverFallThroughOrReturn = 0;
256
257    D.funMode = Function;
258    return D;
259  }
260
261  static CheckFallThroughDiagnostics MakeForBlock() {
262    CheckFallThroughDiagnostics D;
263    D.diag_MaybeFallThrough_HasNoReturn =
264      diag::err_noreturn_block_has_return_expr;
265    D.diag_MaybeFallThrough_ReturnsNonVoid =
266      diag::err_maybe_falloff_nonvoid_block;
267    D.diag_AlwaysFallThrough_HasNoReturn =
268      diag::err_noreturn_block_has_return_expr;
269    D.diag_AlwaysFallThrough_ReturnsNonVoid =
270      diag::err_falloff_nonvoid_block;
271    D.diag_NeverFallThroughOrReturn =
272      diag::warn_suggest_noreturn_block;
273    D.funMode = Block;
274    return D;
275  }
276
277  static CheckFallThroughDiagnostics MakeForLambda() {
278    CheckFallThroughDiagnostics D;
279    D.diag_MaybeFallThrough_HasNoReturn =
280      diag::err_noreturn_lambda_has_return_expr;
281    D.diag_MaybeFallThrough_ReturnsNonVoid =
282      diag::warn_maybe_falloff_nonvoid_lambda;
283    D.diag_AlwaysFallThrough_HasNoReturn =
284      diag::err_noreturn_lambda_has_return_expr;
285    D.diag_AlwaysFallThrough_ReturnsNonVoid =
286      diag::warn_falloff_nonvoid_lambda;
287    D.diag_NeverFallThroughOrReturn = 0;
288    D.funMode = Lambda;
289    return D;
290  }
291
292  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
293                        bool HasNoReturn) const {
294    if (funMode == Function) {
295      return (ReturnsVoid ||
296              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
297                                   FuncLoc) == DiagnosticsEngine::Ignored)
298        && (!HasNoReturn ||
299            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
300                                 FuncLoc) == DiagnosticsEngine::Ignored)
301        && (!ReturnsVoid ||
302            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
303              == DiagnosticsEngine::Ignored);
304    }
305
306    // For blocks / lambdas.
307    return ReturnsVoid && !HasNoReturn
308            && ((funMode == Lambda) ||
309                D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
310                  == DiagnosticsEngine::Ignored);
311  }
312};
313
314}
315
316/// CheckFallThroughForBody - Check that we don't fall off the end of a
317/// function that should return a value.  Check that we don't fall off the end
318/// of a noreturn function.  We assume that functions and blocks not marked
319/// noreturn will return.
320static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
321                                    const BlockExpr *blkExpr,
322                                    const CheckFallThroughDiagnostics& CD,
323                                    AnalysisDeclContext &AC) {
324
325  bool ReturnsVoid = false;
326  bool HasNoReturn = false;
327
328  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
329    ReturnsVoid = FD->getResultType()->isVoidType();
330    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
331       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
332  }
333  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
334    ReturnsVoid = MD->getResultType()->isVoidType();
335    HasNoReturn = MD->hasAttr<NoReturnAttr>();
336  }
337  else if (isa<BlockDecl>(D)) {
338    QualType BlockTy = blkExpr->getType();
339    if (const FunctionType *FT =
340          BlockTy->getPointeeType()->getAs<FunctionType>()) {
341      if (FT->getResultType()->isVoidType())
342        ReturnsVoid = true;
343      if (FT->getNoReturnAttr())
344        HasNoReturn = true;
345    }
346  }
347
348  DiagnosticsEngine &Diags = S.getDiagnostics();
349
350  // Short circuit for compilation speed.
351  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
352      return;
353
354  // FIXME: Function try block
355  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
356    switch (CheckFallThrough(AC)) {
357      case UnknownFallThrough:
358        break;
359
360      case MaybeFallThrough:
361        if (HasNoReturn)
362          S.Diag(Compound->getRBracLoc(),
363                 CD.diag_MaybeFallThrough_HasNoReturn);
364        else if (!ReturnsVoid)
365          S.Diag(Compound->getRBracLoc(),
366                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
367        break;
368      case AlwaysFallThrough:
369        if (HasNoReturn)
370          S.Diag(Compound->getRBracLoc(),
371                 CD.diag_AlwaysFallThrough_HasNoReturn);
372        else if (!ReturnsVoid)
373          S.Diag(Compound->getRBracLoc(),
374                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
375        break;
376      case NeverFallThroughOrReturn:
377        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
378          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
379            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
380              << 0 << FD;
381          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
382            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
383              << 1 << MD;
384          } else {
385            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
386          }
387        }
388        break;
389      case NeverFallThrough:
390        break;
391    }
392  }
393}
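
// Illustrative diagnostics driven by CheckFallThroughForBody (sketch; the
// wording is approximate, the exact text lives in the diagnostic .td files):
//
//   int f(bool b) { if (b) return 1; }
//     // MaybeFallThrough: "control may reach end of non-void function"
//   int g() { }
//     // AlwaysFallThrough: "control reaches end of non-void function"
//   __attribute__((noreturn)) void h(bool b) { if (b) return; }
//     // HasNoReturn: "function declared 'noreturn' should not return"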
394
395//===----------------------------------------------------------------------===//
396// -Wuninitialized
397//===----------------------------------------------------------------------===//
398
399namespace {
400/// ContainsReference - A visitor class to search for references to
401/// a particular declaration (the needle) within any evaluated component of an
402/// expression (recursively).
403class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
404  bool FoundReference;
405  const DeclRefExpr *Needle;
406
407public:
408  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
409    : EvaluatedExprVisitor<ContainsReference>(Context),
410      FoundReference(false), Needle(Needle) {}
411
412  void VisitExpr(Expr *E) {
413    // Stop evaluating if we already have a reference.
414    if (FoundReference)
415      return;
416
417    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
418  }
419
420  void VisitDeclRefExpr(DeclRefExpr *E) {
421    if (E == Needle)
422      FoundReference = true;
423    else
424      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
425  }
426
427  bool doesContainReference() const { return FoundReference; }
428};
429}
430
431static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
432  QualType VariableTy = VD->getType().getCanonicalType();
433  if (VariableTy->isBlockPointerType() &&
434      !VD->hasAttr<BlocksAttr>()) {
435    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
436    << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
437    return true;
438  }
439
440  // Don't issue a fixit if there is already an initializer.
441  if (VD->getInit())
442    return false;
443
444  // Suggest possible initialization (if any).
445  std::string Init = S.getFixItZeroInitializerForType(VariableTy);
446  if (Init.empty())
447    return false;
448
449  // Don't suggest a fixit inside macros.
450  if (VD->getLocEnd().isMacroID())
451    return false;
452
453  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
454
455  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
456    << FixItHint::CreateInsertion(Loc, Init);
457  return true;
458}
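
// Illustrative effect of the fixits above (sketch):
//
//   void (^blk)(void);   // note suggests prepending "__block " when a block
//                        // variable is captured before it is initialized.
//   int x;               // note suggests inserting an initializer obtained
//   use(x);              // from getFixItZeroInitializerForType(), e.g. " = 0".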
459
460/// Create a fixit to remove an if-like statement, on the assumption that its
461/// condition is CondVal.
462static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
463                          const Stmt *Else, bool CondVal,
464                          FixItHint &Fixit1, FixItHint &Fixit2) {
465  if (CondVal) {
466    // If condition is always true, remove all but the 'then'.
467    Fixit1 = FixItHint::CreateRemoval(
468        CharSourceRange::getCharRange(If->getLocStart(),
469                                      Then->getLocStart()));
470    if (Else) {
471      SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
472          Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
473      Fixit2 = FixItHint::CreateRemoval(
474          SourceRange(ElseKwLoc, Else->getLocEnd()));
475    }
476  } else {
477    // If condition is always false, remove all but the 'else'.
478    if (Else)
479      Fixit1 = FixItHint::CreateRemoval(
480          CharSourceRange::getCharRange(If->getLocStart(),
481                                        Else->getLocStart()));
482    else
483      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
484  }
485}
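
// Illustrative behavior of CreateIfFixit (sketch): for
//   if (c) { A } else { B }
// CondVal == true yields fixits that remove "if (c) " and the else branch,
// leaving "{ A }"; CondVal == false yields a fixit that leaves only "{ B }".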
486
487/// DiagUninitUse -- Helper function to produce a diagnostic for an
488/// uninitialized use of a variable.
489static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
490                          bool IsCapturedByBlock) {
491  bool Diagnosed = false;
492
493  // Diagnose each branch which leads to a sometimes-uninitialized use.
494  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
495       I != E; ++I) {
496    assert(Use.getKind() == UninitUse::Sometimes);
497
498    const Expr *User = Use.getUser();
499    const Stmt *Term = I->Terminator;
500
501    // Information used when building the diagnostic.
502    unsigned DiagKind;
503    const char *Str;
504    SourceRange Range;
505
506    // FixIts to suppress the diagnostic by removing the dead condition.
507    // For all binary terminators, branch 0 is taken if the condition is true,
508    // and branch 1 is taken if the condition is false.
509    int RemoveDiagKind = -1;
510    const char *FixitStr =
511        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
512                                  : (I->Output ? "1" : "0");
513    FixItHint Fixit1, Fixit2;
514
515    switch (Term->getStmtClass()) {
516    default:
517      // Don't know how to report this. Just fall back to 'may be used
518      // uninitialized'. This happens for range-based for, which the user
519      // can't explicitly fix.
520      // FIXME: This also happens if the first use of a variable is always
521      // uninitialized, e.g. "for (int n; n < 10; ++n)". We should report that
522      // with the 'is uninitialized' diagnostic.
523      continue;
524
525    // "condition is true / condition is false".
526    case Stmt::IfStmtClass: {
527      const IfStmt *IS = cast<IfStmt>(Term);
528      DiagKind = 0;
529      Str = "if";
530      Range = IS->getCond()->getSourceRange();
531      RemoveDiagKind = 0;
532      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
533                    I->Output, Fixit1, Fixit2);
534      break;
535    }
536    case Stmt::ConditionalOperatorClass: {
537      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
538      DiagKind = 0;
539      Str = "?:";
540      Range = CO->getCond()->getSourceRange();
541      RemoveDiagKind = 0;
542      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
543                    I->Output, Fixit1, Fixit2);
544      break;
545    }
546    case Stmt::BinaryOperatorClass: {
547      const BinaryOperator *BO = cast<BinaryOperator>(Term);
548      if (!BO->isLogicalOp())
549        continue;
550      DiagKind = 0;
551      Str = BO->getOpcodeStr();
552      Range = BO->getLHS()->getSourceRange();
553      RemoveDiagKind = 0;
554      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
555          (BO->getOpcode() == BO_LOr && !I->Output))
556        // true && y -> y, false || y -> y.
557        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
558                                                      BO->getOperatorLoc()));
559      else
560        // false && y -> false, true || y -> true.
561        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
562      break;
563    }
564
565    // "loop is entered / loop is exited".
566    case Stmt::WhileStmtClass:
567      DiagKind = 1;
568      Str = "while";
569      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
570      RemoveDiagKind = 1;
571      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
572      break;
573    case Stmt::ForStmtClass:
574      DiagKind = 1;
575      Str = "for";
576      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
577      RemoveDiagKind = 1;
578      if (I->Output)
579        Fixit1 = FixItHint::CreateRemoval(Range);
580      else
581        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
582      break;
583
584    // "condition is true / loop is exited".
585    case Stmt::DoStmtClass:
586      DiagKind = 2;
587      Str = "do";
588      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
589      RemoveDiagKind = 1;
590      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
591      break;
592
593    // "switch case is taken".
594    case Stmt::CaseStmtClass:
595      DiagKind = 3;
596      Str = "case";
597      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
598      break;
599    case Stmt::DefaultStmtClass:
600      DiagKind = 3;
601      Str = "default";
602      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
603      break;
604    }
605
606    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
607      << VD->getDeclName() << IsCapturedByBlock << DiagKind
608      << Str << I->Output << Range;
609    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
610      << IsCapturedByBlock << User->getSourceRange();
611    if (RemoveDiagKind != -1)
612      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
613        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
614
615    Diagnosed = true;
616  }
617
618  if (!Diagnosed)
619    S.Diag(Use.getUser()->getLocStart(),
620           Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
621                                              : diag::warn_maybe_uninit_var)
622        << VD->getDeclName() << IsCapturedByBlock
623        << Use.getUser()->getSourceRange();
624}
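
// Illustrative -Wsometimes-uninitialized report produced above (sketch; the
// exact wording lives in the diagnostic .td files):
//
//   int n;
//   if (cond)
//     n = 1;      // warning: variable 'n' is used uninitialized whenever
//                 //          'if' condition is false
//   return n;     // note: uninitialized use occurs here
//                 // note: remove the 'if' if its condition is always false
//                 //       to silence this warning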
625
626/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
627/// uninitialized variable. This manages the different forms of diagnostic
628/// emitted for particular types of uses. Returns true if the use was diagnosed
629/// as a warning. If a particular use is one we omit warnings for, returns
630/// false.
631static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
632                                     const UninitUse &Use,
633                                     bool alwaysReportSelfInit = false) {
634
635  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
636    // Inspect the initializer of the variable declaration which is
637    // being referenced prior to its initialization. We emit
638    // specialized diagnostics for self-initialization, and we
639    // specifically avoid warning about self references which take the
640    // form of:
641    //
642    //   int x = x;
643    //
644    // This is used to indicate to GCC that 'x' is intentionally left
645    // uninitialized. Proven code paths which access 'x' in
646    // an uninitialized state after this will still warn.
647    if (const Expr *Initializer = VD->getInit()) {
648      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
649        return false;
650
651      ContainsReference CR(S.Context, DRE);
652      CR.Visit(const_cast<Expr*>(Initializer));
653      if (CR.doesContainReference()) {
654        S.Diag(DRE->getLocStart(),
655               diag::warn_uninit_self_reference_in_init)
656          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
657        return true;
658      }
659    }
660
661    DiagUninitUse(S, VD, Use, false);
662  } else {
663    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
664    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
665      S.Diag(BE->getLocStart(),
666             diag::warn_uninit_byref_blockvar_captured_by_block)
667        << VD->getDeclName();
668    else
669      DiagUninitUse(S, VD, Use, true);
670  }
671
672  // Report where the variable was declared when the use wasn't within
673  // the initializer of that declaration and we didn't already suggest
674  // an initialization fixit.
675  if (!SuggestInitializationFixit(S, VD))
676    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
677      << VD->getDeclName();
678
679  return true;
680}
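
// Illustrative handling of self-initialization above (sketch):
//
//   int x = x;        // silenced: treated as the GCC idiom for deliberately
//                     // leaving 'x' uninitialized.
//   int y = y + 1;    // warning: variable 'y' is uninitialized when used
//                     //          within its own initialization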
681
682namespace {
683  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
684  public:
685    FallthroughMapper(Sema &S)
686      : FoundSwitchStatements(false),
687        S(S) {
688    }
689
690    bool foundSwitchStatements() const { return FoundSwitchStatements; }
691
692    void markFallthroughVisited(const AttributedStmt *Stmt) {
693      bool Found = FallthroughStmts.erase(Stmt);
694      assert(Found);
695      (void)Found;
696    }
697
698    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
699
700    const AttrStmts &getFallthroughStmts() const {
701      return FallthroughStmts;
702    }
703
704    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
705      int UnannotatedCnt = 0;
706      AnnotatedCnt = 0;
707
708      std::deque<const CFGBlock*> BlockQueue;
709
710      std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));
711
712      while (!BlockQueue.empty()) {
713        const CFGBlock *P = BlockQueue.front();
714        BlockQueue.pop_front();
715
716        const Stmt *Term = P->getTerminator();
717        if (Term && isa<SwitchStmt>(Term))
718          continue; // Switch statement, good.
719
720        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
721        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
722          continue; // Previous case label has no statements, good.
723
724        if (P->pred_begin() == P->pred_end()) {  // The block is unreachable.
725          // This only catches trivially unreachable blocks.
726          for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
727               ElIt != ElEnd; ++ElIt) {
728            if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
729              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
730                S.Diag(AS->getLocStart(),
731                       diag::warn_fallthrough_attr_unreachable);
732                markFallthroughVisited(AS);
733                ++AnnotatedCnt;
734              }
735              // Don't care about other unreachable statements.
736            }
737          }
738          // If there are no unreachable statements, this may be a special
739          // case in CFG:
740          // case X: {
741          //    A a;  // A has a destructor.
742          //    break;
743          // }
744          // // <<<< This place is represented by a 'hanging' CFG block.
745          // case Y:
746          continue;
747        }
748
749        const Stmt *LastStmt = getLastStmt(*P);
750        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
751          markFallthroughVisited(AS);
752          ++AnnotatedCnt;
753          continue; // Fallthrough annotation, good.
754        }
755
756        if (!LastStmt) { // This block contains no executable statements.
757          // Traverse its predecessors.
758          std::copy(P->pred_begin(), P->pred_end(),
759                    std::back_inserter(BlockQueue));
760          continue;
761        }
762
763        ++UnannotatedCnt;
764      }
765      return !!UnannotatedCnt;
766    }
767
768    // RecursiveASTVisitor setup.
769    bool shouldWalkTypesOfTypeLocs() const { return false; }
770
771    bool VisitAttributedStmt(AttributedStmt *S) {
772      if (asFallThroughAttr(S))
773        FallthroughStmts.insert(S);
774      return true;
775    }
776
777    bool VisitSwitchStmt(SwitchStmt *S) {
778      FoundSwitchStatements = true;
779      return true;
780    }
781
782  private:
783
784    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
785      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
786        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
787          return AS;
788      }
789      return 0;
790    }
791
792    static const Stmt *getLastStmt(const CFGBlock &B) {
793      if (const Stmt *Term = B.getTerminator())
794        return Term;
795      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
796                                            ElemEnd = B.rend();
797                                            ElemIt != ElemEnd; ++ElemIt) {
798        if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
799          return CS->getStmt();
800      }
801      // Workaround to detect a statement thrown out by CFGBuilder:
802      //   case X: {} case Y:
803      //   case X: ; case Y:
804      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
805        if (!isa<SwitchCase>(SW->getSubStmt()))
806          return SW->getSubStmt();
807
808      return 0;
809    }
810
811    bool FoundSwitchStatements;
812    AttrStmts FallthroughStmts;
813    Sema &S;
814  };
815}
816
817static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC) {
818  FallthroughMapper FM(S);
819  FM.TraverseStmt(AC.getBody());
820
821  if (!FM.foundSwitchStatements())
822    return;
823
824  CFG *Cfg = AC.getCFG();
825
826  if (!Cfg)
827    return;
828
829  int AnnotatedCnt;
830
831  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
832    const CFGBlock &B = **I;
833    const Stmt *Label = B.getLabel();
834
835    if (!Label || !isa<SwitchCase>(Label))
836      continue;
837
838    if (!FM.checkFallThroughIntoBlock(B, AnnotatedCnt))
839      continue;
840
841    S.Diag(Label->getLocStart(), diag::warn_unannotated_fallthrough);
842
843    if (!AnnotatedCnt) {
844      SourceLocation L = Label->getLocStart();
845      if (L.isMacroID())
846        continue;
847      if (S.getLangOpts().CPlusPlus0x) {
848        const Stmt *Term = B.getTerminator();
849        if (!(B.empty() && Term && isa<BreakStmt>(Term))) {
850          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
851            FixItHint::CreateInsertion(L, "[[clang::fallthrough]]; ");
852        }
853      }
854      S.Diag(L, diag::note_insert_break_fixit) <<
855        FixItHint::CreateInsertion(L, "break; ");
856    }
857  }
858
859  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
860  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
861                                                    E = Fallthroughs.end();
862                                                    I != E; ++I) {
863    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
864  }
865
866}
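
// Illustrative -Wimplicit-fallthrough behavior (sketch, C++11 mode; wording
// approximate):
//
//   switch (n) {
//   case 0:
//     f();
//     [[clang::fallthrough]];   // annotated fall-through into 'case 1': OK.
//   case 1:
//     g();
//   case 2:                     // warning: unannotated fall-through between
//     break;                    //          switch labels; notes suggest the
//   }                           //          fixits inserted above.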
867
868namespace {
869struct SLocSort {
870  bool operator()(const UninitUse &a, const UninitUse &b) {
871    // Prefer a more confident report over a less confident one.
872    if (a.getKind() != b.getKind())
873      return a.getKind() > b.getKind();
874    SourceLocation aLoc = a.getUser()->getLocStart();
875    SourceLocation bLoc = b.getUser()->getLocStart();
876    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
877  }
878};
879
880class UninitValsDiagReporter : public UninitVariablesHandler {
881  Sema &S;
882  typedef SmallVector<UninitUse, 2> UsesVec;
883  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
884  UsesMap *uses;
885
886public:
887  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
888  ~UninitValsDiagReporter() {
889    flushDiagnostics();
890  }
891
892  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
893    if (!uses)
894      uses = new UsesMap();
895
896    UsesMap::mapped_type &V = (*uses)[vd];
897    UsesVec *&vec = V.first;
898    if (!vec)
899      vec = new UsesVec();
900
901    return V;
902  }
903
904  void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
905    getUses(vd).first->push_back(use);
906  }
907
908  void handleSelfInit(const VarDecl *vd) {
909    getUses(vd).second = true;
910  }
911
912  void flushDiagnostics() {
913    if (!uses)
914      return;
915
916    // FIXME: This iteration order, and thus the resulting diagnostic order,
917    //        is nondeterministic.
918    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
919      const VarDecl *vd = i->first;
920      const UsesMap::mapped_type &V = i->second;
921
922      UsesVec *vec = V.first;
923      bool hasSelfInit = V.second;
924
925      // Specially handle the case where we have uses of an uninitialized
926      // variable, but the root cause is an idiomatic self-init.  We want
927      // to report the diagnostic at the self-init since that is the root cause.
928      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
929        DiagnoseUninitializedUse(S, vd,
930                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
931                                           /* isAlwaysUninit */ true),
932                                 /* alwaysReportSelfInit */ true);
933      else {
934        // Sort the uses by their SourceLocations.  While not strictly
935        // guaranteed to produce them in line/column order, this will provide
936        // a stable ordering.
937        std::sort(vec->begin(), vec->end(), SLocSort());
938
939        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
940             ++vi) {
941          // If we have self-init, downgrade all uses to 'may be uninitialized'.
942          UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
943
944          if (DiagnoseUninitializedUse(S, vd, Use))
945            // Skip further diagnostics for this variable. We try to warn only
946            // on the first point at which a variable is used uninitialized.
947            break;
948        }
949      }
950
951      // Release the uses vector.
952      delete vec;
953    }
954    delete uses;
955  }
956
957private:
958  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
959    for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
960      if (i->getKind() == UninitUse::Always) {
961        return true;
962      }
963    }
964    return false;
965  }
966};
967}
968
969
970//===----------------------------------------------------------------------===//
971// -Wthread-safety
972//===----------------------------------------------------------------------===//
973namespace clang {
974namespace thread_safety {
975typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
976typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
977typedef std::list<DelayedDiag> DiagList;
978
979struct SortDiagBySourceLocation {
980  SourceManager &SM;
981  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
982
983  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
984    // Although this call will be slow, this is only called when outputting
985    // multiple warnings.
986    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
987  }
988};
989
990namespace {
991class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
992  Sema &S;
993  DiagList Warnings;
994  SourceLocation FunLocation, FunEndLocation;
995
996  // Helper functions
997  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
998    // Gracefully handle rare cases when the analysis can't get a more
999    // precise source location.
1000    if (!Loc.isValid())
1001      Loc = FunLocation;
1002    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
1003    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1004  }
1005
1006 public:
1007  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1008    : S(S), FunLocation(FL), FunEndLocation(FEL) {}
1009
1010  /// \brief Emit all buffered diagnostics in order of source location.
1011  /// We need to output diagnostics produced while iterating through
1012  /// the lockset in deterministic order, so this function orders diagnostics
1013  /// and outputs them.
1014  void emitDiagnostics() {
1015    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1016    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
1017         I != E; ++I) {
1018      S.Diag(I->first.first, I->first.second);
1019      const OptionalNotes &Notes = I->second;
1020      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
1021        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
1022    }
1023  }
1024
1025  void handleInvalidLockExp(SourceLocation Loc) {
1026    PartialDiagnosticAt Warning(Loc,
1027                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
1028    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1029  }
1030  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
1031    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
1032  }
1033
1034  void handleDoubleLock(Name LockName, SourceLocation Loc) {
1035    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
1036  }
1037
1038  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
1039                                 SourceLocation LocEndOfScope,
1040                                 LockErrorKind LEK){
1041    unsigned DiagID = 0;
1042    switch (LEK) {
1043      case LEK_LockedSomePredecessors:
1044        DiagID = diag::warn_lock_some_predecessors;
1045        break;
1046      case LEK_LockedSomeLoopIterations:
1047        DiagID = diag::warn_expecting_lock_held_on_loop;
1048        break;
1049      case LEK_LockedAtEndOfFunction:
1050        DiagID = diag::warn_no_unlock;
1051        break;
1052    }
1053    if (LocEndOfScope.isInvalid())
1054      LocEndOfScope = FunEndLocation;
1055
1056    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
1057    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
1058    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1059  }
1060
1061
1062  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
1063                                SourceLocation Loc2) {
1064    PartialDiagnosticAt Warning(
1065      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
1066    PartialDiagnosticAt Note(
1067      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
1068    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1069  }
1070
1071  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
1072                         AccessKind AK, SourceLocation Loc) {
1073    assert((POK == POK_VarAccess || POK == POK_VarDereference)
1074             && "Only works for variables");
1075    unsigned DiagID = POK == POK_VarAccess ?
1076                        diag::warn_variable_requires_any_lock :
1077                        diag::warn_var_deref_requires_any_lock;
1078    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1079      << D->getName() << getLockKindFromAccessKind(AK));
1080    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1081  }
1082
1083  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
1084                          Name LockName, LockKind LK, SourceLocation Loc) {
1085    unsigned DiagID = 0;
1086    switch (POK) {
1087      case POK_VarAccess:
1088        DiagID = diag::warn_variable_requires_lock;
1089        break;
1090      case POK_VarDereference:
1091        DiagID = diag::warn_var_deref_requires_lock;
1092        break;
1093      case POK_FunctionCall:
1094        DiagID = diag::warn_fun_requires_lock;
1095        break;
1096    }
1097    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1098      << D->getName() << LockName << LK);
1099    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1100  }
1101
1102  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
1103    PartialDiagnosticAt Warning(Loc,
1104      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
1105    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1106  }
1107};
1108}
1109}
1110}
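
// Illustrative code exercising the thread-safety diagnostics buffered by
// ThreadSafetyReporter above (sketch, assuming the usual thread-safety
// attribute macros such as GUARDED_BY; wording approximate):
//
//   Mutex mu;
//   int data GUARDED_BY(mu);
//
//   void f() {
//     data = 1;      // warning: writing variable 'data' requires locking
//                    //          'mu' exclusively
//     mu.Lock();
//     mu.Lock();     // warning: locking 'mu' that is already locked
//   }                // warning: mutex 'mu' is still locked at the end of
//                    //          function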
1111
1112//===----------------------------------------------------------------------===//
1113// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1114//  warnings on a function, method, or block.
1115//===----------------------------------------------------------------------===//
1116
1117clang::sema::AnalysisBasedWarnings::Policy::Policy() {
1118  enableCheckFallThrough = 1;
1119  enableCheckUnreachable = 0;
1120  enableThreadSafetyAnalysis = 0;
1121}
1122
1123clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
1124  : S(s),
1125    NumFunctionsAnalyzed(0),
1126    NumFunctionsWithBadCFGs(0),
1127    NumCFGBlocks(0),
1128    MaxCFGBlocksPerFunction(0),
1129    NumUninitAnalysisFunctions(0),
1130    NumUninitAnalysisVariables(0),
1131    MaxUninitAnalysisVariablesPerFunction(0),
1132    NumUninitAnalysisBlockVisits(0),
1133    MaxUninitAnalysisBlockVisitsPerFunction(0) {
1134  DiagnosticsEngine &D = S.getDiagnostics();
1135  DefaultPolicy.enableCheckUnreachable = (unsigned)
1136    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
1137        DiagnosticsEngine::Ignored);
1138  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
1139    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
1140     DiagnosticsEngine::Ignored);
1141
1142}
1143
1144static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
1145  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1146       i = fscope->PossiblyUnreachableDiags.begin(),
1147       e = fscope->PossiblyUnreachableDiags.end();
1148       i != e; ++i) {
1149    const sema::PossiblyUnreachableDiag &D = *i;
1150    S.Diag(D.Loc, D.PD);
1151  }
1152}
1153
1154void clang::sema::
1155AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
1156                                     sema::FunctionScopeInfo *fscope,
1157                                     const Decl *D, const BlockExpr *blkExpr) {
1158
1159  // We avoid doing analysis-based warnings when there are errors for
1160  // two reasons:
1161  // (1) The CFGs often can't be constructed (if the body is invalid), so
1162  //     don't bother trying.
1163  // (2) The code already has problems; running the analysis just takes more
1164  //     time.
1165  DiagnosticsEngine &Diags = S.getDiagnostics();
1166
1167  // Do not do any analysis for declarations in system headers if we are
1168  // going to just ignore them.
1169  if (Diags.getSuppressSystemWarnings() &&
1170      S.SourceMgr.isInSystemHeader(D->getLocation()))
1171    return;
1172
1173  // For code in dependent contexts, we'll do this at instantiation time.
1174  if (cast<DeclContext>(D)->isDependentContext())
1175    return;
1176
1177  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
1178    // Flush out any possibly unreachable diagnostics.
1179    flushDiagnostics(S, fscope);
1180    return;
1181  }
1182
1183  const Stmt *Body = D->getBody();
1184  assert(Body);
1185
1186  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);
1187
1188  // Don't generate EH edges for CallExprs, as we'd like to avoid the n^2
1189  // explosion for destructors that can result and the compile-time hit.
1190  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
1191  AC.getCFGBuildOptions().AddEHEdges = false;
1192  AC.getCFGBuildOptions().AddInitializers = true;
1193  AC.getCFGBuildOptions().AddImplicitDtors = true;
1194
1195  // Force certain expressions to appear as CFGElements in the CFG.  This
1196  // is used to speed up various analyses.
1197  // FIXME: This isn't the right factoring.  This is here for initial
1198  // prototyping, but we need a way for analyses to say what expressions they
1199  // expect to always be CFGElements and then fill in the BuildOptions
1200  // appropriately.  This is essentially a layering violation.
1201  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
1202    // Unreachable code analysis and thread safety require a linearized CFG.
1203    AC.getCFGBuildOptions().setAllAlwaysAdd();
1204  }
1205  else {
1206    AC.getCFGBuildOptions()
1207      .setAlwaysAdd(Stmt::BinaryOperatorClass)
1208      .setAlwaysAdd(Stmt::BlockExprClass)
1209      .setAlwaysAdd(Stmt::CStyleCastExprClass)
1210      .setAlwaysAdd(Stmt::DeclRefExprClass)
1211      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
1212      .setAlwaysAdd(Stmt::UnaryOperatorClass)
1213      .setAlwaysAdd(Stmt::AttributedStmtClass);
1214  }
1215
1216  // The CFG itself is built lazily by AC.getCFG() using the options above.
1217
1218  // Emit delayed diagnostics.
1219  if (!fscope->PossiblyUnreachableDiags.empty()) {
1220    bool analyzed = false;
1221
1222    // Register the expressions with the CFGBuilder.
1223    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1224         i = fscope->PossiblyUnreachableDiags.begin(),
1225         e = fscope->PossiblyUnreachableDiags.end();
1226         i != e; ++i) {
1227      if (const Stmt *stmt = i->stmt)
1228        AC.registerForcedBlockExpression(stmt);
1229    }
1230
1231    if (AC.getCFG()) {
1232      analyzed = true;
1233      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1234            i = fscope->PossiblyUnreachableDiags.begin(),
1235            e = fscope->PossiblyUnreachableDiags.end();
1236            i != e; ++i)
1237      {
1238        const sema::PossiblyUnreachableDiag &D = *i;
1239        bool processed = false;
1240        if (const Stmt *stmt = i->stmt) {
1241          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
1242          CFGReverseBlockReachabilityAnalysis *cra =
1243              AC.getCFGReachablityAnalysis();
1244          // FIXME: We should be able to assert that block is non-null, but
1245          // the CFG analysis can skip potentially-evaluated expressions in
1246          // edge cases; see test/Sema/vla-2.c.
1247          if (block && cra) {
1248            // Can this block be reached from the entrance?
1249            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
1250              S.Diag(D.Loc, D.PD);
1251            processed = true;
1252          }
1253        }
1254        if (!processed) {
1255          // Emit the warning anyway if we cannot map to a basic block.
1256          S.Diag(D.Loc, D.PD);
1257        }
1258      }
1259    }
1260
1261    if (!analyzed)
1262      flushDiagnostics(S, fscope);
1263  }
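
  // Illustrative effect of the delayed-diagnostic handling above (sketch):
  // runtime-behavior diagnostics (e.g. division by zero) are queued by Sema
  // into PossiblyUnreachableDiags and only emitted here when the block that
  // contains them is reachable from the function entry.
  //
  //   void f() {
  //     if (0)
  //       (void)(1 / 0);   // suppressed: the block is unreachable.
  //     (void)(1 / 0);     // emitted: division by zero is undefined.
  //   }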
1264
1265
1266  // Warning: check missing 'return'
1267  if (P.enableCheckFallThrough) {
1268    const CheckFallThroughDiagnostics &CD =
1269      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
1270       : (isa<CXXMethodDecl>(D) &&
1271          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
1272          cast<CXXMethodDecl>(D)->getParent()->isLambda())
1273            ? CheckFallThroughDiagnostics::MakeForLambda()
1274            : CheckFallThroughDiagnostics::MakeForFunction(D));
1275    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
1276  }
1277
1278  // Warning: check for unreachable code
1279  if (P.enableCheckUnreachable) {
1280    // Only check for unreachable code on non-template instantiations.
1281    // Different template instantiations can effectively change the control-flow
1282    // and it is very difficult to prove that a snippet of code in a template
1283    // is unreachable for all instantiations.
1284    bool isTemplateInstantiation = false;
1285    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
1286      isTemplateInstantiation = Function->isTemplateInstantiation();
1287    if (!isTemplateInstantiation)
1288      CheckUnreachable(S, AC);
1289  }
1290
1291  // Check for thread safety violations
1292  if (P.enableThreadSafetyAnalysis) {
1293    SourceLocation FL = AC.getDecl()->getLocation();
1294    SourceLocation FEL = AC.getDecl()->getLocEnd();
1295    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
1296    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
1297    Reporter.emitDiagnostics();
1298  }
1299
1300  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
1301      != DiagnosticsEngine::Ignored ||
1302      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
1303      != DiagnosticsEngine::Ignored ||
1304      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
1305      != DiagnosticsEngine::Ignored) {
1306    if (CFG *cfg = AC.getCFG()) {
1307      UninitValsDiagReporter reporter(S);
1308      UninitVariablesAnalysisStats stats;
1309      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
1310      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
1311                                        reporter, stats);
1312
1313      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
1314        ++NumUninitAnalysisFunctions;
1315        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
1316        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
1317        MaxUninitAnalysisVariablesPerFunction =
1318            std::max(MaxUninitAnalysisVariablesPerFunction,
1319                     stats.NumVariablesAnalyzed);
1320        MaxUninitAnalysisBlockVisitsPerFunction =
1321            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
1322                     stats.NumBlockVisits);
1323      }
1324    }
1325  }
1326
1327  if (Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
1328                              D->getLocStart()) != DiagnosticsEngine::Ignored) {
1329    DiagnoseSwitchLabelsFallthrough(S, AC);
1330  }
1331
1332  // Collect statistics about the CFG if it was built.
1333  if (S.CollectStats && AC.isCFGBuilt()) {
1334    ++NumFunctionsAnalyzed;
1335    if (CFG *cfg = AC.getCFG()) {
1336      // If we successfully built a CFG for this context, record some more
1337      // detail information about it.
1338      NumCFGBlocks += cfg->getNumBlockIDs();
1339      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
1340                                         cfg->getNumBlockIDs());
1341    } else {
1342      ++NumFunctionsWithBadCFGs;
1343    }
1344  }
1345}
1346
1347void clang::sema::AnalysisBasedWarnings::PrintStats() const {
1348  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
1349
1350  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
1351  unsigned AvgCFGBlocksPerFunction =
1352      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
1353  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
1354               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
1355               << "  " << NumCFGBlocks << " CFG blocks built.\n"
1356               << "  " << AvgCFGBlocksPerFunction
1357               << " average CFG blocks per function.\n"
1358               << "  " << MaxCFGBlocksPerFunction
1359               << " max CFG blocks per function.\n";
1360
1361  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
1362      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
1363  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
1364      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
1365  llvm::errs() << NumUninitAnalysisFunctions
1366               << " functions analyzed for uninitialized variables\n"
1367               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
1368               << "  " << AvgUninitVariablesPerFunction
1369               << " average variables per function.\n"
1370               << "  " << MaxUninitAnalysisVariablesPerFunction
1371               << " max variables per function.\n"
1372               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
1373               << "  " << AvgUninitBlockVisitsPerFunction
1374               << " average block visits per function.\n"
1375               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
1376               << " max block visits per function.\n";
1377}
1378