AnalysisBasedWarnings.cpp revision faadf48443f8c2fc53d267485d7e0e1bd382fc75
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines analysis_warnings::[Policy,Executor].
11// Together they are used by Sema to issue warnings based on inexpensive
12// static analysis algorithms in libAnalysis.
13//
14//===----------------------------------------------------------------------===//
15
16#include "clang/Sema/AnalysisBasedWarnings.h"
17#include "clang/Sema/SemaInternal.h"
18#include "clang/Sema/ScopeInfo.h"
19#include "clang/Basic/SourceManager.h"
20#include "clang/Basic/SourceLocation.h"
21#include "clang/Lex/Preprocessor.h"
22#include "clang/Lex/Lexer.h"
23#include "clang/AST/DeclObjC.h"
24#include "clang/AST/DeclCXX.h"
25#include "clang/AST/ExprObjC.h"
26#include "clang/AST/ExprCXX.h"
27#include "clang/AST/StmtObjC.h"
28#include "clang/AST/StmtCXX.h"
29#include "clang/AST/EvaluatedExprVisitor.h"
30#include "clang/AST/StmtVisitor.h"
31#include "clang/AST/RecursiveASTVisitor.h"
32#include "clang/Analysis/AnalysisContext.h"
33#include "clang/Analysis/CFG.h"
34#include "clang/Analysis/Analyses/ReachableCode.h"
35#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
36#include "clang/Analysis/Analyses/ThreadSafety.h"
37#include "clang/Analysis/CFGStmtMap.h"
38#include "clang/Analysis/Analyses/UninitializedValues.h"
39#include "llvm/ADT/BitVector.h"
40#include "llvm/ADT/FoldingSet.h"
41#include "llvm/ADT/ImmutableMap.h"
42#include "llvm/ADT/PostOrderIterator.h"
43#include "llvm/ADT/SmallVector.h"
44#include "llvm/ADT/StringRef.h"
45#include "llvm/Support/Casting.h"
46#include <algorithm>
47#include <iterator>
48#include <vector>
49#include <deque>
50
51using namespace clang;
52
53//===----------------------------------------------------------------------===//
54// Unreachable code analysis.
55//===----------------------------------------------------------------------===//
56
namespace {
  /// Adapter that forwards unreachable-code reports from the
  /// reachable_code analysis into Sema's diagnostic engine as
  /// -Wunreachable-code warnings.
  class UnreachableCodeHandler : public reachable_code::Callback {
    Sema &S;
  public:
    UnreachableCodeHandler(Sema &s) : S(s) {}

    // Invoked once per unreachable statement: L anchors the warning,
    // R1/R2 highlight the dead source ranges.
    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
      S.Diag(L, diag::warn_unreachable) << R1 << R2;
    }
  };
}
68
69/// CheckUnreachable - Check for unreachable code.
70static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
71  UnreachableCodeHandler UC(S);
72  reachable_code::FindUnreachableCode(AC, UC);
73}
74
75//===----------------------------------------------------------------------===//
76// Check for missing return value.
77//===----------------------------------------------------------------------===//
78
/// Classifies how control can leave the end of a function or block body,
/// as computed by CheckFallThrough below.
enum ControlFlowKind {
  UnknownFallThrough,        // No CFG was available; analysis inconclusive.
  NeverFallThrough,          // Never falls off the end, but may return.
  MaybeFallThrough,          // Some paths fall off the end, others do not.
  AlwaysFallThrough,         // Every path falls off the end.
  NeverFallThroughOrReturn   // Neither falls off the end nor returns.
};
86
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        // Only resurrect blocks with no predecessors whose terminator is a
        // try statement: those are catch handlers that merely look dead
        // because the EH edges were not built.
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator
	 I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      // An empty block ending in a try statement is an exceptional edge,
      // not a genuine fall-through.
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the edge based on the last statement in the block.
    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      // A normal return reaching the exit block.
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      // Inline MS assembly may transfer control arbitrarily, so treat it
      // as both a fake edge and a possible return.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // If the exit block is not a direct successor, this edge reaches it
    // only through exceptional paths.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  // Fold the per-edge observations into a single verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
215
namespace {

/// Bundles the diagnostic IDs used when reporting fall-off-the-end problems,
/// chosen up front so the same checking logic can serve ordinary functions,
/// blocks, and lambdas (which differ in which cases are errors vs warnings).
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  // Which kind of code body these diagnostics were built for.
  enum { Function, Block, Lambda } funMode;
  SourceLocation FuncLoc;

  /// Build the diagnostic set for a function or ObjC method declaration.
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    // A diagnostic ID of 0 means "don't emit the noreturn suggestion".
    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  /// Build the diagnostic set for a block; falling off a non-void block is
  /// an error here rather than a warning.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = Block;
    return D;
  }

  /// Build the diagnostic set for a lambda body; no noreturn suggestion is
  /// ever emitted for lambdas (ID 0).
  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true if every diagnostic that could be emitted for this body
  /// is disabled at FuncLoc, letting the caller skip the CFG analysis.
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == DiagnosticsEngine::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == DiagnosticsEngine::Ignored)
        && (!ReturnsVoid ||
            // NOTE(review): this consults warn_suggest_noreturn_block even
            // though funMode is Function (MakeForFunction installs
            // warn_suggest_noreturn_function) — confirm this is intentional.
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
    }

    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn
            && ((funMode == Lambda) ||
                D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
                  == DiagnosticsEngine::Ignored);
  }
};

}
314
/// CheckFallThroughForFunctionDef - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param blkExpr only consulted when D is a BlockDecl, to recover the
///        block's function type; may be null otherwise.
/// \param CD the pre-selected diagnostic IDs for this kind of body.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Determine the return-void / noreturn properties from whichever kind of
  // declaration we were handed.
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    // noreturn may come from either the declaration attribute or the
    // function type itself.
    HasNoReturn = FD->hasAttr<NoReturnAttr>() ||
       FD->getType()->getAs<FunctionType>()->getNoReturnAttr();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, the function type is found through the block pointer type
    // of the BlockExpr.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // Suggest marking the body noreturn; a diagnostic ID of 0 means the
        // suggestion was suppressed (virtual method, template instantiation,
        // or lambda).
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}
393
394//===----------------------------------------------------------------------===//
395// -Wuninitialized
396//===----------------------------------------------------------------------===//
397
398namespace {
399/// ContainsReference - A visitor class to search for references to
400/// a particular declaration (the needle) within any evaluated component of an
401/// expression (recursively).
402class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
403  bool FoundReference;
404  const DeclRefExpr *Needle;
405
406public:
407  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
408    : EvaluatedExprVisitor<ContainsReference>(Context),
409      FoundReference(false), Needle(Needle) {}
410
411  void VisitExpr(Expr *E) {
412    // Stop evaluating if we already have a reference.
413    if (FoundReference)
414      return;
415
416    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
417  }
418
419  void VisitDeclRefExpr(DeclRefExpr *E) {
420    if (E == Needle)
421      FoundReference = true;
422    else
423      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
424  }
425
426  bool doesContainReference() const { return FoundReference; }
427};
428}
429
430static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
431  QualType VariableTy = VD->getType().getCanonicalType();
432  if (VariableTy->isBlockPointerType() &&
433      !VD->hasAttr<BlocksAttr>()) {
434    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
435    << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
436    return true;
437  }
438
439  // Don't issue a fixit if there is already an initializer.
440  if (VD->getInit())
441    return false;
442
443  // Suggest possible initialization (if any).
444  std::string Init = S.getFixItZeroInitializerForType(VariableTy);
445  if (Init.empty())
446    return false;
447
448  // Don't suggest a fixit inside macros.
449  if (VD->getLocEnd().isMacroID())
450    return false;
451
452  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
453
454  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
455    << FixItHint::CreateInsertion(Loc, Init);
456  return true;
457}
458
459/// Create a fixit to remove an if-like statement, on the assumption that its
460/// condition is CondVal.
461static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
462                          const Stmt *Else, bool CondVal,
463                          FixItHint &Fixit1, FixItHint &Fixit2) {
464  if (CondVal) {
465    // If condition is always true, remove all but the 'then'.
466    Fixit1 = FixItHint::CreateRemoval(
467        CharSourceRange::getCharRange(If->getLocStart(),
468                                      Then->getLocStart()));
469    if (Else) {
470      SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
471          Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
472      Fixit2 = FixItHint::CreateRemoval(
473          SourceRange(ElseKwLoc, Else->getLocEnd()));
474    }
475  } else {
476    // If condition is always false, remove all but the 'else'.
477    if (Else)
478      Fixit1 = FixItHint::CreateRemoval(
479          CharSourceRange::getCharRange(If->getLocStart(),
480                                        Else->getLocStart()));
481    else
482      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
483  }
484}
485
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// \param IsCapturedByBlock true when the "use" is a block capturing the
///        variable rather than a direct read; adjusts the diagnostic wording.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    const char *Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    // Classify the terminator to pick the wording and the fixits.
    switch (Term->getStmtClass()) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. This happens for range-based for, which the user
      // can't explicitly fix.
      // FIXME: This also happens if the first use of a variable is always
      // uninitialized, eg "for (int n; n < 10; ++n)". We should report that
      // with the 'is uninitialized' diagnostic.
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only && and || act as branch terminators here.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
                                                      BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      // Removing the condition of a 'for' makes it loop forever (entered);
      // otherwise replace it with a constant to force the exit.
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Emit the branch-specific warning, point at the use, and attach the
    // condition-removal fixits when we computed any.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
      << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch was diagnosable: fall back to the plain always/maybe
  // uninitialized warning at the use site.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getLocStart(),
           Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
                                              : diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
624
625/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
626/// uninitialized variable. This manages the different forms of diagnostic
627/// emitted for particular types of uses. Returns true if the use was diagnosed
628/// as a warning. If a particular use is one we omit warnings for, returns
629/// false.
630static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
631                                     const UninitUse &Use,
632                                     bool alwaysReportSelfInit = false) {
633
634  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
635    // Inspect the initializer of the variable declaration which is
636    // being referenced prior to its initialization. We emit
637    // specialized diagnostics for self-initialization, and we
638    // specifically avoid warning about self references which take the
639    // form of:
640    //
641    //   int x = x;
642    //
643    // This is used to indicate to GCC that 'x' is intentionally left
644    // uninitialized. Proven code paths which access 'x' in
645    // an uninitialized state after this will still warn.
646    if (const Expr *Initializer = VD->getInit()) {
647      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
648        return false;
649
650      ContainsReference CR(S.Context, DRE);
651      CR.Visit(const_cast<Expr*>(Initializer));
652      if (CR.doesContainReference()) {
653        S.Diag(DRE->getLocStart(),
654               diag::warn_uninit_self_reference_in_init)
655          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
656        return true;
657      }
658    }
659
660    DiagUninitUse(S, VD, Use, false);
661  } else {
662    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
663    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
664      S.Diag(BE->getLocStart(),
665             diag::warn_uninit_byref_blockvar_captured_by_block)
666        << VD->getDeclName();
667    else
668      DiagUninitUse(S, VD, Use, true);
669  }
670
671  // Report where the variable was declared when the use wasn't within
672  // the initializer of that declaration & we didn't already suggest
673  // an initialization fixit.
674  if (!SuggestInitializationFixit(S, VD))
675    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
676      << VD->getDeclName();
677
678  return true;
679}
680
namespace {
  /// Walks a function body collecting [[clang::fallthrough]] annotations and
  /// switch statements, then answers, per CFG block, whether control can fall
  /// into a case label without being annotated.
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  public:
    FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
    }

    /// True if the traversed body contained any switch statement.
    bool foundSwitchStatements() const { return FoundSwitchStatements; }

    /// Record that the given fallthrough annotation guards an actual
    /// fall-through edge; annotations still in the set at the end are
    /// misplaced.
    void markFallthroughVisited(const AttributedStmt *Stmt) {
      bool Found = FallthroughStmts.erase(Stmt);
      assert(Found);
      (void)Found;
    }

    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

    /// The fallthrough annotations that were never matched to an edge.
    const AttrStmts &getFallthroughStmts() const {
      return FallthroughStmts;
    }

    /// Returns true if any predecessor of block B (which starts at a case
    /// label) falls through into it without a fallthrough annotation.
    /// AnnotatedCnt receives the number of annotated fall-through edges.
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      // Breadth-first worklist over predecessors; empty blocks are looked
      // through by enqueueing their own predecessors.
      std::deque<const CFGBlock*> BlockQueue;

      std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));

      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();

        const Stmt *Term = P->getTerminator();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        if (P->pred_begin() == P->pred_end()) {  // The block is unreachable.
          // This only catches trivially unreachable blocks.
          // Any fallthrough annotation found here can never take effect, so
          // warn about it and consume it.
          for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
               ElIt != ElEnd; ++ElIt) {
            if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
                S.Diag(AS->getLocStart(),
                       diag::warn_fallthrough_attr_unreachable);
                markFallthroughVisited(AS);
                ++AnnotatedCnt;
              }
              // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        // An actual, unannotated fall-through edge.
        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }

    // RecursiveASTVisitor setup.
    bool shouldWalkTypesOfTypeLocs() const { return false; }

    // Collect every fallthrough-attributed statement seen during traversal.
    bool VisitAttributedStmt(AttributedStmt *S) {
      if (asFallThroughAttr(S))
        FallthroughStmts.insert(S);
      return true;
    }

    bool VisitSwitchStmt(SwitchStmt *S) {
      FoundSwitchStatements = true;
      return true;
    }

  private:

    /// Returns S as an AttributedStmt carrying FallThroughAttr, else null.
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return 0;
    }

    /// Returns the last executable statement of block B, or null if the
    /// block contains none.
    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminator())
        return Term;
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                            ElemEnd = B.rend();
                                            ElemIt != ElemEnd; ++ElemIt) {
        if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
          return CS->getStmt();
      }
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return 0;
    }

    bool FoundSwitchStatements;    // Any switch seen in the body.
    AttrStmts FallthroughStmts;    // Annotations not yet matched to an edge.
    Sema &S;
  };
}
815
/// Diagnose unannotated fall-through between switch labels, and complain
/// about [[clang::fallthrough]] annotations that don't guard a fall-through.
///
/// \param PerFunction when true, only diagnose in functions that already use
///        at least one fallthrough annotation (the per-function warning
///        flavor); otherwise diagnose every switch.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // No switches at all: nothing can fall through.
  if (!FM.foundSwitchStatements())
    return;

  // Per-function mode only fires once the user has opted in by annotating.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  int AnnotatedCnt;

  // Visit blocks in source order (reverse CFG order) so diagnostics come out
  // top to bottom; only blocks that start with a case/default label matter.
  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock &B = **I;
    const Stmt *Label = B.getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    if (!FM.checkFallThroughIntoBlock(B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
        PerFunction ? diag::warn_unannotated_fallthrough_per_function
                    : diag::warn_unannotated_fallthrough);

    // Offer fixits only when no edge was annotated and the label isn't
    // macro-expanded (fixits in macros are unreliable).
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus0x) {
        // Suggest the C++11 annotation unless the previous case is an empty
        // one ending in 'break' (where annotating makes no sense).
        const Stmt *Term = B.getTerminator();
        if (!(B.empty() && Term && isa<BreakStmt>(Term))) {
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
            FixItHint::CreateInsertion(L, "[[clang::fallthrough]]; ");
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
        FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation still unmatched doesn't guard a fall-through edge.
  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
                                                    E = Fallthroughs.end();
                                                    I != E; ++I) {
    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
  }

}
872
873namespace {
874struct SLocSort {
875  bool operator()(const UninitUse &a, const UninitUse &b) {
876    // Prefer a more confident report over a less confident one.
877    if (a.getKind() != b.getKind())
878      return a.getKind() > b.getKind();
879    SourceLocation aLoc = a.getUser()->getLocStart();
880    SourceLocation bLoc = b.getUser()->getLocStart();
881    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
882  }
883};
884
/// Collects uses of (possibly) uninitialized variables reported by the
/// uninitialized-values analysis, then emits diagnostics for them in a
/// stable order when flushed (or on destruction).
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Maps each variable to its recorded uses plus a flag indicating whether
  // an idiomatic self-initialization ("int x = x;") was seen for it.
  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
  // Lazily allocated in getUses(); owned by this reporter and freed (along
  // with each per-variable vector) in flushDiagnostics().
  UsesMap *uses;

public:
  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
  ~UninitValsDiagReporter() {
    flushDiagnostics();
  }

  /// Return (creating on first use) the uses-vector/self-init-flag entry
  /// for \p vd.  The returned pair's vector is always non-null.
  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
    if (!uses)
      uses = new UsesMap();

    UsesMap::mapped_type &V = (*uses)[vd];
    UsesVec *&vec = V.first;
    if (!vec)
      vec = new UsesVec();

    return V;
  }

  /// Callback from the analysis: record one uninitialized use of \p vd.
  void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
    getUses(vd).first->push_back(use);
  }

  /// Callback from the analysis: \p vd was initialized with itself.
  void handleSelfInit(const VarDecl *vd) {
    getUses(vd).second = true;
  }

  /// Emit all collected diagnostics and release the per-variable storage.
  /// Safe to call when nothing was recorded.
  void flushDiagnostics() {
    if (!uses)
      return;

    // FIXME: This iteration order, and thus the resulting diagnostic order,
    //        is nondeterministic.
    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
      const VarDecl *vd = i->first;
      const UsesMap::mapped_type &V = i->second;

      UsesVec *vec = V.first;
      bool hasSelfInit = V.second;

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        std::sort(vec->begin(), vec->end(), SLocSort());

        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
             ++vi) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }
    delete uses;
  }

private:
  // True if any recorded use is definitely (not just maybe) uninitialized.
  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
  for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
    if (i->getKind() == UninitUse::Always) {
      return true;
    }
  }
  return false;
}
};
972}
973
974
975//===----------------------------------------------------------------------===//
976// -Wthread-safety
977//===----------------------------------------------------------------------===//
978namespace clang {
979namespace thread_safety {
980typedef llvm::SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
981typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
982typedef std::list<DelayedDiag> DiagList;
983
984struct SortDiagBySourceLocation {
985  SourceManager &SM;
986  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
987
988  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
989    // Although this call will be slow, this is only called when outputting
990    // multiple warnings.
991    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
992  }
993};
994
995namespace {
/// Buffers the diagnostics produced while running the thread-safety analysis
/// over one function so they can be sorted by source location and emitted in
/// a deterministic order via emitDiagnostics().
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;   // Buffered warnings (each with optional notes).
  // Fallback locations used when the analysis has no precise location:
  // the start and end of the function being analyzed.
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions

  /// Queue a lock-mismatch warning at \p Loc, falling back to the start of
  /// the function when \p Loc is invalid.
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I) {
      S.Diag(I->first.first, I->first.second);
      const OptionalNotes &Notes = I->second;
      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
    }
  }

  /// Called when a lock expression cannot be resolved to a lockable object.
  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Called when a mutex is unlocked without being held.
  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  /// Called when a mutex is acquired while already held.
  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  /// Called when a mutex's lock state at the end of a scope does not match
  /// expectations; \p LEK selects which flavor of mismatch to report.  Adds
  /// a note pointing at where the mutex was locked.
  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK){
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }


  /// Called when the same mutex is held in both exclusive and shared modes
  /// on different paths; notes the conflicting acquisition site.
  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnosticAt Warning(
      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
    PartialDiagnosticAt Note(
      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  /// Called when a guarded variable is accessed while holding no mutex at
  /// all.  Only variable accesses/dereferences reach this handler.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getName() << getLockKindFromAccessKind(AK));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Called when an operation requires a specific mutex (in mode \p LK)
  /// that is not currently held.
  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc) {
    unsigned DiagID = 0;
    switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
    }
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getName() << LockName << LK);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Called when a function is invoked while holding a mutex it declares
  /// (via LOCKS_EXCLUDED) must not be held.
  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
1116}
1117}
1118}
1119
1120//===----------------------------------------------------------------------===//
1121// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1122//  warnings on a function, method, or block.
1123//===----------------------------------------------------------------------===//
1124
1125clang::sema::AnalysisBasedWarnings::Policy::Policy() {
1126  enableCheckFallThrough = 1;
1127  enableCheckUnreachable = 0;
1128  enableThreadSafetyAnalysis = 0;
1129}
1130
1131clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
1132  : S(s),
1133    NumFunctionsAnalyzed(0),
1134    NumFunctionsWithBadCFGs(0),
1135    NumCFGBlocks(0),
1136    MaxCFGBlocksPerFunction(0),
1137    NumUninitAnalysisFunctions(0),
1138    NumUninitAnalysisVariables(0),
1139    MaxUninitAnalysisVariablesPerFunction(0),
1140    NumUninitAnalysisBlockVisits(0),
1141    MaxUninitAnalysisBlockVisitsPerFunction(0) {
1142  DiagnosticsEngine &D = S.getDiagnostics();
1143  DefaultPolicy.enableCheckUnreachable = (unsigned)
1144    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
1145        DiagnosticsEngine::Ignored);
1146  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
1147    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
1148     DiagnosticsEngine::Ignored);
1149
1150}
1151
1152static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
1153  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1154       i = fscope->PossiblyUnreachableDiags.begin(),
1155       e = fscope->PossiblyUnreachableDiags.end();
1156       i != e; ++i) {
1157    const sema::PossiblyUnreachableDiag &D = *i;
1158    S.Diag(D.Loc, D.PD);
1159  }
1160}
1161
/// Run all enabled analysis-based warnings over one function, method, or
/// block body \p D (with \p Body already attached), using policy \p P and
/// the per-function state in \p fscope.  \p blkExpr is non-null when
/// analyzing a block.  Emits diagnostics directly through Sema.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Construct the analysis context; the CFG itself is built lazily on the
  // first getCFG() call below, using the build options configured next.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Emit delayed diagnostics: warnings that were attached to possibly
  // unreachable code during parsing.  Each one is emitted only if its
  // statement's basic block is reachable from the function entry (or if we
  // cannot map it to a block at all).
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    // If the CFG could not be built, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }


  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostic set matching the kind of body: block, lambda
    // call operator, or ordinary function/method.
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
       : (isa<CXXMethodDecl>(D) &&
          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
          cast<CXXMethodDecl>(D)->getParent()->isLambda())
            ? CheckFallThroughDiagnostics::MakeForLambda()
            : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  // Run the uninitialized-variables analysis only if at least one of its
  // warnings is enabled at this location.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // -Wimplicit-fallthrough: run in "full" mode if the main warning is on,
  // otherwise in per-function mode if only the per-function variant is on.
  bool FallThroughDiagFull =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  bool FallThroughDiagPerFunction =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  if (FallThroughDiagFull || FallThroughDiagPerFunction) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
1361
1362void clang::sema::AnalysisBasedWarnings::PrintStats() const {
1363  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
1364
1365  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
1366  unsigned AvgCFGBlocksPerFunction =
1367      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
1368  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
1369               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
1370               << "  " << NumCFGBlocks << " CFG blocks built.\n"
1371               << "  " << AvgCFGBlocksPerFunction
1372               << " average CFG blocks per function.\n"
1373               << "  " << MaxCFGBlocksPerFunction
1374               << " max CFG blocks per function.\n";
1375
1376  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
1377      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
1378  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
1379      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
1380  llvm::errs() << NumUninitAnalysisFunctions
1381               << " functions analyzed for uninitialiazed variables\n"
1382               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
1383               << "  " << AvgUninitVariablesPerFunction
1384               << " average variables per function.\n"
1385               << "  " << MaxUninitAnalysisVariablesPerFunction
1386               << " max variables per function.\n"
1387               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
1388               << "  " << AvgUninitBlockVisitsPerFunction
1389               << " average block visits per function.\n"
1390               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
1391               << " max block visits per function.\n";
1392}
1393