AnalysisBasedWarnings.cpp revision 4874a8143dc3032205f97527ff619730db3d1f57
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines analysis_warnings::[Policy,Executor].
11// Together they are used by Sema to issue warnings based on inexpensive
12// static analysis algorithms in libAnalysis.
13//
14//===----------------------------------------------------------------------===//
15
16#include "clang/Sema/AnalysisBasedWarnings.h"
17#include "clang/AST/DeclCXX.h"
18#include "clang/AST/DeclObjC.h"
19#include "clang/AST/EvaluatedExprVisitor.h"
20#include "clang/AST/ExprCXX.h"
21#include "clang/AST/ExprObjC.h"
22#include "clang/AST/ParentMap.h"
23#include "clang/AST/RecursiveASTVisitor.h"
24#include "clang/AST/StmtCXX.h"
25#include "clang/AST/StmtObjC.h"
26#include "clang/AST/StmtVisitor.h"
27#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
28#include "clang/Analysis/Analyses/ReachableCode.h"
29#include "clang/Analysis/Analyses/ThreadSafety.h"
30#include "clang/Analysis/Analyses/UninitializedValues.h"
31#include "clang/Analysis/AnalysisContext.h"
32#include "clang/Analysis/CFG.h"
33#include "clang/Analysis/CFGStmtMap.h"
34#include "clang/Basic/SourceLocation.h"
35#include "clang/Basic/SourceManager.h"
36#include "clang/Lex/Lexer.h"
37#include "clang/Lex/Preprocessor.h"
38#include "clang/Sema/ScopeInfo.h"
39#include "clang/Sema/SemaInternal.h"
40#include "llvm/ADT/ArrayRef.h"
41#include "llvm/ADT/BitVector.h"
42#include "llvm/ADT/FoldingSet.h"
43#include "llvm/ADT/ImmutableMap.h"
44#include "llvm/ADT/PostOrderIterator.h"
45#include "llvm/ADT/SmallString.h"
46#include "llvm/ADT/SmallVector.h"
47#include "llvm/ADT/StringRef.h"
48#include "llvm/Support/Casting.h"
49#include <algorithm>
50#include <deque>
51#include <iterator>
52#include <vector>
53
54using namespace clang;
55
56//===----------------------------------------------------------------------===//
57// Unreachable code analysis.
58//===----------------------------------------------------------------------===//
59
60namespace {
61  class UnreachableCodeHandler : public reachable_code::Callback {
62    Sema &S;
63  public:
64    UnreachableCodeHandler(Sema &s) : S(s) {}
65
66    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
67      S.Diag(L, diag::warn_unreachable) << R1 << R2;
68    }
69  };
70}
71
72/// CheckUnreachable - Check for unreachable code.
73static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
74  UnreachableCodeHandler UC(S);
75  reachable_code::FindUnreachableCode(AC, UC);
76}
77
78//===----------------------------------------------------------------------===//
79// Check for missing return value.
80//===----------------------------------------------------------------------===//
81
82enum ControlFlowKind {
83  UnknownFallThrough,
84  NeverFallThrough,
85  MaybeFallThrough,
86  AlwaysFallThrough,
87  NeverFallThroughOrReturn
88};
89
90/// CheckFallThrough - Check that we don't fall off the end of a
91/// statement that should return a value.
92///
93/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
94/// MaybeFallThrough iff we might or might not fall off the end,
95/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
96/// return, and NeverFallThrough iff we never fall off the end of the
97/// statement but we may return.  We assume that functions not marked noreturn
98/// will return.
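/// For illustration only (a rough sketch of the classification; assumes the
/// standard declaration of abort() carries a noreturn attribute):
///   int f(bool b) { if (b) return 1; }   // MaybeFallThrough
///   int g()       { return 0; }          // NeverFallThrough
///   void h()      { }                    // AlwaysFallThrough
///   void k()      { abort(); }           // NeverFallThroughOrReturn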
99static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
100  CFG *cfg = AC.getCFG();
101  if (cfg == 0) return UnknownFallThrough;
102
103  // The CFG leaves in dead code (unreachable blocks), and we don't want the
104  // dead code paths to confuse us, so we mark all live blocks first.
105  llvm::BitVector live(cfg->getNumBlockIDs());
106  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
107                                                          live);
108
109  bool AddEHEdges = AC.getAddEHEdges();
110  if (!AddEHEdges && count != cfg->getNumBlockIDs())
111    // When there are blocks remaining dead, and we didn't add EH edges
112    // from CallExprs to the catch clauses, we have to go back and
113    // mark them as live.
114    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
115      CFGBlock &b = **I;
116      if (!live[b.getBlockID()]) {
117        if (b.pred_begin() == b.pred_end()) {
118          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
119            // When not adding EH edges from calls, catch clauses
120            // can otherwise seem dead.  Avoid noting them as dead.
121            count += reachable_code::ScanReachableFromBlock(&b, live);
122          continue;
123        }
124      }
125    }
126
127  // Now that we know what is live, we check the live predecessors of the
128  // exit block and look for fall-through paths, being careful to ignore
129  // normal returns and exceptional paths.
130  bool HasLiveReturn = false;
131  bool HasFakeEdge = false;
132  bool HasPlainEdge = false;
133  bool HasAbnormalEdge = false;
134
135  // Ignore default cases that aren't likely to be reachable because all
136  // enums in a switch(X) have explicit case statements.
137  CFGBlock::FilterOptions FO;
138  FO.IgnoreDefaultsWithCoveredEnums = 1;
139
140  for (CFGBlock::filtered_pred_iterator
141       I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
142    const CFGBlock& B = **I;
143    if (!live[B.getBlockID()])
144      continue;
145
146    // Skip blocks which contain an element marked as no-return. They don't
147    // represent actually viable edges into the exit block, so mark them as
148    // abnormal.
149    if (B.hasNoReturnElement()) {
150      HasAbnormalEdge = true;
151      continue;
152    }
153
154    // Destructors can appear after the 'return' in the CFG.  This is
155    // normal.  We need to look past the destructors for the return
156    // statement (if it exists).
157    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();
158
159    for ( ; ri != re ; ++ri)
160      if (isa<CFGStmt>(*ri))
161        break;
162
163    // No more CFGElements in the block?
164    if (ri == re) {
165      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
166        HasAbnormalEdge = true;
167        continue;
168      }
169      // A labeled empty statement, or the entry block...
170      HasPlainEdge = true;
171      continue;
172    }
173
174    CFGStmt CS = cast<CFGStmt>(*ri);
175    const Stmt *S = CS.getStmt();
176    if (isa<ReturnStmt>(S)) {
177      HasLiveReturn = true;
178      continue;
179    }
180    if (isa<ObjCAtThrowStmt>(S)) {
181      HasFakeEdge = true;
182      continue;
183    }
184    if (isa<CXXThrowExpr>(S)) {
185      HasFakeEdge = true;
186      continue;
187    }
188    if (isa<MSAsmStmt>(S)) {
189      // TODO: Verify this is correct.
190      HasFakeEdge = true;
191      HasLiveReturn = true;
192      continue;
193    }
194    if (isa<CXXTryStmt>(S)) {
195      HasAbnormalEdge = true;
196      continue;
197    }
198    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
199        == B.succ_end()) {
200      HasAbnormalEdge = true;
201      continue;
202    }
203
204    HasPlainEdge = true;
205  }
206  if (!HasPlainEdge) {
207    if (HasLiveReturn)
208      return NeverFallThrough;
209    return NeverFallThroughOrReturn;
210  }
211  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
212    return MaybeFallThrough;
213  // This says AlwaysFallThrough for calls to functions that are not marked
214  // noreturn but do not actually return.  If people would like this warning
215  // to be more accurate, such functions should be marked as noreturn.
216  return AlwaysFallThrough;
217}
218
219namespace {
220
221struct CheckFallThroughDiagnostics {
222  unsigned diag_MaybeFallThrough_HasNoReturn;
223  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
224  unsigned diag_AlwaysFallThrough_HasNoReturn;
225  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
226  unsigned diag_NeverFallThroughOrReturn;
227  enum { Function, Block, Lambda } funMode;
228  SourceLocation FuncLoc;
229
230  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
231    CheckFallThroughDiagnostics D;
232    D.FuncLoc = Func->getLocation();
233    D.diag_MaybeFallThrough_HasNoReturn =
234      diag::warn_falloff_noreturn_function;
235    D.diag_MaybeFallThrough_ReturnsNonVoid =
236      diag::warn_maybe_falloff_nonvoid_function;
237    D.diag_AlwaysFallThrough_HasNoReturn =
238      diag::warn_falloff_noreturn_function;
239    D.diag_AlwaysFallThrough_ReturnsNonVoid =
240      diag::warn_falloff_nonvoid_function;
241
242    // Don't suggest that virtual functions be marked "noreturn", since they
243    // might be overridden by non-noreturn functions.
244    bool isVirtualMethod = false;
245    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
246      isVirtualMethod = Method->isVirtual();
247
248    // Don't suggest that template instantiations be marked "noreturn".
249    bool isTemplateInstantiation = false;
250    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
251      isTemplateInstantiation = Function->isTemplateInstantiation();
252
253    if (!isVirtualMethod && !isTemplateInstantiation)
254      D.diag_NeverFallThroughOrReturn =
255        diag::warn_suggest_noreturn_function;
256    else
257      D.diag_NeverFallThroughOrReturn = 0;
258
259    D.funMode = Function;
260    return D;
261  }
262
263  static CheckFallThroughDiagnostics MakeForBlock() {
264    CheckFallThroughDiagnostics D;
265    D.diag_MaybeFallThrough_HasNoReturn =
266      diag::err_noreturn_block_has_return_expr;
267    D.diag_MaybeFallThrough_ReturnsNonVoid =
268      diag::err_maybe_falloff_nonvoid_block;
269    D.diag_AlwaysFallThrough_HasNoReturn =
270      diag::err_noreturn_block_has_return_expr;
271    D.diag_AlwaysFallThrough_ReturnsNonVoid =
272      diag::err_falloff_nonvoid_block;
273    D.diag_NeverFallThroughOrReturn =
274      diag::warn_suggest_noreturn_block;
275    D.funMode = Block;
276    return D;
277  }
278
279  static CheckFallThroughDiagnostics MakeForLambda() {
280    CheckFallThroughDiagnostics D;
281    D.diag_MaybeFallThrough_HasNoReturn =
282      diag::err_noreturn_lambda_has_return_expr;
283    D.diag_MaybeFallThrough_ReturnsNonVoid =
284      diag::warn_maybe_falloff_nonvoid_lambda;
285    D.diag_AlwaysFallThrough_HasNoReturn =
286      diag::err_noreturn_lambda_has_return_expr;
287    D.diag_AlwaysFallThrough_ReturnsNonVoid =
288      diag::warn_falloff_nonvoid_lambda;
289    D.diag_NeverFallThroughOrReturn = 0;
290    D.funMode = Lambda;
291    return D;
292  }
293
294  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
295                        bool HasNoReturn) const {
296    if (funMode == Function) {
297      return (ReturnsVoid ||
298              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
299                                   FuncLoc) == DiagnosticsEngine::Ignored)
300        && (!HasNoReturn ||
301            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
302                                 FuncLoc) == DiagnosticsEngine::Ignored)
303        && (!ReturnsVoid ||
304            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
305              == DiagnosticsEngine::Ignored);
306    }
307
308    // For blocks / lambdas.
309    return ReturnsVoid && !HasNoReturn
310            && ((funMode == Lambda) ||
311                D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
312                  == DiagnosticsEngine::Ignored);
313  }
314};
315
316}
317
318/// CheckFallThroughForBody - Check that we don't fall off the end of a
319/// function that should return a value.  Check that we don't fall off the end
320/// of a noreturn function.  We assume that functions and blocks not marked
321/// noreturn will return.
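/// For example (illustrative; the exact wording depends on the
/// CheckFallThroughDiagnostics in use):
///   int f(int x) {
///     if (x > 0)
///       return x;
///   }  // warns, roughly, that control may reach the end of a non-void function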
322static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
323                                    const BlockExpr *blkExpr,
324                                    const CheckFallThroughDiagnostics& CD,
325                                    AnalysisDeclContext &AC) {
326
327  bool ReturnsVoid = false;
328  bool HasNoReturn = false;
329
330  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
331    ReturnsVoid = FD->getResultType()->isVoidType();
332    HasNoReturn = FD->isNoReturn();
333  }
334  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
335    ReturnsVoid = MD->getResultType()->isVoidType();
336    HasNoReturn = MD->hasAttr<NoReturnAttr>();
337  }
338  else if (isa<BlockDecl>(D)) {
339    QualType BlockTy = blkExpr->getType();
340    if (const FunctionType *FT =
341          BlockTy->getPointeeType()->getAs<FunctionType>()) {
342      if (FT->getResultType()->isVoidType())
343        ReturnsVoid = true;
344      if (FT->getNoReturnAttr())
345        HasNoReturn = true;
346    }
347  }
348
349  DiagnosticsEngine &Diags = S.getDiagnostics();
350
351  // Short circuit for compilation speed.
352  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
353      return;
354
355  // FIXME: Function try block
356  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
357    switch (CheckFallThrough(AC)) {
358      case UnknownFallThrough:
359        break;
360
361      case MaybeFallThrough:
362        if (HasNoReturn)
363          S.Diag(Compound->getRBracLoc(),
364                 CD.diag_MaybeFallThrough_HasNoReturn);
365        else if (!ReturnsVoid)
366          S.Diag(Compound->getRBracLoc(),
367                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
368        break;
369      case AlwaysFallThrough:
370        if (HasNoReturn)
371          S.Diag(Compound->getRBracLoc(),
372                 CD.diag_AlwaysFallThrough_HasNoReturn);
373        else if (!ReturnsVoid)
374          S.Diag(Compound->getRBracLoc(),
375                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
376        break;
377      case NeverFallThroughOrReturn:
378        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
379          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
380            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
381              << 0 << FD;
382          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
383            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
384              << 1 << MD;
385          } else {
386            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
387          }
388        }
389        break;
390      case NeverFallThrough:
391        break;
392    }
393  }
394}
395
396//===----------------------------------------------------------------------===//
397// -Wuninitialized
398//===----------------------------------------------------------------------===//
399
400namespace {
401/// ContainsReference - A visitor class to search for references to
402/// a particular declaration (the needle) within any evaluated component of an
403/// expression (recursively).
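/// For example (illustrative): given 'int x = x + 1;', visiting the
/// initializer with the DeclRefExpr for 'x' as the needle reports that the
/// initializer contains a reference to 'x'.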
404class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
405  bool FoundReference;
406  const DeclRefExpr *Needle;
407
408public:
409  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
410    : EvaluatedExprVisitor<ContainsReference>(Context),
411      FoundReference(false), Needle(Needle) {}
412
413  void VisitExpr(Expr *E) {
414    // Stop evaluating if we already have a reference.
415    if (FoundReference)
416      return;
417
418    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
419  }
420
421  void VisitDeclRefExpr(DeclRefExpr *E) {
422    if (E == Needle)
423      FoundReference = true;
424    else
425      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
426  }
427
428  bool doesContainReference() const { return FoundReference; }
429};
430}
431
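/// SuggestInitializationFixit - Suggest a way to silence an uninitialized-use
/// warning for the given variable.  A rough sketch of the behavior below:
/// for a block-pointer variable not marked __block, suggest prepending
/// '__block'; otherwise, if a zero initializer is available for the type,
/// suggest appending it, e.g. (illustrative) 'int x;' -> 'int x = 0;'.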
432static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
433  QualType VariableTy = VD->getType().getCanonicalType();
434  if (VariableTy->isBlockPointerType() &&
435      !VD->hasAttr<BlocksAttr>()) {
436    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
437    << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
438    return true;
439  }
440
441  // Don't issue a fixit if there is already an initializer.
442  if (VD->getInit())
443    return false;
444
445  // Suggest possible initialization (if any).
446  std::string Init = S.getFixItZeroInitializerForType(VariableTy);
447  if (Init.empty())
448    return false;
449
450  // Don't suggest a fixit inside macros.
451  if (VD->getLocEnd().isMacroID())
452    return false;
453
454  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
455
456  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
457    << FixItHint::CreateInsertion(Loc, Init);
458  return true;
459}
460
461/// Create a fixit to remove an if-like statement, on the assumption that its
462/// condition is CondVal.
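/// For example (illustrative): for 'if (b) f(); else g();' where the analysis
/// has determined that 'b' is always true, Fixit1 removes 'if (b) ' and
/// Fixit2 removes the else branch, leaving just 'f();'.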
463static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
464                          const Stmt *Else, bool CondVal,
465                          FixItHint &Fixit1, FixItHint &Fixit2) {
466  if (CondVal) {
467    // If condition is always true, remove all but the 'then'.
468    Fixit1 = FixItHint::CreateRemoval(
469        CharSourceRange::getCharRange(If->getLocStart(),
470                                      Then->getLocStart()));
471    if (Else) {
472      SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
473          Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
474      Fixit2 = FixItHint::CreateRemoval(
475          SourceRange(ElseKwLoc, Else->getLocEnd()));
476    }
477  } else {
478    // If condition is always false, remove all but the 'else'.
479    if (Else)
480      Fixit1 = FixItHint::CreateRemoval(
481          CharSourceRange::getCharRange(If->getLocStart(),
482                                        Else->getLocStart()));
483    else
484      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
485  }
486}
487
488/// DiagUninitUse -- Helper function to produce a diagnostic for an
489/// uninitialized use of a variable.
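/// For example (illustrative):
///   int x;
///   if (b)
///     x = 1;
///   use(x);  // warns, roughly, that 'x' is used uninitialized whenever the
///            // 'if' condition is false, with a note at the use itself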
490static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
491                          bool IsCapturedByBlock) {
492  bool Diagnosed = false;
493
494  // Diagnose each branch which leads to a sometimes-uninitialized use.
495  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
496       I != E; ++I) {
497    assert(Use.getKind() == UninitUse::Sometimes);
498
499    const Expr *User = Use.getUser();
500    const Stmt *Term = I->Terminator;
501
502    // Information used when building the diagnostic.
503    unsigned DiagKind;
504    StringRef Str;
505    SourceRange Range;
506
507    // FixIts to suppress the diagnostic by removing the dead condition.
508    // For all binary terminators, branch 0 is taken if the condition is true,
509    // and branch 1 is taken if the condition is false.
510    int RemoveDiagKind = -1;
511    const char *FixitStr =
512        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
513                                  : (I->Output ? "1" : "0");
514    FixItHint Fixit1, Fixit2;
515
516    switch (Term->getStmtClass()) {
517    default:
518      // Don't know how to report this. Just fall back to 'may be used
519      // uninitialized'. This happens for range-based for, which the user
520      // can't explicitly fix.
521      // FIXME: This also happens if the first use of a variable is always
522    // uninitialized, e.g. "for (int n; n < 10; ++n)". We should report that
523      // with the 'is uninitialized' diagnostic.
524      continue;
525
526    // "condition is true / condition is false".
527    case Stmt::IfStmtClass: {
528      const IfStmt *IS = cast<IfStmt>(Term);
529      DiagKind = 0;
530      Str = "if";
531      Range = IS->getCond()->getSourceRange();
532      RemoveDiagKind = 0;
533      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
534                    I->Output, Fixit1, Fixit2);
535      break;
536    }
537    case Stmt::ConditionalOperatorClass: {
538      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
539      DiagKind = 0;
540      Str = "?:";
541      Range = CO->getCond()->getSourceRange();
542      RemoveDiagKind = 0;
543      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
544                    I->Output, Fixit1, Fixit2);
545      break;
546    }
547    case Stmt::BinaryOperatorClass: {
548      const BinaryOperator *BO = cast<BinaryOperator>(Term);
549      if (!BO->isLogicalOp())
550        continue;
551      DiagKind = 0;
552      Str = BO->getOpcodeStr();
553      Range = BO->getLHS()->getSourceRange();
554      RemoveDiagKind = 0;
555      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
556          (BO->getOpcode() == BO_LOr && !I->Output))
557        // true && y -> y, false || y -> y.
558        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
559                                                      BO->getOperatorLoc()));
560      else
561        // false && y -> false, true || y -> true.
562        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
563      break;
564    }
565
566    // "loop is entered / loop is exited".
567    case Stmt::WhileStmtClass:
568      DiagKind = 1;
569      Str = "while";
570      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
571      RemoveDiagKind = 1;
572      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
573      break;
574    case Stmt::ForStmtClass:
575      DiagKind = 1;
576      Str = "for";
577      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
578      RemoveDiagKind = 1;
579      if (I->Output)
580        Fixit1 = FixItHint::CreateRemoval(Range);
581      else
582        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
583      break;
584
585    // "condition is true / loop is exited".
586    case Stmt::DoStmtClass:
587      DiagKind = 2;
588      Str = "do";
589      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
590      RemoveDiagKind = 1;
591      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
592      break;
593
594    // "switch case is taken".
595    case Stmt::CaseStmtClass:
596      DiagKind = 3;
597      Str = "case";
598      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
599      break;
600    case Stmt::DefaultStmtClass:
601      DiagKind = 3;
602      Str = "default";
603      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
604      break;
605    }
606
607    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
608      << VD->getDeclName() << IsCapturedByBlock << DiagKind
609      << Str << I->Output << Range;
610    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
611      << IsCapturedByBlock << User->getSourceRange();
612    if (RemoveDiagKind != -1)
613      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
614        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;
615
616    Diagnosed = true;
617  }
618
619  if (!Diagnosed)
620    S.Diag(Use.getUser()->getLocStart(),
621           Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
622                                              : diag::warn_maybe_uninit_var)
623        << VD->getDeclName() << IsCapturedByBlock
624        << Use.getUser()->getSourceRange();
625}
626
627/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
628/// uninitialized variable. This manages the different forms of diagnostic
629/// emitted for particular types of uses. Returns true if the use was diagnosed
630/// as a warning. If a particular use is one we omit warnings for, returns
631/// false.
632static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
633                                     const UninitUse &Use,
634                                     bool alwaysReportSelfInit = false) {
635
636  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
637    // Inspect the initializer of the variable declaration which is
638    // being referenced prior to its initialization. We emit
639    // specialized diagnostics for self-initialization, and we
640    // specifically avoid warning about self references which take the
641    // form of:
642    //
643    //   int x = x;
644    //
645    // This is used to indicate to GCC that 'x' is intentionally left
646    // uninitialized. Proven code paths which access 'x' in
647    // an uninitialized state after this will still warn.
648    if (const Expr *Initializer = VD->getInit()) {
649      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
650        return false;
651
652      ContainsReference CR(S.Context, DRE);
653      CR.Visit(const_cast<Expr*>(Initializer));
654      if (CR.doesContainReference()) {
655        S.Diag(DRE->getLocStart(),
656               diag::warn_uninit_self_reference_in_init)
657          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
658        return true;
659      }
660    }
661
662    DiagUninitUse(S, VD, Use, false);
663  } else {
664    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
665    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
666      S.Diag(BE->getLocStart(),
667             diag::warn_uninit_byref_blockvar_captured_by_block)
668        << VD->getDeclName();
669    else
670      DiagUninitUse(S, VD, Use, true);
671  }
672
673  // Report where the variable was declared when the use wasn't within
674    // the initializer of that declaration and we didn't already suggest
675  // an initialization fixit.
676  if (!SuggestInitializationFixit(S, VD))
677    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
678      << VD->getDeclName();
679
680  return true;
681}
682
683namespace {
684  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
685  public:
686    FallthroughMapper(Sema &S)
687      : FoundSwitchStatements(false),
688        S(S) {
689    }
690
691    bool foundSwitchStatements() const { return FoundSwitchStatements; }
692
693    void markFallthroughVisited(const AttributedStmt *Stmt) {
694      bool Found = FallthroughStmts.erase(Stmt);
695      assert(Found);
696      (void)Found;
697    }
698
699    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;
700
701    const AttrStmts &getFallthroughStmts() const {
702      return FallthroughStmts;
703    }
704
705    void fillReachableBlocks(CFG *Cfg) {
706      assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
707      std::deque<const CFGBlock *> BlockQueue;
708
709      ReachableBlocks.insert(&Cfg->getEntry());
710      BlockQueue.push_back(&Cfg->getEntry());
711      while (!BlockQueue.empty()) {
712        const CFGBlock *P = BlockQueue.front();
713        BlockQueue.pop_front();
714        for (CFGBlock::const_succ_iterator I = P->succ_begin(),
715                                           E = P->succ_end();
716             I != E; ++I) {
717          if (ReachableBlocks.insert(*I))
718            BlockQueue.push_back(*I);
719        }
720      }
721    }
722
723    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
724      assert(!ReachableBlocks.empty() && "ReachableBlocks empty");
725
726      int UnannotatedCnt = 0;
727      AnnotatedCnt = 0;
728
729      std::deque<const CFGBlock*> BlockQueue;
730
731      std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));
732
733      while (!BlockQueue.empty()) {
734        const CFGBlock *P = BlockQueue.front();
735        BlockQueue.pop_front();
736
737        const Stmt *Term = P->getTerminator();
738        if (Term && isa<SwitchStmt>(Term))
739          continue; // Switch statement, good.
740
741        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
742        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
743          continue; // Previous case label has no statements, good.
744
745        const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
746        if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
747          continue; // Case label is preceded by a normal label, good.
748
749        if (!ReachableBlocks.count(P)) {
750          for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
751               ElIt != ElEnd; ++ElIt) {
752            if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
753              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
754                S.Diag(AS->getLocStart(),
755                       diag::warn_fallthrough_attr_unreachable);
756                markFallthroughVisited(AS);
757                ++AnnotatedCnt;
758              }
759              // Don't care about other unreachable statements.
760            }
761          }
762          // If there are no unreachable statements, this may be a special
763          // case in CFG:
764          // case X: {
765          //    A a;  // A has a destructor.
766          //    break;
767          // }
768          // // <<<< This place is represented by a 'hanging' CFG block.
769          // case Y:
770          continue;
771        }
772
773        const Stmt *LastStmt = getLastStmt(*P);
774        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
775          markFallthroughVisited(AS);
776          ++AnnotatedCnt;
777          continue; // Fallthrough annotation, good.
778        }
779
780        if (!LastStmt) { // This block contains no executable statements.
781          // Traverse its predecessors.
782          std::copy(P->pred_begin(), P->pred_end(),
783                    std::back_inserter(BlockQueue));
784          continue;
785        }
786
787        ++UnannotatedCnt;
788      }
789      return !!UnannotatedCnt;
790    }
791
792    // RecursiveASTVisitor setup.
793    bool shouldWalkTypesOfTypeLocs() const { return false; }
794
795    bool VisitAttributedStmt(AttributedStmt *S) {
796      if (asFallThroughAttr(S))
797        FallthroughStmts.insert(S);
798      return true;
799    }
800
801    bool VisitSwitchStmt(SwitchStmt *S) {
802      FoundSwitchStatements = true;
803      return true;
804    }
805
806  private:
807
808    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
809      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
810        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
811          return AS;
812      }
813      return 0;
814    }
815
816    static const Stmt *getLastStmt(const CFGBlock &B) {
817      if (const Stmt *Term = B.getTerminator())
818        return Term;
819      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
820                                            ElemEnd = B.rend();
821                                            ElemIt != ElemEnd; ++ElemIt) {
822        if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
823          return CS->getStmt();
824      }
825      // Workaround to detect a statement thrown out by CFGBuilder:
826      //   case X: {} case Y:
827      //   case X: ; case Y:
828      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
829        if (!isa<SwitchCase>(SW->getSubStmt()))
830          return SW->getSubStmt();
831
832      return 0;
833    }
834
835    bool FoundSwitchStatements;
836    AttrStmts FallthroughStmts;
837    Sema &S;
838    llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
839  };
840}
841
842static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
843                                            bool PerFunction) {
844  // Only perform this analysis when using C++11.  There is no good workflow
845  // for this warning when not using C++11.  There is no good way to silence
846  // the warning (no attribute is available) unless we are using C++11's
847  // support for generalized attributes.  One could use pragmas to silence the
848  // warning, but as a general solution that is gross and not in the spirit of
849  // this warning.  (See the illustrative example below.)
850  //
851  // NOTE: This is an intermediate solution.  There are ongoing discussions on
852  // how to properly support this warning outside of C++11 with an annotation.
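  // For example (illustrative):
  //   switch (n) {
  //   case 1:
  //     f();
  //     [[clang::fallthrough]];  // annotated fall-through: not diagnosed
  //   case 2:
  //     g();                     // unannotated fall-through into 'case 3':
  //   case 3:                    // diagnosed, with fixits suggesting the
  //     break;                   // annotation or a 'break;'
  //   }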
853  if (!AC.getASTContext().getLangOpts().CPlusPlus11)
854    return;
855
856  FallthroughMapper FM(S);
857  FM.TraverseStmt(AC.getBody());
858
859  if (!FM.foundSwitchStatements())
860    return;
861
862  if (PerFunction && FM.getFallthroughStmts().empty())
863    return;
864
865  CFG *Cfg = AC.getCFG();
866
867  if (!Cfg)
868    return;
869
870  FM.fillReachableBlocks(Cfg);
871
872  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
873    const CFGBlock *B = *I;
874    const Stmt *Label = B->getLabel();
875
876    if (!Label || !isa<SwitchCase>(Label))
877      continue;
878
879    int AnnotatedCnt;
880
881    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt))
882      continue;
883
884    S.Diag(Label->getLocStart(),
885        PerFunction ? diag::warn_unannotated_fallthrough_per_function
886                    : diag::warn_unannotated_fallthrough);
887
888    if (!AnnotatedCnt) {
889      SourceLocation L = Label->getLocStart();
890      if (L.isMacroID())
891        continue;
892      if (S.getLangOpts().CPlusPlus11) {
893        const Stmt *Term = B->getTerminator();
894        // Skip empty cases.
895        while (B->empty() && !Term && B->succ_size() == 1) {
896          B = *B->succ_begin();
897          Term = B->getTerminator();
898        }
899        if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
900          Preprocessor &PP = S.getPreprocessor();
901          TokenValue Tokens[] = {
902            tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
903            tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
904            tok::r_square, tok::r_square
905          };
906          StringRef AnnotationSpelling = "[[clang::fallthrough]]";
907          StringRef MacroName = PP.getLastMacroWithSpelling(L, Tokens);
908          if (!MacroName.empty())
909            AnnotationSpelling = MacroName;
910          SmallString<64> TextToInsert(AnnotationSpelling);
911          TextToInsert += "; ";
912          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
913              AnnotationSpelling <<
914              FixItHint::CreateInsertion(L, TextToInsert);
915        }
916      }
917      S.Diag(L, diag::note_insert_break_fixit) <<
918        FixItHint::CreateInsertion(L, "break; ");
919    }
920  }
921
922  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
923  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
924                                                    E = Fallthroughs.end();
925                                                    I != E; ++I) {
926    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
927  }
928
929}
930
931namespace {
932typedef std::pair<const Stmt *,
933                  sema::FunctionScopeInfo::WeakObjectUseMap::const_iterator>
934        StmtUsesPair;
935
936class StmtUseSorter {
937  const SourceManager &SM;
938
939public:
940  explicit StmtUseSorter(const SourceManager &SM) : SM(SM) { }
941
942  bool operator()(const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
943    return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
944                                        RHS.first->getLocStart());
945  }
946};
947}
948
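// isInLoop - Returns true if S is nested inside a loop that may iterate; for
// example (illustrative), a statement inside 'do { ... } while (0)' is not
// considered to be in a loop, while one inside 'while (cond)' is.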
949static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
950                     const Stmt *S) {
951  assert(S);
952
953  do {
954    switch (S->getStmtClass()) {
955    case Stmt::ForStmtClass:
956    case Stmt::WhileStmtClass:
957    case Stmt::CXXForRangeStmtClass:
958    case Stmt::ObjCForCollectionStmtClass:
959      return true;
960    case Stmt::DoStmtClass: {
961      const Expr *Cond = cast<DoStmt>(S)->getCond();
962      llvm::APSInt Val;
963      if (!Cond->EvaluateAsInt(Val, Ctx))
964        return true;
965      return Val.getBoolValue();
966    }
967    default:
968      break;
969    }
970  } while ((S = PM.getParent(S)));
971
972  return false;
973}
974
975
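/// diagnoseRepeatedUseOfWeak - Warn when the same weak object is read more
/// than once in a function, since the referent may be deallocated between the
/// reads.  For example (illustrative Objective-C under ARC):
///   use(self.weakProp);   // warning emitted at the first read
///   use(self.weakProp);   // note: also accessed here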
976static void diagnoseRepeatedUseOfWeak(Sema &S,
977                                      const sema::FunctionScopeInfo *CurFn,
978                                      const Decl *D,
979                                      const ParentMap &PM) {
980  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
981  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
982  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
983
984  ASTContext &Ctx = S.getASTContext();
985
986  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();
987
988  // Extract all weak objects that are referenced more than once.
989  SmallVector<StmtUsesPair, 8> UsesByStmt;
990  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
991       I != E; ++I) {
992    const WeakUseVector &Uses = I->second;
993
994    // Find the first read of the weak object.
995    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
996    for ( ; UI != UE; ++UI) {
997      if (UI->isUnsafe())
998        break;
999    }
1000
1001    // If there were only writes to this object, don't warn.
1002    if (UI == UE)
1003      continue;
1004
1005    // If there was only one read, followed by any number of writes, and the
1006    // read is not within a loop, don't warn. Additionally, don't warn in a
1007    // loop if the base object is a local variable -- local variables are often
1008    // changed in loops.
1009    if (UI == Uses.begin()) {
1010      WeakUseVector::const_iterator UI2 = UI;
1011      for (++UI2; UI2 != UE; ++UI2)
1012        if (UI2->isUnsafe())
1013          break;
1014
1015      if (UI2 == UE) {
1016        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
1017          continue;
1018
1019        const WeakObjectProfileTy &Profile = I->first;
1020        if (!Profile.isExactProfile())
1021          continue;
1022
1023        const NamedDecl *Base = Profile.getBase();
1024        if (!Base)
1025          Base = Profile.getProperty();
1026        assert(Base && "A profile always has a base or property.");
1027
1028        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
1029          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
1030            continue;
1031      }
1032    }
1033
1034    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
1035  }
1036
1037  if (UsesByStmt.empty())
1038    return;
1039
1040  // Sort by first use so that we emit the warnings in a deterministic order.
1041  std::sort(UsesByStmt.begin(), UsesByStmt.end(),
1042            StmtUseSorter(S.getSourceManager()));
1043
1044  // Classify the current code body for better warning text.
1045  // This enum should stay in sync with the cases in
1046  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1047  // FIXME: Should we use a common classification enum and the same set of
1048  // possibilities all throughout Sema?
1049  enum {
1050    Function,
1051    Method,
1052    Block,
1053    Lambda
1054  } FunctionKind;
1055
1056  if (isa<sema::BlockScopeInfo>(CurFn))
1057    FunctionKind = Block;
1058  else if (isa<sema::LambdaScopeInfo>(CurFn))
1059    FunctionKind = Lambda;
1060  else if (isa<ObjCMethodDecl>(D))
1061    FunctionKind = Method;
1062  else
1063    FunctionKind = Function;
1064
1065  // Iterate through the sorted problems and emit warnings for each.
1066  for (SmallVectorImpl<StmtUsesPair>::const_iterator I = UsesByStmt.begin(),
1067                                                     E = UsesByStmt.end();
1068       I != E; ++I) {
1069    const Stmt *FirstRead = I->first;
1070    const WeakObjectProfileTy &Key = I->second->first;
1071    const WeakUseVector &Uses = I->second->second;
1072
1073    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
1074    // may not contain enough information to determine that these are different
1075    // properties. We can only be 100% sure of a repeated use in certain cases,
1076    // and we adjust the diagnostic kind accordingly so that the less certain
1077    // case can be turned off if it is too noisy.
1078    unsigned DiagKind;
1079    if (Key.isExactProfile())
1080      DiagKind = diag::warn_arc_repeated_use_of_weak;
1081    else
1082      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;
1083
1084    // Classify the weak object being accessed for better warning text.
1085    // This enum should stay in sync with the cases in
1086    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
1087    enum {
1088      Variable,
1089      Property,
1090      ImplicitProperty,
1091      Ivar
1092    } ObjectKind;
1093
1094    const NamedDecl *D = Key.getProperty();
1095    if (isa<VarDecl>(D))
1096      ObjectKind = Variable;
1097    else if (isa<ObjCPropertyDecl>(D))
1098      ObjectKind = Property;
1099    else if (isa<ObjCMethodDecl>(D))
1100      ObjectKind = ImplicitProperty;
1101    else if (isa<ObjCIvarDecl>(D))
1102      ObjectKind = Ivar;
1103    else
1104      llvm_unreachable("Unexpected weak object kind!");
1105
1106    // Show the first time the object was read.
1107    S.Diag(FirstRead->getLocStart(), DiagKind)
1108      << ObjectKind << D << FunctionKind
1109      << FirstRead->getSourceRange();
1110
1111    // Print all the other accesses as notes.
1112    for (WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
1113         UI != UE; ++UI) {
1114      if (UI->getUseExpr() == FirstRead)
1115        continue;
1116      S.Diag(UI->getUseExpr()->getLocStart(),
1117             diag::note_arc_weak_also_accessed_here)
1118        << UI->getUseExpr()->getSourceRange();
1119    }
1120  }
1121}
1122
1123
1124namespace {
1125struct SLocSort {
1126  bool operator()(const UninitUse &a, const UninitUse &b) {
1127    // Prefer a more confident report over a less confident one.
1128    if (a.getKind() != b.getKind())
1129      return a.getKind() > b.getKind();
1130    SourceLocation aLoc = a.getUser()->getLocStart();
1131    SourceLocation bLoc = b.getUser()->getLocStart();
1132    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
1133  }
1134};
1135
1136class UninitValsDiagReporter : public UninitVariablesHandler {
1137  Sema &S;
1138  typedef SmallVector<UninitUse, 2> UsesVec;
1139  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
1140  UsesMap *uses;
1141
1142public:
1143  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
1144  ~UninitValsDiagReporter() {
1145    flushDiagnostics();
1146  }
1147
1148  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
1149    if (!uses)
1150      uses = new UsesMap();
1151
1152    UsesMap::mapped_type &V = (*uses)[vd];
1153    UsesVec *&vec = V.first;
1154    if (!vec)
1155      vec = new UsesVec();
1156
1157    return V;
1158  }
1159
1160  void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
1161    getUses(vd).first->push_back(use);
1162  }
1163
1164  void handleSelfInit(const VarDecl *vd) {
1165    getUses(vd).second = true;
1166  }
1167
1168  void flushDiagnostics() {
1169    if (!uses)
1170      return;
1171
1172    // FIXME: This iteration order, and thus the resulting diagnostic order,
1173    //        is nondeterministic.
1174    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
1175      const VarDecl *vd = i->first;
1176      const UsesMap::mapped_type &V = i->second;
1177
1178      UsesVec *vec = V.first;
1179      bool hasSelfInit = V.second;
1180
1181      // Specially handle the case where we have uses of an uninitialized
1182      // variable, but the root cause is an idiomatic self-init.  We want
1183      // to report the diagnostic at the self-init since that is the root cause.
1184      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1185        DiagnoseUninitializedUse(S, vd,
1186                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1187                                           /* isAlwaysUninit */ true),
1188                                 /* alwaysReportSelfInit */ true);
1189      else {
1190        // Sort the uses by their SourceLocations.  While not strictly
1191        // guaranteed to produce them in line/column order, this will provide
1192        // a stable ordering.
1193        std::sort(vec->begin(), vec->end(), SLocSort());
1194
1195        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
1196             ++vi) {
1197          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1198          UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
1199
1200          if (DiagnoseUninitializedUse(S, vd, Use))
1201            // Skip further diagnostics for this variable. We try to warn only
1202            // on the first point at which a variable is used uninitialized.
1203            break;
1204        }
1205      }
1206
1207      // Release the uses vector.
1208      delete vec;
1209    }
1210    delete uses;
1211  }
1212
1213private:
1214  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1215  for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
1216    if (i->getKind() == UninitUse::Always) {
1217      return true;
1218    }
1219  }
1220  return false;
1221}
1222};
1223}
1224
1225
1226//===----------------------------------------------------------------------===//
1227// -Wthread-safety
1228//===----------------------------------------------------------------------===//
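// For example (illustrative; assumes 'Mutex' is a user-defined class annotated
// with the 'lockable' attribute):
//   Mutex mu;
//   int data __attribute__((guarded_by(mu)));
//   void bad() {
//     data = 42;   // diagnosed: writing 'data' requires holding 'mu'
//   }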
1229namespace clang {
1230namespace thread_safety {
1231typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1232typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1233typedef std::list<DelayedDiag> DiagList;
1234
1235struct SortDiagBySourceLocation {
1236  SourceManager &SM;
1237  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1238
1239  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1240    // Although this call will be slow, this is only called when outputting
1241    // multiple warnings.
1242    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1243  }
1244};
1245
1246namespace {
1247class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
1248  Sema &S;
1249  DiagList Warnings;
1250  SourceLocation FunLocation, FunEndLocation;
1251
1252  // Helper functions
1253  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
1254    // Gracefully handle rare cases when the analysis can't get a more
1255    // precise source location.
1256    if (!Loc.isValid())
1257      Loc = FunLocation;
1258    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
1259    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1260  }
1261
1262 public:
1263  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
1264    : S(S), FunLocation(FL), FunEndLocation(FEL) {}
1265
1266  /// \brief Emit all buffered diagnostics in order of source location.
1267  /// We need to output diagnostics produced while iterating through
1268  /// the lockset in deterministic order, so this function orders diagnostics
1269  /// and outputs them.
1270  void emitDiagnostics() {
1271    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
1272    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
1273         I != E; ++I) {
1274      S.Diag(I->first.first, I->first.second);
1275      const OptionalNotes &Notes = I->second;
1276      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
1277        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
1278    }
1279  }
1280
1281  void handleInvalidLockExp(SourceLocation Loc) {
1282    PartialDiagnosticAt Warning(Loc,
1283                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
1284    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1285  }
1286  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
1287    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
1288  }
1289
1290  void handleDoubleLock(Name LockName, SourceLocation Loc) {
1291    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
1292  }
1293
1294  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
1295                                 SourceLocation LocEndOfScope,
1296                                 LockErrorKind LEK){
1297    unsigned DiagID = 0;
1298    switch (LEK) {
1299      case LEK_LockedSomePredecessors:
1300        DiagID = diag::warn_lock_some_predecessors;
1301        break;
1302      case LEK_LockedSomeLoopIterations:
1303        DiagID = diag::warn_expecting_lock_held_on_loop;
1304        break;
1305      case LEK_LockedAtEndOfFunction:
1306        DiagID = diag::warn_no_unlock;
1307        break;
1308      case LEK_NotLockedAtEndOfFunction:
1309        DiagID = diag::warn_expecting_locked;
1310        break;
1311    }
1312    if (LocEndOfScope.isInvalid())
1313      LocEndOfScope = FunEndLocation;
1314
1315    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
1316    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
1317    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1318  }
1319
1320
1321  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
1322                                SourceLocation Loc2) {
1323    PartialDiagnosticAt Warning(
1324      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
1325    PartialDiagnosticAt Note(
1326      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
1327    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1328  }
1329
1330  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
1331                         AccessKind AK, SourceLocation Loc) {
1332    assert((POK == POK_VarAccess || POK == POK_VarDereference)
1333             && "Only works for variables");
1334    unsigned DiagID = POK == POK_VarAccess?
1335                        diag::warn_variable_requires_any_lock:
1336                        diag::warn_var_deref_requires_any_lock;
1337    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1338      << D->getNameAsString() << getLockKindFromAccessKind(AK));
1339    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1340  }
1341
1342  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
1343                          Name LockName, LockKind LK, SourceLocation Loc,
1344                          Name *PossibleMatch) {
1345    unsigned DiagID = 0;
1346    if (PossibleMatch) {
1347      switch (POK) {
1348        case POK_VarAccess:
1349          DiagID = diag::warn_variable_requires_lock_precise;
1350          break;
1351        case POK_VarDereference:
1352          DiagID = diag::warn_var_deref_requires_lock_precise;
1353          break;
1354        case POK_FunctionCall:
1355          DiagID = diag::warn_fun_requires_lock_precise;
1356          break;
1357      }
1358      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1359        << D->getNameAsString() << LockName << LK);
1360      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
1361                               << *PossibleMatch);
1362      Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
1363    } else {
1364      switch (POK) {
1365        case POK_VarAccess:
1366          DiagID = diag::warn_variable_requires_lock;
1367          break;
1368        case POK_VarDereference:
1369          DiagID = diag::warn_var_deref_requires_lock;
1370          break;
1371        case POK_FunctionCall:
1372          DiagID = diag::warn_fun_requires_lock;
1373          break;
1374      }
1375      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
1376        << D->getNameAsString() << LockName << LK);
1377      Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1378    }
1379  }
1380
1381  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
1382    PartialDiagnosticAt Warning(Loc,
1383      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
1384    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
1385  }
1386};
1387}
1388}
1389}
1390
1391//===----------------------------------------------------------------------===//
1392// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1393//  warnings on a function, method, or block.
1394//===----------------------------------------------------------------------===//
1395
1396clang::sema::AnalysisBasedWarnings::Policy::Policy() {
1397  enableCheckFallThrough = 1;
1398  enableCheckUnreachable = 0;
1399  enableThreadSafetyAnalysis = 0;
1400}
1401
1402clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
1403  : S(s),
1404    NumFunctionsAnalyzed(0),
1405    NumFunctionsWithBadCFGs(0),
1406    NumCFGBlocks(0),
1407    MaxCFGBlocksPerFunction(0),
1408    NumUninitAnalysisFunctions(0),
1409    NumUninitAnalysisVariables(0),
1410    MaxUninitAnalysisVariablesPerFunction(0),
1411    NumUninitAnalysisBlockVisits(0),
1412    MaxUninitAnalysisBlockVisitsPerFunction(0) {
1413  DiagnosticsEngine &D = S.getDiagnostics();
1414  DefaultPolicy.enableCheckUnreachable = (unsigned)
1415    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
1416        DiagnosticsEngine::Ignored);
1417  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
1418    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
1419     DiagnosticsEngine::Ignored);
1420
1421}
1422
1423static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
1424  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1425       i = fscope->PossiblyUnreachableDiags.begin(),
1426       e = fscope->PossiblyUnreachableDiags.end();
1427       i != e; ++i) {
1428    const sema::PossiblyUnreachableDiag &D = *i;
1429    S.Diag(D.Loc, D.PD);
1430  }
1431}
1432
1433void clang::sema::
1434AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
1435                                     sema::FunctionScopeInfo *fscope,
1436                                     const Decl *D, const BlockExpr *blkExpr) {
1437
1438  // We avoid doing analysis-based warnings when there are errors for
1439  // two reasons:
1440  // (1) The CFGs often can't be constructed (if the body is invalid), so
1441  //     don't bother trying.
1442  // (2) The code already has problems; running the analysis just takes more
1443  //     time.
1444  DiagnosticsEngine &Diags = S.getDiagnostics();
1445
1446  // Do not do any analysis for declarations in system headers if we are
1447  // going to just ignore them.
1448  if (Diags.getSuppressSystemWarnings() &&
1449      S.SourceMgr.isInSystemHeader(D->getLocation()))
1450    return;
1451
1452  // For code in dependent contexts, we'll do this at instantiation time.
1453  if (cast<DeclContext>(D)->isDependentContext())
1454    return;
1455
1456  if (Diags.hasUncompilableErrorOccurred() || Diags.hasFatalErrorOccurred()) {
1457    // Flush out any possibly unreachable diagnostics.
1458    flushDiagnostics(S, fscope);
1459    return;
1460  }
1461
1462  const Stmt *Body = D->getBody();
1463  assert(Body);
1464
1465  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);
1466
1467  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
1468  // explosion for destructors that can result and the compile time hit.
1469  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
1470  AC.getCFGBuildOptions().AddEHEdges = false;
1471  AC.getCFGBuildOptions().AddInitializers = true;
1472  AC.getCFGBuildOptions().AddImplicitDtors = true;
1473  AC.getCFGBuildOptions().AddTemporaryDtors = true;
1474
1475  // Force that certain expressions appear as CFGElements in the CFG.  This
1476  // is used to speed up various analyses.
1477  // FIXME: This isn't the right factoring.  This is here for initial
1478  // prototyping, but we need a way for analyses to say what expressions they
1479  // expect to always be CFGElements and then fill in the BuildOptions
1480  // appropriately.  This is essentially a layering violation.
1481  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
1482    // Unreachable code analysis and thread safety require a linearized CFG.
1483    AC.getCFGBuildOptions().setAllAlwaysAdd();
1484  }
1485  else {
1486    AC.getCFGBuildOptions()
1487      .setAlwaysAdd(Stmt::BinaryOperatorClass)
1488      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
1489      .setAlwaysAdd(Stmt::BlockExprClass)
1490      .setAlwaysAdd(Stmt::CStyleCastExprClass)
1491      .setAlwaysAdd(Stmt::DeclRefExprClass)
1492      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
1493      .setAlwaysAdd(Stmt::UnaryOperatorClass)
1494      .setAlwaysAdd(Stmt::AttributedStmtClass);
1495  }
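  // A rough illustration of why this matters (simplified; not from the
  // original source):
  //
  //   int x;                 // never initialized
  //   if (flag && x) { ... } // '&&', the DeclRefExpr and the implicit cast
  //                          // are kept as distinct CFGElements
  //
  // Keeping those sub-expressions visible in the CFG is what lets the
  // uninitialized-values analysis below report the precise use of 'x' instead
  // of reasoning about the whole condition as one opaque statement.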
1496
1497  // The CFG itself is constructed lazily, using the build options set above.
1498
1499  // Emit delayed diagnostics.
1500  if (!fscope->PossiblyUnreachableDiags.empty()) {
1501    bool analyzed = false;
1502
1503    // Register the expressions with the CFGBuilder.
1504    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1505         i = fscope->PossiblyUnreachableDiags.begin(),
1506         e = fscope->PossiblyUnreachableDiags.end();
1507         i != e; ++i) {
1508      if (const Stmt *stmt = i->stmt)
1509        AC.registerForcedBlockExpression(stmt);
1510    }
1511
1512    if (AC.getCFG()) {
1513      analyzed = true;
1514      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1515            i = fscope->PossiblyUnreachableDiags.begin(),
1516            e = fscope->PossiblyUnreachableDiags.end();
1517            i != e; ++i)
1518      {
1519        const sema::PossiblyUnreachableDiag &D = *i;
1520        bool processed = false;
1521        if (const Stmt *stmt = i->stmt) {
1522          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
1523          CFGReverseBlockReachabilityAnalysis *cra =
1524              AC.getCFGReachablityAnalysis();
1525          // FIXME: We should be able to assert that block is non-null, but
1526          // the CFG analysis can skip potentially-evaluated expressions in
1527          // edge cases; see test/Sema/vla-2.c.
1528          if (block && cra) {
1529            // Can this block be reached from the CFG's entry block?
1530            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
1531              S.Diag(D.Loc, D.PD);
1532            processed = true;
1533          }
1534        }
1535        if (!processed) {
1536          // Emit the warning anyway if we cannot map to a basic block.
1537          S.Diag(D.Loc, D.PD);
1538        }
1539      }
1540    }
1541
1542    if (!analyzed)
1543      flushDiagnostics(S, fscope);
1544  }
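  // Sketch of the producer side (an approximation of what the rest of Sema
  // does; not code from this file): diagnostics about runtime behavior are
  // queued while parsing, roughly as in
  //
  //   FunctionScopes.back()->PossiblyUnreachableDiags.push_back(
  //       sema::PossiblyUnreachableDiag(PDiag(diag::warn_something), Loc,
  //                                     Statement));
  //
  // and the loop above then consults the CFG reachability analysis to decide
  // whether each queued diagnostic should actually be emitted.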
1545
1546
1547  // Warning: check for missing 'return'
1548  if (P.enableCheckFallThrough) {
1549    const CheckFallThroughDiagnostics &CD =
1550      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
1551       : (isa<CXXMethodDecl>(D) &&
1552          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
1553          cast<CXXMethodDecl>(D)->getParent()->isLambda())
1554            ? CheckFallThroughDiagnostics::MakeForLambda()
1555            : CheckFallThroughDiagnostics::MakeForFunction(D));
1556    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
1557  }
1558
1559  // Warning: check for unreachable code
1560  if (P.enableCheckUnreachable) {
1561    // Only check for unreachable code in functions that are not template
1562    // instantiations: different instantiations can effectively change the
1563    // control flow, and it is very difficult to prove that a snippet of
1564    // code in a template is unreachable for all instantiations.
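    // Illustrative case (added for exposition):
    //
    //   template <int N> int f() {
    //     if (N > 0) return 1;
    //     return 0;            // dead when N > 0, required when N <= 0
    //   }
    //
    // The same statement is unreachable in some instantiations and live in
    // others, so warning on any single instantiation would be misleading.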
1565    bool isTemplateInstantiation = false;
1566    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
1567      isTemplateInstantiation = Function->isTemplateInstantiation();
1568    if (!isTemplateInstantiation)
1569      CheckUnreachable(S, AC);
1570  }
1571
1572  // Check for thread safety violations
1573  if (P.enableThreadSafetyAnalysis) {
1574    SourceLocation FL = AC.getDecl()->getLocation();
1575    SourceLocation FEL = AC.getDecl()->getLocEnd();
1576    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
1577    if (Diags.getDiagnosticLevel(diag::warn_thread_safety_beta,D->getLocStart())
1578        != DiagnosticsEngine::Ignored)
1579      Reporter.setIssueBetaWarnings(true);
1580
1581    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
1582    Reporter.emitDiagnostics();
1583  }
1584
1585  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
1586      != DiagnosticsEngine::Ignored ||
1587      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
1588      != DiagnosticsEngine::Ignored ||
1589      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
1590      != DiagnosticsEngine::Ignored) {
1591    if (CFG *cfg = AC.getCFG()) {
1592      UninitValsDiagReporter reporter(S);
1593      UninitVariablesAnalysisStats stats;
1594      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
1595      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
1596                                        reporter, stats);
1597
1598      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
1599        ++NumUninitAnalysisFunctions;
1600        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
1601        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
1602        MaxUninitAnalysisVariablesPerFunction =
1603            std::max(MaxUninitAnalysisVariablesPerFunction,
1604                     stats.NumVariablesAnalyzed);
1605        MaxUninitAnalysisBlockVisitsPerFunction =
1606            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
1607                     stats.NumBlockVisits);
1608      }
1609    }
1610  }
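  // For context (the flag names are my best recollection, not spelled out in
  // this file): warn_uninit_var, warn_sometimes_uninit_var and
  // warn_maybe_uninit_var correspond to -Wuninitialized,
  // -Wsometimes-uninitialized and -Wconditional-uninitialized, and fire on
  // patterns such as
  //
  //   int x;
  //   if (cond) x = 1;
  //   use(x);   // 'x' may be uninitialized whenever 'cond' is false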
1611
1612  bool FallThroughDiagFull =
1613      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
1614                               D->getLocStart()) != DiagnosticsEngine::Ignored;
1615  bool FallThroughDiagPerFunction =
1616      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
1617                               D->getLocStart()) != DiagnosticsEngine::Ignored;
1618  if (FallThroughDiagFull || FallThroughDiagPerFunction) {
1619    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
1620  }
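  // Example of the annotation this check is about (illustrative, C++11 code):
  //
  //   switch (n) {
  //   case 0:
  //     ++x;
  //     [[clang::fallthrough]];   // intentional: silences the warning here
  //   case 1:
  //     return x;
  //   }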
1621
1622  if (S.getLangOpts().ObjCARCWeak &&
1623      Diags.getDiagnosticLevel(diag::warn_arc_repeated_use_of_weak,
1624                               D->getLocStart()) != DiagnosticsEngine::Ignored)
1625    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
1626
1627  // Collect statistics about the CFG if it was built.
1628  if (S.CollectStats && AC.isCFGBuilt()) {
1629    ++NumFunctionsAnalyzed;
1630    if (CFG *cfg = AC.getCFG()) {
1631      // If we successfully built a CFG for this context, record some more
1632      // detailed information about it.
1633      NumCFGBlocks += cfg->getNumBlockIDs();
1634      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
1635                                         cfg->getNumBlockIDs());
1636    } else {
1637      ++NumFunctionsWithBadCFGs;
1638    }
1639  }
1640}
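// Call-site sketch (an approximation of how Sema drives this; the exact
// signature below is an assumption, not code from this file): IssueWarnings
// runs as a function's scope is popped, roughly
//
//   void Sema::PopFunctionScopeInfo(const AnalysisBasedWarnings::Policy *WP,
//                                   const Decl *D, const BlockExpr *blkExpr) {
//     sema::FunctionScopeInfo *Scope = FunctionScopes.pop_back_val();
//     if (WP && D)
//       AnalysisWarnings.IssueWarnings(*WP, Scope, D, blkExpr);
//     ...
//   }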
1641
1642void clang::sema::AnalysisBasedWarnings::PrintStats() const {
1643  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
1644
1645  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
1646  unsigned AvgCFGBlocksPerFunction =
1647      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
1648  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
1649               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
1650               << "  " << NumCFGBlocks << " CFG blocks built.\n"
1651               << "  " << AvgCFGBlocksPerFunction
1652               << " average CFG blocks per function.\n"
1653               << "  " << MaxCFGBlocksPerFunction
1654               << " max CFG blocks per function.\n";
1655
1656  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
1657      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
1658  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
1659      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
1660  llvm::errs() << NumUninitAnalysisFunctions
1661               << " functions analyzed for uninitialized variables.\n"
1662               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
1663               << "  " << AvgUninitVariablesPerFunction
1664               << " average variables per function.\n"
1665               << "  " << MaxUninitAnalysisVariablesPerFunction
1666               << " max variables per function.\n"
1667               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
1668               << "  " << AvgUninitBlockVisitsPerFunction
1669               << " average block visits per function.\n"
1670               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
1671               << " max block visits per function.\n";
1672}
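// Shape of the report printed above, with placeholders instead of real counts
// (collection is gated on Sema::CollectStats, which I believe corresponds to
// the cc1 option -print-stats):
//
//   *** Analysis Based Warnings Stats:
//   <N> functions analyzed (<M> w/o CFGs).
//     <total> CFG blocks built.
//     <avg> average CFG blocks per function.
//     <max> max CFG blocks per function.
//   <K> functions analyzed for uninitialized variables.
//     ...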
1673