AnalysisBasedWarnings.cpp revision cd8ab51a44e80625d84126780b0d85a7732e25af
1//=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
2//
3//                     The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// This file defines analysis_warnings::[Policy,Executor].
11// Together they are used by Sema to issue warnings based on inexpensive
12// static analysis algorithms in libAnalysis.
13//
14//===----------------------------------------------------------------------===//
15
16#include "clang/Sema/AnalysisBasedWarnings.h"
17#include "clang/AST/DeclCXX.h"
18#include "clang/AST/DeclObjC.h"
19#include "clang/AST/EvaluatedExprVisitor.h"
20#include "clang/AST/ExprCXX.h"
21#include "clang/AST/ExprObjC.h"
22#include "clang/AST/ParentMap.h"
23#include "clang/AST/RecursiveASTVisitor.h"
24#include "clang/AST/StmtCXX.h"
25#include "clang/AST/StmtObjC.h"
26#include "clang/AST/StmtVisitor.h"
27#include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
28#include "clang/Analysis/Analyses/ReachableCode.h"
29#include "clang/Analysis/Analyses/ThreadSafety.h"
30#include "clang/Analysis/Analyses/UninitializedValues.h"
31#include "clang/Analysis/AnalysisContext.h"
32#include "clang/Analysis/CFG.h"
33#include "clang/Analysis/CFGStmtMap.h"
34#include "clang/Basic/SourceLocation.h"
35#include "clang/Basic/SourceManager.h"
36#include "clang/Lex/Lexer.h"
37#include "clang/Lex/Preprocessor.h"
38#include "clang/Sema/ScopeInfo.h"
39#include "clang/Sema/SemaInternal.h"
40#include "llvm/ADT/ArrayRef.h"
41#include "llvm/ADT/BitVector.h"
42#include "llvm/ADT/FoldingSet.h"
43#include "llvm/ADT/ImmutableMap.h"
44#include "llvm/ADT/PostOrderIterator.h"
45#include "llvm/ADT/SmallString.h"
46#include "llvm/ADT/SmallVector.h"
47#include "llvm/ADT/StringRef.h"
48#include "llvm/Support/Casting.h"
49#include <algorithm>
50#include <deque>
51#include <iterator>
52#include <vector>
53
54using namespace clang;
55
56//===----------------------------------------------------------------------===//
57// Unreachable code analysis.
58//===----------------------------------------------------------------------===//
59
60namespace {
61  class UnreachableCodeHandler : public reachable_code::Callback {
62    Sema &S;
63  public:
64    UnreachableCodeHandler(Sema &s) : S(s) {}
65
66    void HandleUnreachable(SourceLocation L, SourceRange R1, SourceRange R2) {
67      S.Diag(L, diag::warn_unreachable) << R1 << R2;
68    }
69  };
70}
71
72/// CheckUnreachable - Check for unreachable code.
73static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
74  UnreachableCodeHandler UC(S);
75  reachable_code::FindUnreachableCode(AC, UC);
76}
77
78//===----------------------------------------------------------------------===//
79// Check for missing return value.
80//===----------------------------------------------------------------------===//
81
/// Classifies how control can leave the end of a statement/body, as computed
/// by CheckFallThrough below.
enum ControlFlowKind {
  UnknownFallThrough,       ///< No CFG was available, so nothing is known.
  NeverFallThrough,         ///< Never falls off the end, but may return.
  MaybeFallThrough,         ///< Might or might not fall off the end.
  AlwaysFallThrough,        ///< Always falls off the end.
  NeverFallThroughOrReturn  ///< Never falls off the end and never returns.
};
89
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (cfg == 0) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (CFG::iterator I = cfg->begin(), E = cfg->end(); I != E; ++I) {
      CFGBlock &b = **I;
      if (!live[b.getBlockID()]) {
        if (b.pred_begin() == b.pred_end()) {
          if (b.getTerminator() && isa<CXXTryStmt>(b.getTerminator()))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(&b, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  // Walk every (filtered) predecessor of the exit block and classify the
  // edge it contributes: a plain fall-through, a return, a throw-like
  // "fake" edge, or an abnormal (non-viable) edge.
  for (CFGBlock::filtered_pred_iterator
         I = cfg->getExit().filtered_pred_start_end(FO); I.hasMore(); ++I) {
    const CFGBlock& B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return. They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (isa<CFGStmt>(*ri))
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      if (B.getTerminator() && isa<CXXTryStmt>(B.getTerminator())) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    // Classify the last real statement in the block.
    CFGStmt CS = cast<CFGStmt>(*ri);
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S)) {
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // If the block doesn't actually list the exit block as a successor, the
    // edge is abnormal rather than a genuine fall-through.
    if (std::find(B.succ_begin(), B.succ_end(), &cfg->getExit())
        == B.succ_end()) {
      HasAbnormalEdge = true;
      continue;
    }

    HasPlainEdge = true;
  }
  // Fold the collected edge kinds into a single verdict.
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be more
  // accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
218
namespace {

/// Bundles the diagnostic IDs used to report "falls off the end" problems,
/// specialized for the three kinds of bodies we check (functions/methods,
/// blocks, and lambdas).  A diagnostic ID of 0 means "don't emit".
struct CheckFallThroughDiagnostics {
  unsigned diag_MaybeFallThrough_HasNoReturn;
  unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  unsigned diag_AlwaysFallThrough_HasNoReturn;
  unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  unsigned diag_NeverFallThroughOrReturn;
  // Which body kind this diagnostic set was built for.
  enum { Function, Block, Lambda } funMode;
  // Location of the declaration; used for diagnostic-level queries.
  SourceLocation FuncLoc;

  /// Build the diagnostic set for an ordinary function or ObjC method.
  static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
    CheckFallThroughDiagnostics D;
    D.FuncLoc = Func->getLocation();
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_function;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::warn_falloff_noreturn_function;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_function;

    // Don't suggest that virtual functions be marked "noreturn", since they
    // might be overridden by non-noreturn functions.
    bool isVirtualMethod = false;
    if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
      isVirtualMethod = Method->isVirtual();

    // Don't suggest that template instantiations be marked "noreturn"
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
      isTemplateInstantiation = Function->isTemplateInstantiation();

    if (!isVirtualMethod && !isTemplateInstantiation)
      D.diag_NeverFallThroughOrReturn =
        diag::warn_suggest_noreturn_function;
    else
      D.diag_NeverFallThroughOrReturn = 0;

    D.funMode = Function;
    return D;
  }

  /// Build the diagnostic set for a block literal.  Note that falling off
  /// the end of a non-void block is an error, not a warning.
  static CheckFallThroughDiagnostics MakeForBlock() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::err_maybe_falloff_nonvoid_block;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_block_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::err_falloff_nonvoid_block;
    D.diag_NeverFallThroughOrReturn =
      diag::warn_suggest_noreturn_block;
    D.funMode = Block;
    return D;
  }

  /// Build the diagnostic set for a lambda body.  No "suggest noreturn"
  /// diagnostic is offered for lambdas.
  static CheckFallThroughDiagnostics MakeForLambda() {
    CheckFallThroughDiagnostics D;
    D.diag_MaybeFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_MaybeFallThrough_ReturnsNonVoid =
      diag::warn_maybe_falloff_nonvoid_lambda;
    D.diag_AlwaysFallThrough_HasNoReturn =
      diag::err_noreturn_lambda_has_return_expr;
    D.diag_AlwaysFallThrough_ReturnsNonVoid =
      diag::warn_falloff_nonvoid_lambda;
    D.diag_NeverFallThroughOrReturn = 0;
    D.funMode = Lambda;
    return D;
  }

  /// Returns true if every diagnostic that could fire for this configuration
  /// is disabled at FuncLoc, i.e. the fall-through analysis can be skipped
  /// entirely (used by the caller as a compile-speed short circuit).
  bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
                        bool HasNoReturn) const {
    if (funMode == Function) {
      return (ReturnsVoid ||
              D.getDiagnosticLevel(diag::warn_maybe_falloff_nonvoid_function,
                                   FuncLoc) == DiagnosticsEngine::Ignored)
        && (!HasNoReturn ||
            D.getDiagnosticLevel(diag::warn_noreturn_function_has_return_expr,
                                 FuncLoc) == DiagnosticsEngine::Ignored)
        && (!ReturnsVoid ||
            D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
              == DiagnosticsEngine::Ignored);
    }

    // For blocks / lambdas.
    return ReturnsVoid && !HasNoReturn
            && ((funMode == Lambda) ||
                D.getDiagnosticLevel(diag::warn_suggest_noreturn_block, FuncLoc)
                  == DiagnosticsEngine::Ignored);
  }
};

}
317
/// CheckFallThroughForBody - Check that we don't fall off the end of a
/// function that should return a value.  Check that we don't fall off the end
/// of a noreturn function.  We assume that functions and blocks not marked
/// noreturn will return.
///
/// \param D       The function, ObjC method, or block declaration being
///                checked.
/// \param Body    The body statement of \p D.
/// \param blkExpr The BlockExpr when \p D is a BlockDecl; used to recover the
///                block's function type (return type / noreturn).
/// \param CD      The diagnostic IDs appropriate for this kind of body.
/// \param AC      The analysis context providing the CFG.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    const BlockExpr *blkExpr,
                                    const CheckFallThroughDiagnostics& CD,
                                    AnalysisDeclContext &AC) {

  bool ReturnsVoid = false;
  bool HasNoReturn = false;

  // Determine the return-void / noreturn properties for each decl kind.
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    ReturnsVoid = FD->getResultType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getResultType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks, the properties live on the block's function type, reached
    // through the BlockExpr's (pointer) type.
    QualType BlockTy = blkExpr->getType();
    if (const FunctionType *FT =
          BlockTy->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getResultType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
      return;

  // FIXME: Function try block
  if (const CompoundStmt *Compound = dyn_cast<CompoundStmt>(Body)) {
    switch (CheckFallThrough(AC)) {
      case UnknownFallThrough:
        break;

      case MaybeFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_MaybeFallThrough_ReturnsNonVoid);
        break;
      case AlwaysFallThrough:
        if (HasNoReturn)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_HasNoReturn);
        else if (!ReturnsVoid)
          S.Diag(Compound->getRBracLoc(),
                 CD.diag_AlwaysFallThrough_ReturnsNonVoid);
        break;
      case NeverFallThroughOrReturn:
        // Only suggest "noreturn" for void functions that have a diagnostic
        // configured (virtual methods / template instantiations get ID 0).
        if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
          if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 0 << FD;
          } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn)
              << 1 << MD;
          } else {
            S.Diag(Compound->getLBracLoc(), CD.diag_NeverFallThroughOrReturn);
          }
        }
        break;
      case NeverFallThrough:
        break;
    }
  }
}
395
396//===----------------------------------------------------------------------===//
397// -Wuninitialized
398//===----------------------------------------------------------------------===//
399
400namespace {
401/// ContainsReference - A visitor class to search for references to
402/// a particular declaration (the needle) within any evaluated component of an
403/// expression (recursively).
404class ContainsReference : public EvaluatedExprVisitor<ContainsReference> {
405  bool FoundReference;
406  const DeclRefExpr *Needle;
407
408public:
409  ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
410    : EvaluatedExprVisitor<ContainsReference>(Context),
411      FoundReference(false), Needle(Needle) {}
412
413  void VisitExpr(Expr *E) {
414    // Stop evaluating if we already have a reference.
415    if (FoundReference)
416      return;
417
418    EvaluatedExprVisitor<ContainsReference>::VisitExpr(E);
419  }
420
421  void VisitDeclRefExpr(DeclRefExpr *E) {
422    if (E == Needle)
423      FoundReference = true;
424    else
425      EvaluatedExprVisitor<ContainsReference>::VisitDeclRefExpr(E);
426  }
427
428  bool doesContainReference() const { return FoundReference; }
429};
430}
431
432static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
433  QualType VariableTy = VD->getType().getCanonicalType();
434  if (VariableTy->isBlockPointerType() &&
435      !VD->hasAttr<BlocksAttr>()) {
436    S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization) << VD->getDeclName()
437    << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
438    return true;
439  }
440
441  // Don't issue a fixit if there is already an initializer.
442  if (VD->getInit())
443    return false;
444
445  // Suggest possible initialization (if any).
446  std::string Init = S.getFixItZeroInitializerForType(VariableTy);
447  if (Init.empty())
448    return false;
449
450  // Don't suggest a fixit inside macros.
451  if (VD->getLocEnd().isMacroID())
452    return false;
453
454  SourceLocation Loc = S.PP.getLocForEndOfToken(VD->getLocEnd());
455
456  S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
457    << FixItHint::CreateInsertion(Loc, Init);
458  return true;
459}
460
461/// Create a fixit to remove an if-like statement, on the assumption that its
462/// condition is CondVal.
463static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
464                          const Stmt *Else, bool CondVal,
465                          FixItHint &Fixit1, FixItHint &Fixit2) {
466  if (CondVal) {
467    // If condition is always true, remove all but the 'then'.
468    Fixit1 = FixItHint::CreateRemoval(
469        CharSourceRange::getCharRange(If->getLocStart(),
470                                      Then->getLocStart()));
471    if (Else) {
472      SourceLocation ElseKwLoc = Lexer::getLocForEndOfToken(
473          Then->getLocEnd(), 0, S.getSourceManager(), S.getLangOpts());
474      Fixit2 = FixItHint::CreateRemoval(
475          SourceRange(ElseKwLoc, Else->getLocEnd()));
476    }
477  } else {
478    // If condition is always false, remove all but the 'else'.
479    if (Else)
480      Fixit1 = FixItHint::CreateRemoval(
481          CharSourceRange::getCharRange(If->getLocStart(),
482                                        Else->getLocStart()));
483    else
484      Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
485  }
486}
487
/// DiagUninitUse -- Helper function to produce a diagnostic for an
/// uninitialized use of a variable.
///
/// For a "sometimes uninitialized" use, each branch leading to the use is
/// diagnosed individually with a fix-it that removes the dead condition;
/// otherwise a single "is/may be uninitialized" warning is emitted.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    // DiagKind selects the wording ("condition is true/false", "loop is
    // entered/exited", "switch case is taken"); Str names the construct.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term->getStmtClass()) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. This happens for range-based for, which the user
      // can't explicitly fix.
      // FIXME: This also happens if the first use of a variable is always
      // uninitialized, eg "for (int n; n < 10; ++n)". We should report that
      // with the 'is uninitialized' diagnostic.
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only the short-circuiting && and || terminators are reportable.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(SourceRange(BO->getLocStart(),
                                                      BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      // Entering the loop: delete the condition; exiting: force it false.
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    // Emit the per-branch warning, a note at the use site, and (when a
    // fix-it was built) a note showing how to remove the dead condition.
    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
      << VD->getDeclName() << IsCapturedByBlock << DiagKind
      << Str << I->Output << Range;
    S.Diag(User->getLocStart(), diag::note_uninit_var_use)
      << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
        << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch was reportable: fall back to the plain is/may-be-uninitialized
  // warning at the use site.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getLocStart(),
           Use.getKind() == UninitUse::Always ? diag::warn_uninit_var
                                              : diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
626
/// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
/// uninitialized variable. This manages the different forms of diagnostic
/// emitted for particular types of uses. Returns true if the use was diagnosed
/// as a warning. If a particular use is one we omit warnings for, returns
/// false.
///
/// \param alwaysReportSelfInit When true, "int x = x;" style self-init is
///        reported instead of being treated as a deliberate suppression.
static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
                                     const UninitUse &Use,
                                     bool alwaysReportSelfInit = false) {

  if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
    // Inspect the initializer of the variable declaration which is
    // being referenced prior to its initialization. We emit
    // specialized diagnostics for self-initialization, and we
    // specifically avoid warning about self references which take the
    // form of:
    //
    //   int x = x;
    //
    // This is used to indicate to GCC that 'x' is intentionally left
    // uninitialized. Proven code paths which access 'x' in
    // an uninitialized state after this will still warn.
    if (const Expr *Initializer = VD->getInit()) {
      if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
        return false;

      // The variable is referenced somewhere inside its own initializer
      // (e.g. "int x = x + 1;"): emit the self-reference diagnostic.
      ContainsReference CR(S.Context, DRE);
      CR.Visit(const_cast<Expr*>(Initializer));
      if (CR.doesContainReference()) {
        S.Diag(DRE->getLocStart(),
               diag::warn_uninit_self_reference_in_init)
          << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
        return true;
      }
    }

    DiagUninitUse(S, VD, Use, false);
  } else {
    // The use is a block capturing the variable rather than a direct read.
    const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
    if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
      S.Diag(BE->getLocStart(),
             diag::warn_uninit_byref_blockvar_captured_by_block)
        << VD->getDeclName();
    else
      DiagUninitUse(S, VD, Use, true);
  }

  // Report where the variable was declared when the use wasn't within
  // the initializer of that declaration & we didn't already suggest
  // an initialization fixit.
  if (!SuggestInitializationFixit(S, VD))
    S.Diag(VD->getLocStart(), diag::note_uninit_var_def)
      << VD->getDeclName();

  return true;
}
682
namespace {
  /// AST visitor that collects [[clang::fallthrough]]-annotated statements
  /// and, given a CFG, decides whether switch labels are fallen into without
  /// an annotation.  Annotations left in FallthroughStmts after the CFG walk
  /// are the unused ones.
  class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
  public:
    FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
    }

    /// True if the traversed body contained at least one switch statement.
    bool foundSwitchStatements() const { return FoundSwitchStatements; }

    /// Record that the given fallthrough annotation was matched to a CFG
    /// edge; it must previously have been collected by VisitAttributedStmt.
    void markFallthroughVisited(const AttributedStmt *Stmt) {
      bool Found = FallthroughStmts.erase(Stmt);
      assert(Found);
      (void)Found;
    }

    typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

    /// Annotations collected during traversal that have not (yet) been
    /// matched to a fall-through edge.
    const AttrStmts &getFallthroughStmts() const {
      return FallthroughStmts;
    }

    /// Examine every path by which control can fall into block \p B (a
    /// switch-case label).  Counts annotated entries in \p AnnotatedCnt and
    /// returns true iff at least one entry is unannotated.
    bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt) {
      int UnannotatedCnt = 0;
      AnnotatedCnt = 0;

      // Worklist of predecessor blocks still to classify.
      // NOTE(review): there is no visited set, so a block reachable through
      // several empty-block chains can be enqueued more than once — TODO
      // confirm this cannot re-trigger markFallthroughVisited's assert.
      std::deque<const CFGBlock*> BlockQueue;

      std::copy(B.pred_begin(), B.pred_end(), std::back_inserter(BlockQueue));

      while (!BlockQueue.empty()) {
        const CFGBlock *P = BlockQueue.front();
        BlockQueue.pop_front();

        const Stmt *Term = P->getTerminator();
        if (Term && isa<SwitchStmt>(Term))
          continue; // Switch statement, good.

        const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
        if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
          continue; // Previous case label has no statements, good.

        if (P->pred_begin() == P->pred_end()) {  // The block is unreachable.
          // This only catches trivially unreachable blocks.
          // A fallthrough annotation inside unreachable code is reported as
          // such, but still consumed so it isn't also flagged as unused.
          for (CFGBlock::const_iterator ElIt = P->begin(), ElEnd = P->end();
               ElIt != ElEnd; ++ElIt) {
            if (const CFGStmt *CS = ElIt->getAs<CFGStmt>()){
              if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
                S.Diag(AS->getLocStart(),
                       diag::warn_fallthrough_attr_unreachable);
                markFallthroughVisited(AS);
                ++AnnotatedCnt;
              }
              // Don't care about other unreachable statements.
            }
          }
          // If there are no unreachable statements, this may be a special
          // case in CFG:
          // case X: {
          //    A a;  // A has a destructor.
          //    break;
          // }
          // // <<<< This place is represented by a 'hanging' CFG block.
          // case Y:
          continue;
        }

        const Stmt *LastStmt = getLastStmt(*P);
        if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
          markFallthroughVisited(AS);
          ++AnnotatedCnt;
          continue; // Fallthrough annotation, good.
        }

        if (!LastStmt) { // This block contains no executable statements.
          // Traverse its predecessors.
          std::copy(P->pred_begin(), P->pred_end(),
                    std::back_inserter(BlockQueue));
          continue;
        }

        ++UnannotatedCnt;
      }
      return !!UnannotatedCnt;
    }

    // RecursiveASTVisitor setup.
    bool shouldWalkTypesOfTypeLocs() const { return false; }

    /// Collect each statement carrying a fallthrough attribute.
    bool VisitAttributedStmt(AttributedStmt *S) {
      if (asFallThroughAttr(S))
        FallthroughStmts.insert(S);
      return true;
    }

    bool VisitSwitchStmt(SwitchStmt *S) {
      FoundSwitchStatements = true;
      return true;
    }

  private:

    /// Returns \p S as an AttributedStmt if it carries a FallThroughAttr,
    /// null otherwise (also accepts a null \p S).
    static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
      if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
        if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
          return AS;
      }
      return 0;
    }

    /// Returns the last statement of block \p B: its terminator if any,
    /// otherwise the last CFGStmt element, with a workaround for labels
    /// whose sub-statement the CFGBuilder discarded.
    static const Stmt *getLastStmt(const CFGBlock &B) {
      if (const Stmt *Term = B.getTerminator())
        return Term;
      for (CFGBlock::const_reverse_iterator ElemIt = B.rbegin(),
                                            ElemEnd = B.rend();
                                            ElemIt != ElemEnd; ++ElemIt) {
        if (const CFGStmt *CS = ElemIt->getAs<CFGStmt>())
          return CS->getStmt();
      }
      // Workaround to detect a statement thrown out by CFGBuilder:
      //   case X: {} case Y:
      //   case X: ; case Y:
      if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
        if (!isa<SwitchCase>(SW->getSubStmt()))
          return SW->getSubStmt();

      return 0;
    }

    bool FoundSwitchStatements;   // Set by VisitSwitchStmt.
    AttrStmts FallthroughStmts;   // Not-yet-matched fallthrough annotations.
    Sema &S;                      // Diagnostic sink.
  };
}
817
/// Diagnose unannotated fall-through between switch labels in the body held
/// by \p AC, using the C++11 [[clang::fallthrough]] attribute as the
/// annotation, and suggest fix-its for unannotated fall-through edges.
///
/// \param PerFunction When true, emit the weaker "per function" diagnostic
///        and only analyze functions that already contain at least one
///        fall-through annotation.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  // Only perform this analysis when using C++11.  There is no good workflow
  // for this warning when not using C++11.  There is no good way to silence
  // the warning (no attribute is available) unless we are using C++11's support
  // for generalized attributes.  One could use pragmas to silence the warning,
  // but as a general solution that is gross and not in the spirit of this
  // warning.
  //
  // NOTE: This is an intermediate solution.  There are on-going discussions on
  // how to properly support this warning outside of C++11 with an annotation.
  if (!AC.getASTContext().getLangOpts().CPlusPlus11)
    return;

  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // No switch statements at all: nothing to check.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only functions that already use the annotation
  // somewhere are analyzed.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  int AnnotatedCnt;

  // Visit each CFG block headed by a case/default label and check whether
  // control can fall into it without an annotation.
  for (CFG::reverse_iterator I = Cfg->rbegin(), E = Cfg->rend(); I != E; ++I) {
    const CFGBlock &B = **I;
    const Stmt *Label = B.getLabel();

    if (!Label || !isa<SwitchCase>(Label))
      continue;

    if (!FM.checkFallThroughIntoBlock(B, AnnotatedCnt))
      continue;

    S.Diag(Label->getLocStart(),
        PerFunction ? diag::warn_unannotated_fallthrough_per_function
                    : diag::warn_unannotated_fallthrough);

    // When no annotated edge reaches this label, offer fix-it hints.
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getLocStart();
      // Don't try to insert fix-its into macro expansions.
      if (L.isMacroID())
        continue;
      if (S.getLangOpts().CPlusPlus11) {
        const Stmt *Term = B.getTerminator();
        // Skip blocks that consist of nothing but a 'break': suggesting a
        // fall-through annotation there would be pointless.
        if (!(B.empty() && Term && isa<BreakStmt>(Term))) {
          Preprocessor &PP = S.getPreprocessor();
          TokenValue Tokens[] = {
            tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
            tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
            tok::r_square, tok::r_square
          };
          StringRef AnnotationSpelling = "[[clang::fallthrough]]";
          // Prefer a user macro that expands to the attribute, if one is
          // visible at this location.
          StringRef MacroName = PP.getLastMacroWithSpelling(L, Tokens);
          if (!MacroName.empty())
            AnnotationSpelling = MacroName;
          SmallString<64> TextToInsert(AnnotationSpelling);
          TextToInsert += "; ";
          S.Diag(L, diag::note_insert_fallthrough_fixit) <<
              AnnotationSpelling <<
              FixItHint::CreateInsertion(L, TextToInsert);
        }
      }
      S.Diag(L, diag::note_insert_break_fixit) <<
        FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any fall-through annotations still recorded by the mapper are diagnosed
  // as invalidly placed.
  const FallthroughMapper::AttrStmts &Fallthroughs = FM.getFallthroughStmts();
  for (FallthroughMapper::AttrStmts::const_iterator I = Fallthroughs.begin(),
                                                    E = Fallthroughs.end();
                                                    I != E; ++I) {
    S.Diag((*I)->getLocStart(), diag::warn_fallthrough_attr_invalid_placement);
  }

}
899
900namespace {
901typedef std::pair<const Stmt *,
902                  sema::FunctionScopeInfo::WeakObjectUseMap::const_iterator>
903        StmtUsesPair;
904
905class StmtUseSorter {
906  const SourceManager &SM;
907
908public:
909  explicit StmtUseSorter(const SourceManager &SM) : SM(SM) { }
910
911  bool operator()(const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
912    return SM.isBeforeInTranslationUnit(LHS.first->getLocStart(),
913                                        RHS.first->getLocStart());
914  }
915};
916}
917
918static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
919                     const Stmt *S) {
920  assert(S);
921
922  do {
923    switch (S->getStmtClass()) {
924    case Stmt::ForStmtClass:
925    case Stmt::WhileStmtClass:
926    case Stmt::CXXForRangeStmtClass:
927    case Stmt::ObjCForCollectionStmtClass:
928      return true;
929    case Stmt::DoStmtClass: {
930      const Expr *Cond = cast<DoStmt>(S)->getCond();
931      llvm::APSInt Val;
932      if (!Cond->EvaluateAsInt(Val, Ctx))
933        return true;
934      return Val.getBoolValue();
935    }
936    default:
937      break;
938    }
939  } while ((S = PM.getParent(S)));
940
941  return false;
942}
943
944
/// Diagnose repeated unsafe reads of ARC __weak objects within the function
/// scope \p CurFn, emitting a warning at the first read of each object and
/// notes at every other access.
///
/// \param D  The declaration whose body is analyzed; used to classify the
///           code body (function/method/block/lambda) for the warning text.
/// \param PM Parent map over the body, used for the loop heuristic below.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      // Check whether any later use is also unsafe (a second read).
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        // Exactly one read: only warn if that read sits inside a loop.
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        // Suppress the warning for non-parameter local variables.
        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  std::sort(UsesByStmt.begin(), UsesByStmt.end(),
            StmtUseSorter(S.getSourceManager()));

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (SmallVectorImpl<StmtUsesPair>::const_iterator I = UsesByStmt.begin(),
                                                     E = UsesByStmt.end();
       I != E; ++I) {
    const Stmt *FirstRead = I->first;
    const WeakObjectProfileTy &Key = I->second->first;
    const WeakUseVector &Uses = I->second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    // NOTE: shadows the function parameter 'D' for the rest of this loop body.
    const NamedDecl *D = Key.getProperty();
    if (isa<VarDecl>(D))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(D))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(D))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(D))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Show the first time the object was read.
    S.Diag(FirstRead->getLocStart(), DiagKind)
      << ObjectKind << D << FunctionKind
      << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
         UI != UE; ++UI) {
      if (UI->getUseExpr() == FirstRead)
        continue;
      S.Diag(UI->getUseExpr()->getLocStart(),
             diag::note_arc_weak_also_accessed_here)
        << UI->getUseExpr()->getSourceRange();
    }
  }
}
1091
1092
1093namespace {
1094struct SLocSort {
1095  bool operator()(const UninitUse &a, const UninitUse &b) {
1096    // Prefer a more confident report over a less confident one.
1097    if (a.getKind() != b.getKind())
1098      return a.getKind() > b.getKind();
1099    SourceLocation aLoc = a.getUser()->getLocStart();
1100    SourceLocation bLoc = b.getUser()->getLocStart();
1101    return aLoc.getRawEncoding() < bLoc.getRawEncoding();
1102  }
1103};
1104
1105class UninitValsDiagReporter : public UninitVariablesHandler {
1106  Sema &S;
1107  typedef SmallVector<UninitUse, 2> UsesVec;
1108  typedef llvm::DenseMap<const VarDecl *, std::pair<UsesVec*, bool> > UsesMap;
1109  UsesMap *uses;
1110
1111public:
1112  UninitValsDiagReporter(Sema &S) : S(S), uses(0) {}
1113  ~UninitValsDiagReporter() {
1114    flushDiagnostics();
1115  }
1116
1117  std::pair<UsesVec*, bool> &getUses(const VarDecl *vd) {
1118    if (!uses)
1119      uses = new UsesMap();
1120
1121    UsesMap::mapped_type &V = (*uses)[vd];
1122    UsesVec *&vec = V.first;
1123    if (!vec)
1124      vec = new UsesVec();
1125
1126    return V;
1127  }
1128
1129  void handleUseOfUninitVariable(const VarDecl *vd, const UninitUse &use) {
1130    getUses(vd).first->push_back(use);
1131  }
1132
1133  void handleSelfInit(const VarDecl *vd) {
1134    getUses(vd).second = true;
1135  }
1136
1137  void flushDiagnostics() {
1138    if (!uses)
1139      return;
1140
1141    // FIXME: This iteration order, and thus the resulting diagnostic order,
1142    //        is nondeterministic.
1143    for (UsesMap::iterator i = uses->begin(), e = uses->end(); i != e; ++i) {
1144      const VarDecl *vd = i->first;
1145      const UsesMap::mapped_type &V = i->second;
1146
1147      UsesVec *vec = V.first;
1148      bool hasSelfInit = V.second;
1149
1150      // Specially handle the case where we have uses of an uninitialized
1151      // variable, but the root cause is an idiomatic self-init.  We want
1152      // to report the diagnostic at the self-init since that is the root cause.
1153      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
1154        DiagnoseUninitializedUse(S, vd,
1155                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
1156                                           /* isAlwaysUninit */ true),
1157                                 /* alwaysReportSelfInit */ true);
1158      else {
1159        // Sort the uses by their SourceLocations.  While not strictly
1160        // guaranteed to produce them in line/column order, this will provide
1161        // a stable ordering.
1162        std::sort(vec->begin(), vec->end(), SLocSort());
1163
1164        for (UsesVec::iterator vi = vec->begin(), ve = vec->end(); vi != ve;
1165             ++vi) {
1166          // If we have self-init, downgrade all uses to 'may be uninitialized'.
1167          UninitUse Use = hasSelfInit ? UninitUse(vi->getUser(), false) : *vi;
1168
1169          if (DiagnoseUninitializedUse(S, vd, Use))
1170            // Skip further diagnostics for this variable. We try to warn only
1171            // on the first point at which a variable is used uninitialized.
1172            break;
1173        }
1174      }
1175
1176      // Release the uses vector.
1177      delete vec;
1178    }
1179    delete uses;
1180  }
1181
1182private:
1183  static bool hasAlwaysUninitializedUse(const UsesVec* vec) {
1184  for (UsesVec::const_iterator i = vec->begin(), e = vec->end(); i != e; ++i) {
1185    if (i->getKind() == UninitUse::Always) {
1186      return true;
1187    }
1188  }
1189  return false;
1190}
1191};
1192}
1193
1194
1195//===----------------------------------------------------------------------===//
1196// -Wthread-safety
1197//===----------------------------------------------------------------------===//
1198namespace clang {
1199namespace thread_safety {
1200typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
1201typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
1202typedef std::list<DelayedDiag> DiagList;
1203
1204struct SortDiagBySourceLocation {
1205  SourceManager &SM;
1206  SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
1207
1208  bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
1209    // Although this call will be slow, this is only called when outputting
1210    // multiple warnings.
1211    return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
1212  }
1213};
1214
1215namespace {
/// Buffers diagnostics produced by the thread-safety analysis and emits them
/// in deterministic source order via emitDiagnostics().
class ThreadSafetyReporter : public clang::thread_safety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;   // Buffered (warning, notes) pairs, emitted sorted.
  // Fallback locations used when the analysis has no precise location.
  SourceLocation FunLocation, FunEndLocation;

  // Helper functions
  /// Queue a lock-mismatch warning, substituting the function's location when
  /// \p Loc is invalid.
  void warnLockMismatch(unsigned DiagID, Name LockName, SourceLocation Loc) {
    // Gracefully handle rare cases when the analysis can't get a more
    // precise source location.
    if (!Loc.isValid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

 public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
    : S(S), FunLocation(FL), FunEndLocation(FEL) {}

  /// \brief Emit all buffered diagnostics in order of sourcelocation.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (DiagList::iterator I = Warnings.begin(), E = Warnings.end();
         I != E; ++I) {
      S.Diag(I->first.first, I->first.second);
      const OptionalNotes &Notes = I->second;
      for (unsigned NoteI = 0, NoteN = Notes.size(); NoteI != NoteN; ++NoteI)
        S.Diag(Notes[NoteI].first, Notes[NoteI].second);
    }
  }

  /// Handler: a lock expression could not be resolved to a lock object.
  void handleInvalidLockExp(SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_cannot_resolve_lock) << Loc);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
  /// Handler: an unlock with no matching lock held.
  void handleUnmatchedUnlock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_unlock_but_no_lock, LockName, Loc);
  }

  /// Handler: a mutex acquired while already held.
  void handleDoubleLock(Name LockName, SourceLocation Loc) {
    warnLockMismatch(diag::warn_double_lock, LockName, Loc);
  }

  /// Handler: lock state at end of scope disagrees with expectations; the
  /// specific diagnostic depends on the LockErrorKind.
  void handleMutexHeldEndOfScope(Name LockName, SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK){
    unsigned DiagID = 0;
    switch (LEK) {
      case LEK_LockedSomePredecessors:
        DiagID = diag::warn_lock_some_predecessors;
        break;
      case LEK_LockedSomeLoopIterations:
        DiagID = diag::warn_expecting_lock_held_on_loop;
        break;
      case LEK_LockedAtEndOfFunction:
        DiagID = diag::warn_no_unlock;
        break;
      case LEK_NotLockedAtEndOfFunction:
        DiagID = diag::warn_expecting_locked;
        break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    // Attach a note pointing at where the lock was acquired.
    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << LockName);
    PartialDiagnosticAt Note(LocLocked, S.PDiag(diag::note_locked_here));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }


  /// Handler: the same mutex is held exclusively at one point and shared at
  /// another.
  void handleExclusiveAndShared(Name LockName, SourceLocation Loc1,
                                SourceLocation Loc2) {
    PartialDiagnosticAt Warning(
      Loc1, S.PDiag(diag::warn_lock_exclusive_and_shared) << LockName);
    PartialDiagnosticAt Note(
      Loc2, S.PDiag(diag::note_lock_exclusive_and_shared) << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
  }

  /// Handler: a guarded variable accessed with no lock held at all.  Only
  /// variable access/dereference operations reach this handler.
  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) {
    assert((POK == POK_VarAccess || POK == POK_VarDereference)
             && "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess?
                        diag::warn_variable_requires_any_lock:
                        diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
      << D->getNameAsString() << getLockKindFromAccessKind(AK));
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }

  /// Handler: an operation requires a specific mutex that is not held.  When
  /// \p PossibleMatch is non-null, a similarly-named held mutex exists and
  /// the "precise" diagnostics plus a near-match note are used instead.
  void handleMutexNotHeld(const NamedDecl *D, ProtectedOperationKind POK,
                          Name LockName, LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock_precise;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock_precise;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock_precise;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                               << *PossibleMatch);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes(1, Note)));
    } else {
      switch (POK) {
        case POK_VarAccess:
          DiagID = diag::warn_variable_requires_lock;
          break;
        case POK_VarDereference:
          DiagID = diag::warn_var_deref_requires_lock;
          break;
        case POK_FunctionCall:
          DiagID = diag::warn_fun_requires_lock;
          break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
        << D->getNameAsString() << LockName << LK);
      Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
    }
  }

  /// Handler: a function was called while holding a mutex it excludes.
  void handleFunExcludesLock(Name FunName, Name LockName, SourceLocation Loc) {
    PartialDiagnosticAt Warning(Loc,
      S.PDiag(diag::warn_fun_excludes_mutex) << FunName << LockName);
    Warnings.push_back(DelayedDiag(Warning, OptionalNotes()));
  }
};
1356}
1357}
1358}
1359
1360//===----------------------------------------------------------------------===//
1361// AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
1362//  warnings on a function, method, or block.
1363//===----------------------------------------------------------------------===//
1364
1365clang::sema::AnalysisBasedWarnings::Policy::Policy() {
1366  enableCheckFallThrough = 1;
1367  enableCheckUnreachable = 0;
1368  enableThreadSafetyAnalysis = 0;
1369}
1370
1371clang::sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
1372  : S(s),
1373    NumFunctionsAnalyzed(0),
1374    NumFunctionsWithBadCFGs(0),
1375    NumCFGBlocks(0),
1376    MaxCFGBlocksPerFunction(0),
1377    NumUninitAnalysisFunctions(0),
1378    NumUninitAnalysisVariables(0),
1379    MaxUninitAnalysisVariablesPerFunction(0),
1380    NumUninitAnalysisBlockVisits(0),
1381    MaxUninitAnalysisBlockVisitsPerFunction(0) {
1382  DiagnosticsEngine &D = S.getDiagnostics();
1383  DefaultPolicy.enableCheckUnreachable = (unsigned)
1384    (D.getDiagnosticLevel(diag::warn_unreachable, SourceLocation()) !=
1385        DiagnosticsEngine::Ignored);
1386  DefaultPolicy.enableThreadSafetyAnalysis = (unsigned)
1387    (D.getDiagnosticLevel(diag::warn_double_lock, SourceLocation()) !=
1388     DiagnosticsEngine::Ignored);
1389
1390}
1391
1392static void flushDiagnostics(Sema &S, sema::FunctionScopeInfo *fscope) {
1393  for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
1394       i = fscope->PossiblyUnreachableDiags.begin(),
1395       e = fscope->PossiblyUnreachableDiags.end();
1396       i != e; ++i) {
1397    const sema::PossiblyUnreachableDiag &D = *i;
1398    S.Diag(D.Loc, D.PD);
1399  }
1400}
1401
/// Run all enabled analysis-based warnings over the body of \p D, according
/// to policy \p P: delayed possibly-unreachable diagnostics, missing-return,
/// unreachable code, thread safety, uninitialized variables, switch
/// fall-through, and repeated use of __weak objects.  Also records CFG
/// statistics when -print-stats is active.
void clang::sema::
AnalysisBasedWarnings::IssueWarnings(sema::AnalysisBasedWarnings::Policy P,
                                     sema::FunctionScopeInfo *fscope,
                                     const Decl *D, const BlockExpr *blkExpr) {

  // We avoid doing analysis-based warnings when there are errors for
  // two reasons:
  // (1) The CFGs often can't be constructed (if the body is invalid), so
  //     don't bother trying.
  // (2) The code already has problems; running the analysis just takes more
  //     time.
  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Do not do any analysis for declarations in system headers if we are
  // going to just ignore them.
  if (Diags.getSuppressSystemWarnings() &&
      S.SourceMgr.isInSystemHeader(D->getLocation()))
    return;

  // For code in dependent contexts, we'll do this at instantiation time.
  if (cast<DeclContext>(D)->isDependentContext())
    return;

  if (Diags.hasUncompilableErrorOccurred() || Diags.hasFatalErrorOccurred()) {
    // Flush out any possibly unreachable diagnostics.
    flushDiagnostics(S, fscope);
    return;
  }

  const Stmt *Body = D->getBody();
  assert(Body);

  // Build a local analysis context (no shared manager) for this declaration.
  AnalysisDeclContext AC(/* AnalysisDeclContextManager */ 0, D);

  // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  // explosion for destructors that can result and the compile time hit.
  AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  AC.getCFGBuildOptions().AddEHEdges = false;
  AC.getCFGBuildOptions().AddInitializers = true;
  AC.getCFGBuildOptions().AddImplicitDtors = true;
  AC.getCFGBuildOptions().AddTemporaryDtors = true;

  // Force that certain expressions appear as CFGElements in the CFG.  This
  // is used to speed up various analyses.
  // FIXME: This isn't the right factoring.  This is here for initial
  // prototyping, but we need a way for analyses to say what expressions they
  // expect to always be CFGElements and then fill in the BuildOptions
  // appropriately.  This is essentially a layering violation.
  if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis) {
    // Unreachable code analysis and thread safety require a linearized CFG.
    AC.getCFGBuildOptions().setAllAlwaysAdd();
  }
  else {
    AC.getCFGBuildOptions()
      .setAlwaysAdd(Stmt::BinaryOperatorClass)
      .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
      .setAlwaysAdd(Stmt::BlockExprClass)
      .setAlwaysAdd(Stmt::CStyleCastExprClass)
      .setAlwaysAdd(Stmt::DeclRefExprClass)
      .setAlwaysAdd(Stmt::ImplicitCastExprClass)
      .setAlwaysAdd(Stmt::UnaryOperatorClass)
      .setAlwaysAdd(Stmt::AttributedStmtClass);
  }

  // Construct the analysis context with the specified CFG build options.

  // Emit delayed diagnostics.  Each queued diagnostic is emitted only if its
  // statement is reachable in the CFG (or if no CFG mapping is possible).
  if (!fscope->PossiblyUnreachableDiags.empty()) {
    bool analyzed = false;

    // Register the expressions with the CFGBuilder.
    for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
         i = fscope->PossiblyUnreachableDiags.begin(),
         e = fscope->PossiblyUnreachableDiags.end();
         i != e; ++i) {
      if (const Stmt *stmt = i->stmt)
        AC.registerForcedBlockExpression(stmt);
    }

    if (AC.getCFG()) {
      analyzed = true;
      for (SmallVectorImpl<sema::PossiblyUnreachableDiag>::iterator
            i = fscope->PossiblyUnreachableDiags.begin(),
            e = fscope->PossiblyUnreachableDiags.end();
            i != e; ++i)
      {
        const sema::PossiblyUnreachableDiag &D = *i;
        bool processed = false;
        if (const Stmt *stmt = i->stmt) {
          const CFGBlock *block = AC.getBlockForRegisteredExpression(stmt);
          CFGReverseBlockReachabilityAnalysis *cra =
              AC.getCFGReachablityAnalysis();
          // FIXME: We should be able to assert that block is non-null, but
          // the CFG analysis can skip potentially-evaluated expressions in
          // edge cases; see test/Sema/vla-2.c.
          if (block && cra) {
            // Can this block be reached from the entrance?
            if (cra->isReachable(&AC.getCFG()->getEntry(), block))
              S.Diag(D.Loc, D.PD);
            processed = true;
          }
        }
        if (!processed) {
          // Emit the warning anyway if we cannot map to a basic block.
          S.Diag(D.Loc, D.PD);
        }
      }
    }

    // If the CFG could not be built, fall back to emitting everything.
    if (!analyzed)
      flushDiagnostics(S, fscope);
  }


  // Warning: check missing 'return'
  if (P.enableCheckFallThrough) {
    // Pick the diagnostics set appropriate to the kind of body: block,
    // lambda call operator, or ordinary function/method.
    const CheckFallThroughDiagnostics &CD =
      (isa<BlockDecl>(D) ? CheckFallThroughDiagnostics::MakeForBlock()
       : (isa<CXXMethodDecl>(D) &&
          cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
          cast<CXXMethodDecl>(D)->getParent()->isLambda())
            ? CheckFallThroughDiagnostics::MakeForLambda()
            : CheckFallThroughDiagnostics::MakeForFunction(D));
    CheckFallThroughForBody(S, D, Body, blkExpr, CD, AC);
  }

  // Warning: check for unreachable code
  if (P.enableCheckUnreachable) {
    // Only check for unreachable code on non-template instantiations.
    // Different template instantiations can effectively change the control-flow
    // and it is very difficult to prove that a snippet of code in a template
    // is unreachable for all instantiations.
    bool isTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
      isTemplateInstantiation = Function->isTemplateInstantiation();
    if (!isTemplateInstantiation)
      CheckUnreachable(S, AC);
  }

  // Check for thread safety violations
  if (P.enableThreadSafetyAnalysis) {
    SourceLocation FL = AC.getDecl()->getLocation();
    SourceLocation FEL = AC.getDecl()->getLocEnd();
    thread_safety::ThreadSafetyReporter Reporter(S, FL, FEL);
    // Opt in to beta-quality warnings only when the group is enabled.
    if (Diags.getDiagnosticLevel(diag::warn_thread_safety_beta,D->getLocStart())
        != DiagnosticsEngine::Ignored)
      Reporter.setIssueBetaWarnings(true);

    thread_safety::runThreadSafetyAnalysis(AC, Reporter);
    Reporter.emitDiagnostics();
  }

  // Run the uninitialized-variables analysis if any of its warnings are on.
  if (Diags.getDiagnosticLevel(diag::warn_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_sometimes_uninit_var,D->getLocStart())
      != DiagnosticsEngine::Ignored ||
      Diags.getDiagnosticLevel(diag::warn_maybe_uninit_var, D->getLocStart())
      != DiagnosticsEngine::Ignored) {
    if (CFG *cfg = AC.getCFG()) {
      UninitValsDiagReporter reporter(S);
      UninitVariablesAnalysisStats stats;
      std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
      runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
                                        reporter, stats);

      // Accumulate per-function statistics for -print-stats.
      if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
        ++NumUninitAnalysisFunctions;
        NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
        NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
        MaxUninitAnalysisVariablesPerFunction =
            std::max(MaxUninitAnalysisVariablesPerFunction,
                     stats.NumVariablesAnalyzed);
        MaxUninitAnalysisBlockVisitsPerFunction =
            std::max(MaxUninitAnalysisBlockVisitsPerFunction,
                     stats.NumBlockVisits);
      }
    }
  }

  // Check switch fall-through; the per-function variant is used only when
  // the full warning is disabled.
  bool FallThroughDiagFull =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  bool FallThroughDiagPerFunction =
      Diags.getDiagnosticLevel(diag::warn_unannotated_fallthrough_per_function,
                               D->getLocStart()) != DiagnosticsEngine::Ignored;
  if (FallThroughDiagFull || FallThroughDiagPerFunction) {
    DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  }

  if (S.getLangOpts().ObjCARCWeak &&
      Diags.getDiagnosticLevel(diag::warn_arc_repeated_use_of_weak,
                               D->getLocStart()) != DiagnosticsEngine::Ignored)
    diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());

  // Collect statistics about the CFG if it was built.
  if (S.CollectStats && AC.isCFGBuilt()) {
    ++NumFunctionsAnalyzed;
    if (CFG *cfg = AC.getCFG()) {
      // If we successfully built a CFG for this context, record some more
      // detail information about it.
      NumCFGBlocks += cfg->getNumBlockIDs();
      MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
                                         cfg->getNumBlockIDs());
    } else {
      ++NumFunctionsWithBadCFGs;
    }
  }
}
1610
1611void clang::sema::AnalysisBasedWarnings::PrintStats() const {
1612  llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
1613
1614  unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
1615  unsigned AvgCFGBlocksPerFunction =
1616      !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
1617  llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
1618               << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
1619               << "  " << NumCFGBlocks << " CFG blocks built.\n"
1620               << "  " << AvgCFGBlocksPerFunction
1621               << " average CFG blocks per function.\n"
1622               << "  " << MaxCFGBlocksPerFunction
1623               << " max CFG blocks per function.\n";
1624
1625  unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
1626      : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
1627  unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
1628      : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
1629  llvm::errs() << NumUninitAnalysisFunctions
1630               << " functions analyzed for uninitialiazed variables\n"
1631               << "  " << NumUninitAnalysisVariables << " variables analyzed.\n"
1632               << "  " << AvgUninitVariablesPerFunction
1633               << " average variables per function.\n"
1634               << "  " << MaxUninitAnalysisVariablesPerFunction
1635               << " max variables per function.\n"
1636               << "  " << NumUninitAnalysisBlockVisits << " block visits.\n"
1637               << "  " << AvgUninitBlockVisitsPerFunction
1638               << " average block visits per function.\n"
1639               << "  " << MaxUninitAnalysisBlockVisitsPerFunction
1640               << " max block visits per function.\n";
1641}
1642