//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include <deque>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
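// A worklist that keeps pending units on a stack and always processes the
// most recently added unit first, giving a depth-first exploration of the
// exploded graph.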
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  bool hasWork() const override {
    return !Stack.empty();
  }

  void enqueue(const WorkListUnit& U) override {
    Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    assert(!Stack.empty());
    // Copy the unit before popping; a reference would dangle after pop_back().
    WorkListUnit U = Stack.back();
    Stack.pop_back();
    return U;
  }

  bool visitItemsInWorkList(Visitor &V) override {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

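// A worklist that keeps pending units in a FIFO queue and processes them in
// the order they were enqueued, giving a breadth-first exploration of the
// exploded graph.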
class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  bool hasWork() const override {
    return !Queue.empty();
  }

  void enqueue(const WorkListUnit& U) override {
    Queue.push_back(U);
  }

  WorkListUnit dequeue() override {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  bool visitItemsInWorkList(Visitor &V) override {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
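  // A hybrid worklist: units at a block entrance are kept in a deque, all
  // other units on a stack.  dequeue() drains the stack first, so the
  // contents of a basic block are processed to completion before the next
  // block entrance is taken up.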
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue;
    SmallVector<WorkListUnit,20> Stack;
  public:
    bool hasWork() const override {
      return !Queue.empty() || !Stack.empty();
    }

    void enqueue(const WorkListUnit& U) override {
      if (U.getNode()->getLocation().getAs<BlockEntrance>())
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    WorkListUnit dequeue() override {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        // Copy the unit before popping; a reference would dangle after
        // pop_back().
        WorkListUnit U = Stack.back();
        Stack.pop_back();
        return U;
      }

      assert(!Queue.empty());
      // Don't use a const reference.  The subsequent pop_front() would make it
      // unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }
    bool visitItemsInWorkList(Visitor &V) override {
      for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }

  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {

  // Initialize the analysis by constructing the root if none exists.
  if (G.num_roots() == 0) {

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), nullptr);
    else
      generateNode(StartLoc, InitState, nullptr);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

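// Dispatch a work unit to the handler that matches the kind of program point
// the node is paused at: a block edge, a block entrance, a call enter/exit,
// or a point within a block (handled by HandlePostStmt).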
void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind: {
      CallEnter CEnter = Loc.castAs<CallEnter>();
      SubEng.processCallEnter(CEnter, Pred);
      break;
    }

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

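// Process a transition along a CFG edge: mark the destination block as
// visited, finish the path if the destination is the EXIT block, and
// otherwise let the subengine process the entrance into the block.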
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {

  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

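// Process the entrance into a CFG block: bump the visit count for the block
// in the current stack frame, then process the block's first CFG element, or
// go straight to the block exit if the block is empty.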
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {

  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

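// Process the exit from a CFG block by dispatching on the block's terminator.
// Branch-like terminators are routed to HandleBranch or to a dedicated node
// builder (switches, indirect gotos); blocks without a terminator simply fall
// through to their single successor.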
void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::CXXBindTemporaryExprClass:
        HandleCleanupTemporaryBranch(
            cast<CXXBindTemporaryExpr>(B->getTerminator().getStmt()), B, Pred);
        return;

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass: {
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;
      }

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // An ObjCForCollectionStmt appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We
        // cannot just test to see if the element is nil because a container
        // can contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

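// Process a two-way branch terminator: hand the condition and both successor
// blocks to the subengine and enqueue the resulting frontier.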
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}


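// Continue within a block after a CFG element has been processed: if all of
// the block's elements have been consumed, handle the block exit; otherwise
// process the element at the given index.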
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G.addRoot(Node); // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

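// Enqueue a node produced while processing a CFG element.  Most nodes get a
// fresh successor with a plain PostStmt location and are scheduled for the
// next element of the block; a few program points (CallEnter, EpsilonPoint,
// PostInitializer, PostImplicitCall, new-allocator elements) are special-cased
// and enqueued directly with the appropriate element index.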
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert (!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

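// Create a CallExitBegin node in the callee's stack frame for a path that is
// about to return from an inlined call.  Returns the new node, or null if an
// equivalent node already existed in the graph.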
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N) {
  // Create a CallExitBegin node; the caller is responsible for enqueueing it.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}


void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

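// Process nodes that have reached the end of their function: paths inside an
// inlined call get a CallExitBegin node and go back onto the worklist, while
// paths in the top-level function are recorded as end-of-path nodes.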
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate a CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run the dead-bindings removal here.
      G.addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}


void NodeBuilder::anchor() { }

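// Create (or find) a node for the given program point and state, link it to
// its predecessor, and replace the predecessor with the new node in the
// builder's frontier (unless the node is marked as a sink).  Returns null if
// an equivalent node already existed in the graph.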
ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() { }

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I )
      EnclosingBldr->addNodes(*I);
}

void BranchNodeBuilder::anchor() { }

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible, we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT:DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}