CoreEngine.cpp revision ef8225444452a1486bd721f3285301fe84643b00
//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/StmtCXX.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include <deque>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
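// A LIFO worklist: newly generated nodes are pushed onto and popped from the
// back of a stack, so the engine explores the most recent frontier first
// (depth-first exploration of the ExplodedGraph).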
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  bool hasWork() const override {
    return !Stack.empty();
  }

  void enqueue(const WorkListUnit& U) override {
    Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    assert(!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  bool visitItemsInWorkList(Visitor &V) override {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

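// A FIFO worklist: nodes are processed in the order they were generated,
// giving a breadth-first exploration of the ExplodedGraph.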
class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  bool hasWork() const override {
    return !Queue.empty();
  }

  void enqueue(const WorkListUnit& U) override {
    Queue.push_back(U);
  }

  WorkListUnit dequeue() override {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  bool visitItemsInWorkList(Visitor &V) override {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
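  // A hybrid worklist: block entrances are kept in their own deque, while all
  // other work items go onto a stack.  dequeue() drains the stack first, so
  // the contents of a basic block are explored to completion before the next
  // block entrance is taken up.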
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue;
    SmallVector<WorkListUnit,20> Stack;
  public:
    bool hasWork() const override {
      return !Queue.empty() || !Stack.empty();
    }

    void enqueue(const WorkListUnit& U) override {
      if (U.getNode()->getLocation().getAs<BlockEntrance>())
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    WorkListUnit dequeue() override {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        const WorkListUnit& U = Stack.back();
        Stack.pop_back(); // This technically "invalidates" U, but we are fine.
        return U;
      }

      assert(!Queue.empty());
      // Don't use const reference.  The subsequent pop_front() might make it
      // unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }

    bool visitItemsInWorkList(Visitor &V) override {
      for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }
  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}
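
// The factories above let the engine's owner pick an exploration order.  A
// minimal usage sketch (illustrative only; the actual worklist selection is
// made by the code that constructs the CoreEngine, not here):
//
//   std::unique_ptr<WorkList> WL(WorkList::makeBFSBlockDFSContents());
//   while (WL->hasWork()) {
//     WorkListUnit U = WL->dequeue();
//     // ... process U ...
//   }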

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
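/// Returns true if there is still simulation state on the worklist.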
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                   ProgramStateRef InitState) {

  // Initialize the analysis by constructing the root if none exists.
  if (G->num_roots() == 0) {

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() &&
           "Entry block must be empty.");

    assert(Entry->succ_size() == 1 &&
           "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), nullptr);
    else
      generateNode(StartLoc, InitState, nullptr);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind: {
      CallEnter CEnter = Loc.castAs<CallEnter>();
      SubEng.processCallEnter(CEnter, Pred);
      break;
    }

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(Loc.getAs<PostStmt>() ||
             Loc.getAs<PostInitializer>() ||
             Loc.getAs<PostImplicitCall>() ||
             Loc.getAs<CallExitEnd>());
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

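// Run the worklist algorithm, then copy the end-of-path nodes collected in
// the ExplodedGraph into Dst for the caller.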
bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G->eop_begin(),
                                   E = G->eop_end(); I != E; ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

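// Process the transition along a CFG edge: note the destination block as
// visited, handle entry into the EXIT block specially, and otherwise let the
// subengine decide whether to keep exploring the destination block.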
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {

  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert(L.getLocationContext()->getCFG()->getExit().size() == 0
           && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

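// Process the entrance into a CFG block: bump the per-stack-frame visit count
// for the block, then hand its first element to the subengine, or go straight
// to the block-exit logic if the block is empty.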
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                       ExplodedNode *Pred) {

  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

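// Process the end of a CFG block: dispatch on the block's terminator to set up
// the appropriate branch/switch handling, or simply follow the single
// successor edge when there is no terminator.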
void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      // Model static initializers.
      case Stmt::DeclStmtClass:
        HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
        return;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass: {
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;
      }

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We
        // cannot just test to see if the element is nil because a container
        // can contain nil elements.
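        //
        // For example (illustrative):
        //
        //   for (id element in collection) { ... }
        //
        // The branch handled here consumes that "more elements" value rather
        // than testing 'element' itself.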
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                    this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

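// Handle a two-way branch terminator: the subengine evaluates the condition
// and populates Dst with nodes for whichever of the two successors remain
// feasible.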
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                                const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

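// Handle the DeclStmt terminator used for a static local's initializer: the
// two successors correspond to the paths on which the initializer does or
// does not run, and the subengine decides which of them are viable.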
void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

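// Continue within a block after a statement has been processed: when the
// index is past the last CFG element, handle the block exit; otherwise hand
// the element at that index to the subengine.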
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                  ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G->getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G);  // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G->addRoot(Node);  // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

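// Enqueue a node produced while processing a CFG element.  For most statements
// this creates (or reuses) a PostStmt node and advances to the next element;
// special program points (call entries, post-initializers, implicit calls,
// epsilon points, new-allocator elements) are re-enqueued without an extra
// node.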
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>()) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G->getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, *G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N) {
  // Create a CallExitBegin node; the caller enqueues it if it is new.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx);

  bool isNew;
  ExplodedNode *Node = G->getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, *G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

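// Called with the end-of-function nodes: for nodes inside an inlined call,
// generate a CallExitBegin node and put it back on the worklist; for nodes in
// the top-level stack frame, record an end-of-path in the graph.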
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run the removal of dead bindings here.
      G->addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() { }

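// Create (or find) a node for (Loc, State), link it to FromN, and maintain the
// builder's frontier.  Returns null if the node already existed; sink nodes
// are linked in but kept off the frontier.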
ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G->getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, *C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() { }

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I)
      EnclosingBldr->addNodes(*I);
}

void BranchNodeBuilder::anchor() { }

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible, we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      false, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}