// CoreEngine.cpp — revision dd7ddf2b2296f95e7591ca3f9791f0eb9a15ee42
1//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file defines a generic engine for intraprocedural, path-sensitive, 11// dataflow analysis via graph reachability engine. 12// 13//===----------------------------------------------------------------------===// 14 15#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 16#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h" 17#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" 18#include "clang/Index/TranslationUnit.h" 19#include "clang/AST/Expr.h" 20#include "clang/AST/StmtCXX.h" 21#include "llvm/Support/Casting.h" 22#include "llvm/ADT/DenseMap.h" 23using namespace clang; 24using namespace ento; 25 26//===----------------------------------------------------------------------===// 27// Worklist classes for exploration of reachable states. 28//===----------------------------------------------------------------------===// 29 30WorkList::Visitor::~Visitor() {} 31 32namespace { 33class DFS : public WorkList { 34 SmallVector<WorkListUnit,20> Stack; 35public: 36 virtual bool hasWork() const { 37 return !Stack.empty(); 38 } 39 40 virtual void enqueue(const WorkListUnit& U) { 41 Stack.push_back(U); 42 } 43 44 virtual WorkListUnit dequeue() { 45 assert (!Stack.empty()); 46 const WorkListUnit& U = Stack.back(); 47 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 
48 return U; 49 } 50 51 virtual bool visitItemsInWorkList(Visitor &V) { 52 for (SmallVectorImpl<WorkListUnit>::iterator 53 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 54 if (V.visit(*I)) 55 return true; 56 } 57 return false; 58 } 59}; 60 61class BFS : public WorkList { 62 std::deque<WorkListUnit> Queue; 63public: 64 virtual bool hasWork() const { 65 return !Queue.empty(); 66 } 67 68 virtual void enqueue(const WorkListUnit& U) { 69 Queue.push_front(U); 70 } 71 72 virtual WorkListUnit dequeue() { 73 WorkListUnit U = Queue.front(); 74 Queue.pop_front(); 75 return U; 76 } 77 78 virtual bool visitItemsInWorkList(Visitor &V) { 79 for (std::deque<WorkListUnit>::iterator 80 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 81 if (V.visit(*I)) 82 return true; 83 } 84 return false; 85 } 86}; 87 88} // end anonymous namespace 89 90// Place the dstor for WorkList here because it contains virtual member 91// functions, and we the code for the dstor generated in one compilation unit. 92WorkList::~WorkList() {} 93 94WorkList *WorkList::makeDFS() { return new DFS(); } 95WorkList *WorkList::makeBFS() { return new BFS(); } 96 97namespace { 98 class BFSBlockDFSContents : public WorkList { 99 std::deque<WorkListUnit> Queue; 100 SmallVector<WorkListUnit,20> Stack; 101 public: 102 virtual bool hasWork() const { 103 return !Queue.empty() || !Stack.empty(); 104 } 105 106 virtual void enqueue(const WorkListUnit& U) { 107 if (isa<BlockEntrance>(U.getNode()->getLocation())) 108 Queue.push_front(U); 109 else 110 Stack.push_back(U); 111 } 112 113 virtual WorkListUnit dequeue() { 114 // Process all basic blocks to completion. 115 if (!Stack.empty()) { 116 const WorkListUnit& U = Stack.back(); 117 Stack.pop_back(); // This technically "invalidates" U, but we are fine. 118 return U; 119 } 120 121 assert(!Queue.empty()); 122 // Don't use const reference. The subsequent pop_back() might make it 123 // unsafe. 
124 WorkListUnit U = Queue.front(); 125 Queue.pop_front(); 126 return U; 127 } 128 virtual bool visitItemsInWorkList(Visitor &V) { 129 for (SmallVectorImpl<WorkListUnit>::iterator 130 I = Stack.begin(), E = Stack.end(); I != E; ++I) { 131 if (V.visit(*I)) 132 return true; 133 } 134 for (std::deque<WorkListUnit>::iterator 135 I = Queue.begin(), E = Queue.end(); I != E; ++I) { 136 if (V.visit(*I)) 137 return true; 138 } 139 return false; 140 } 141 142 }; 143} // end anonymous namespace 144 145WorkList* WorkList::makeBFSBlockDFSContents() { 146 return new BFSBlockDFSContents(); 147} 148 149//===----------------------------------------------------------------------===// 150// Core analysis engine. 151//===----------------------------------------------------------------------===// 152 153/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps. 154bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps, 155 const ProgramState *InitState) { 156 157 if (G->num_roots() == 0) { // Initialize the analysis by constructing 158 // the root if none exists. 159 160 const CFGBlock *Entry = &(L->getCFG()->getEntry()); 161 162 assert (Entry->empty() && 163 "Entry block must be empty."); 164 165 assert (Entry->succ_size() == 1 && 166 "Entry block must have 1 successor."); 167 168 // Get the solitary successor. 169 const CFGBlock *Succ = *(Entry->succ_begin()); 170 171 // Construct an edge representing the 172 // starting location in the function. 173 BlockEdge StartLoc(Entry, Succ, L); 174 175 // Set the current block counter to being empty. 176 WList->setBlockCounter(BCounterFactory.GetEmptyCounter()); 177 178 if (!InitState) 179 // Generate the root. 
180 generateNode(StartLoc, SubEng.getInitialState(L), 0); 181 else 182 generateNode(StartLoc, InitState, 0); 183 } 184 185 // Check if we have a steps limit 186 bool UnlimitedSteps = Steps == 0; 187 188 while (WList->hasWork()) { 189 if (!UnlimitedSteps) { 190 if (Steps == 0) 191 break; 192 --Steps; 193 } 194 195 const WorkListUnit& WU = WList->dequeue(); 196 197 // Set the current block counter. 198 WList->setBlockCounter(WU.getBlockCounter()); 199 200 // Retrieve the node. 201 ExplodedNode *Node = WU.getNode(); 202 203 // Dispatch on the location type. 204 switch (Node->getLocation().getKind()) { 205 case ProgramPoint::BlockEdgeKind: 206 HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node); 207 break; 208 209 case ProgramPoint::BlockEntranceKind: 210 HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node); 211 break; 212 213 case ProgramPoint::BlockExitKind: 214 assert (false && "BlockExit location never occur in forward analysis."); 215 break; 216 217 case ProgramPoint::CallEnterKind: 218 HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(), 219 WU.getIndex(), Node); 220 break; 221 222 case ProgramPoint::CallExitKind: 223 HandleCallExit(cast<CallExit>(Node->getLocation()), Node); 224 break; 225 226 default: 227 assert(isa<PostStmt>(Node->getLocation()) || 228 isa<PostInitializer>(Node->getLocation())); 229 HandlePostStmt(WU.getBlock(), WU.getIndex(), Node); 230 break; 231 } 232 } 233 234 SubEng.processEndWorklist(hasWorkRemaining()); 235 return WList->hasWork(); 236} 237 238void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L, 239 unsigned Steps, 240 const ProgramState *InitState, 241 ExplodedNodeSet &Dst) { 242 ExecuteWorkList(L, Steps, InitState); 243 for (SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(), 244 E = G->EndNodes.end(); I != E; ++I) { 245 Dst.Add(*I); 246 } 247} 248 249void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block, 250 unsigned Index, ExplodedNode 
*Pred) { 251 CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(), 252 L.getCalleeContext(), Block, Index); 253 SubEng.processCallEnter(Builder); 254} 255 256void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) { 257 CallExitNodeBuilder Builder(*this, Pred); 258 SubEng.processCallExit(Builder); 259} 260 261void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) { 262 263 const CFGBlock *Blk = L.getDst(); 264 NodeBuilderContext BuilderCtx(*this, Blk, Pred); 265 266 // Check if we are entering the EXIT block. 267 if (Blk == &(L.getLocationContext()->getCFG()->getExit())) { 268 269 assert (L.getLocationContext()->getCFG()->getExit().size() == 0 270 && "EXIT block cannot contain Stmts."); 271 272 // Process the final state transition. 273 SubEng.processEndOfFunction(BuilderCtx); 274 275 // This path is done. Don't enqueue any more nodes. 276 return; 277 } 278 279 // Call into the SubEngine to process entering the CFGBlock. 280 ExplodedNodeSet dstNodes; 281 BlockEntrance BE(Blk, Pred->getLocationContext()); 282 NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE); 283 SubEng.processCFGBlockEntrance(nodeBuilder); 284 285 // Auto-generate a node. 286 if (!nodeBuilder.hasGeneratedNodes()) { 287 nodeBuilder.generateNode(Pred->State, Pred); 288 } 289 290 // Enqueue nodes onto the worklist. 291 enqueue(dstNodes); 292 293 // Make sink nodes as exhausted. 294 const SmallVectorImpl<ExplodedNode*> &Sinks = nodeBuilder.getSinks(); 295 for (SmallVectorImpl<ExplodedNode*>::const_iterator 296 I =Sinks.begin(), E = Sinks.end(); I != E; ++I) { 297 blocksExhausted.push_back(std::make_pair(L, *I)); 298 } 299} 300 301void CoreEngine::HandleBlockEntrance(const BlockEntrance &L, 302 ExplodedNode *Pred) { 303 304 // Increment the block counter. 
305 BlockCounter Counter = WList->getBlockCounter(); 306 Counter = BCounterFactory.IncrementCount(Counter, 307 Pred->getLocationContext()->getCurrentStackFrame(), 308 L.getBlock()->getBlockID()); 309 WList->setBlockCounter(Counter); 310 311 // Process the entrance of the block. 312 if (CFGElement E = L.getFirstElement()) { 313 NodeBuilderContext Ctx(*this, L.getBlock(), Pred); 314 SubEng.processCFGElement(E, Pred, 0, &Ctx); 315 } 316 else 317 HandleBlockExit(L.getBlock(), Pred); 318} 319 320void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) { 321 322 if (const Stmt *Term = B->getTerminator()) { 323 switch (Term->getStmtClass()) { 324 default: 325 llvm_unreachable("Analysis for this terminator not implemented."); 326 327 case Stmt::BinaryOperatorClass: // '&&' and '||' 328 HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred); 329 return; 330 331 case Stmt::BinaryConditionalOperatorClass: 332 case Stmt::ConditionalOperatorClass: 333 HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(), 334 Term, B, Pred); 335 return; 336 337 // FIXME: Use constant-folding in CFG construction to simplify this 338 // case. 339 340 case Stmt::ChooseExprClass: 341 HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred); 342 return; 343 344 case Stmt::DoStmtClass: 345 HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred); 346 return; 347 348 case Stmt::CXXForRangeStmtClass: 349 HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred); 350 return; 351 352 case Stmt::ForStmtClass: 353 HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred); 354 return; 355 356 case Stmt::ContinueStmtClass: 357 case Stmt::BreakStmtClass: 358 case Stmt::GotoStmtClass: 359 break; 360 361 case Stmt::IfStmtClass: 362 HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred); 363 return; 364 365 case Stmt::IndirectGotoStmtClass: { 366 // Only 1 successor: the indirect goto dispatch block. 
367 assert (B->succ_size() == 1); 368 369 IndirectGotoNodeBuilder 370 builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(), 371 *(B->succ_begin()), this); 372 373 SubEng.processIndirectGoto(builder); 374 return; 375 } 376 377 case Stmt::ObjCForCollectionStmtClass: { 378 // In the case of ObjCForCollectionStmt, it appears twice in a CFG: 379 // 380 // (1) inside a basic block, which represents the binding of the 381 // 'element' variable to a value. 382 // (2) in a terminator, which represents the branch. 383 // 384 // For (1), subengines will bind a value (i.e., 0 or 1) indicating 385 // whether or not collection contains any more elements. We cannot 386 // just test to see if the element is nil because a container can 387 // contain nil elements. 388 HandleBranch(Term, Term, B, Pred); 389 return; 390 } 391 392 case Stmt::SwitchStmtClass: { 393 SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(), 394 this); 395 396 SubEng.processSwitch(builder); 397 return; 398 } 399 400 case Stmt::WhileStmtClass: 401 HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred); 402 return; 403 } 404 } 405 406 assert (B->succ_size() == 1 && 407 "Blocks with no terminator should have at most 1 successor."); 408 409 generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()), 410 Pred->State, Pred); 411} 412 413void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term, 414 const CFGBlock * B, ExplodedNode *Pred) { 415 assert(B->succ_size() == 2); 416 NodeBuilderContext Ctx(*this, B, Pred); 417 ExplodedNodeSet Dst; 418 SubEng.processBranch(Cond, Term, Ctx, Pred, Dst, 419 *(B->succ_begin()), *(B->succ_begin()+1)); 420 // Enqueue the new frontier onto the worklist. 
421 enqueue(Dst); 422} 423 424void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx, 425 ExplodedNode *Pred) { 426 assert(B); 427 assert(!B->empty()); 428 429 if (StmtIdx == B->size()) 430 HandleBlockExit(B, Pred); 431 else { 432 NodeBuilderContext Ctx(*this, B, Pred); 433 SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx); 434 } 435} 436 437/// generateNode - Utility method to generate nodes, hook up successors, 438/// and add nodes to the worklist. 439void CoreEngine::generateNode(const ProgramPoint &Loc, 440 const ProgramState *State, 441 ExplodedNode *Pred) { 442 443 bool IsNew; 444 ExplodedNode *Node = G->getNode(Loc, State, &IsNew); 445 446 if (Pred) 447 Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor. 448 else { 449 assert (IsNew); 450 G->addRoot(Node); // 'Node' has no predecessor. Make it a root. 451 } 452 453 // Only add 'Node' to the worklist if it was freshly generated. 454 if (IsNew) WList->enqueue(Node); 455} 456 457void CoreEngine::enqueueStmtNode(ExplodedNode *N, 458 const CFGBlock *Block, unsigned Idx) { 459 assert (!N->isSink()); 460 461 // Check if this node entered a callee. 462 if (isa<CallEnter>(N->getLocation())) { 463 // Still use the index of the CallExpr. It's needed to create the callee 464 // StackFrameContext. 465 WList->enqueue(N, Block, Idx); 466 return; 467 } 468 469 // Do not create extra nodes. Move to the next CFG element. 470 if (isa<PostInitializer>(N->getLocation())) { 471 WList->enqueue(N, Block, Idx+1); 472 return; 473 } 474 475 const CFGStmt *CS = (*Block)[Idx].getAs<CFGStmt>(); 476 const Stmt *St = CS ? CS->getStmt() : 0; 477 PostStmt Loc(St, N->getLocationContext()); 478 479 if (Loc == N->getLocation()) { 480 // Note: 'N' should be a fresh node because otherwise it shouldn't be 481 // a member of Deferred. 
482 WList->enqueue(N, Block, Idx+1); 483 return; 484 } 485 486 bool IsNew; 487 ExplodedNode *Succ = G->getNode(Loc, N->getState(), &IsNew); 488 Succ->addPredecessor(N, *G); 489 490 if (IsNew) 491 WList->enqueue(Succ, Block, Idx+1); 492} 493 494void CoreEngine::enqueue(ExplodedNodeSet &Set) { 495 for (ExplodedNodeSet::iterator I = Set.begin(), 496 E = Set.end(); I != E; ++I) { 497 WList->enqueue(*I); 498 } 499} 500 501void CoreEngine::enqueue(ExplodedNodeSet &Set, 502 const CFGBlock *Block, unsigned Idx) { 503 for (ExplodedNodeSet::iterator I = Set.begin(), 504 E = Set.end(); I != E; ++I) { 505 enqueueStmtNode(*I, Block, Idx); 506 } 507} 508 509 510ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc, 511 const ProgramState *State, 512 ExplodedNode *FromN, 513 bool MarkAsSink) { 514 HasGeneratedNodes = true; 515 bool IsNew; 516 ExplodedNode *N = C.Eng.G->getNode(Loc, State, &IsNew); 517 N->addPredecessor(FromN, *C.Eng.G); 518 Frontier.erase(FromN); 519 520 if (MarkAsSink) 521 N->markAsSink(); 522 523 if (IsNew && !MarkAsSink) 524 Frontier.Add(N); 525 526 return (IsNew ? N : 0); 527} 528 529StmtNodeBuilder::~StmtNodeBuilder() { 530 if (EnclosingBldr) 531 for (ExplodedNodeSet::iterator I = Frontier.begin(), 532 E = Frontier.end(); I != E; ++I ) 533 EnclosingBldr->addNodes(*I); 534} 535 536ExplodedNode *BranchNodeBuilder::generateNode(const ProgramState *State, 537 bool branch, 538 ExplodedNode *NodePred) { 539 // If the branch has been marked infeasible we should not generate a node. 540 if (!isFeasible(branch)) 541 return NULL; 542 543 ProgramPoint Loc = BlockEdge(C.Block, branch ? 
DstT:DstF, 544 NodePred->getLocationContext()); 545 ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred); 546 return Succ; 547} 548 549ExplodedNode* 550IndirectGotoNodeBuilder::generateNode(const iterator &I, 551 const ProgramState *St, 552 bool isSink) { 553 bool IsNew; 554 555 ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(), 556 Pred->getLocationContext()), St, &IsNew); 557 558 Succ->addPredecessor(Pred, *Eng.G); 559 560 if (IsNew) { 561 562 if (isSink) 563 Succ->markAsSink(); 564 else 565 Eng.WList->enqueue(Succ); 566 567 return Succ; 568 } 569 570 return NULL; 571} 572 573 574ExplodedNode* 575SwitchNodeBuilder::generateCaseStmtNode(const iterator &I, 576 const ProgramState *St) { 577 578 bool IsNew; 579 ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(), 580 Pred->getLocationContext()), 581 St, &IsNew); 582 Succ->addPredecessor(Pred, *Eng.G); 583 if (IsNew) { 584 Eng.WList->enqueue(Succ); 585 return Succ; 586 } 587 return NULL; 588} 589 590 591ExplodedNode* 592SwitchNodeBuilder::generateDefaultCaseNode(const ProgramState *St, 593 bool isSink) { 594 // Get the block for the default case. 595 assert(Src->succ_rbegin() != Src->succ_rend()); 596 CFGBlock *DefaultBlock = *Src->succ_rbegin(); 597 598 // Sanity check for default blocks that are unreachable and not caught 599 // by earlier stages. 600 if (!DefaultBlock) 601 return NULL; 602 603 bool IsNew; 604 605 ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock, 606 Pred->getLocationContext()), St, &IsNew); 607 Succ->addPredecessor(Pred, *Eng.G); 608 609 if (IsNew) { 610 if (isSink) 611 Succ->markAsSink(); 612 else 613 Eng.WList->enqueue(Succ); 614 615 return Succ; 616 } 617 618 return NULL; 619} 620 621void CallEnterNodeBuilder::generateNode(const ProgramState *state) { 622 // Check if the callee is in the same translation unit. 623 if (CalleeCtx->getTranslationUnit() != 624 Pred->getLocationContext()->getTranslationUnit()) { 625 // Create a new engine. 
We must be careful that the new engine should not 626 // reference data structures owned by the old engine. 627 628 AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager(); 629 630 // Get the callee's translation unit. 631 idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit(); 632 633 // Create a new AnalysisManager with components of the callee's 634 // TranslationUnit. 635 // The Diagnostic is actually shared when we create ASTUnits from AST files. 636 AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(), OldMgr); 637 638 // Create the new engine. 639 // FIXME: This cast isn't really safe. 640 bool GCEnabled = static_cast<ExprEngine&>(Eng.SubEng).isObjCGCEnabled(); 641 ExprEngine NewEng(AMgr, GCEnabled); 642 643 // Create the new LocationContext. 644 AnalysisDeclContext *NewAnaCtx = 645 AMgr.getAnalysisDeclContext(CalleeCtx->getDecl(), 646 CalleeCtx->getTranslationUnit()); 647 648 const StackFrameContext *OldLocCtx = CalleeCtx; 649 const StackFrameContext *NewLocCtx = 650 NewAnaCtx->getStackFrame(OldLocCtx->getParent(), 651 OldLocCtx->getCallSite(), 652 OldLocCtx->getCallSiteBlock(), 653 OldLocCtx->getIndex()); 654 655 // Now create an initial state for the new engine. 656 const ProgramState *NewState = 657 NewEng.getStateManager().MarshalState(state, NewLocCtx); 658 ExplodedNodeSet ReturnNodes; 659 NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(), 660 NewState, ReturnNodes); 661 return; 662 } 663 664 // Get the callee entry block. 665 const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry()); 666 assert(Entry->empty()); 667 assert(Entry->succ_size() == 1); 668 669 // Get the solitary successor. 670 const CFGBlock *SuccB = *(Entry->succ_begin()); 671 672 // Construct an edge representing the starting location in the callee. 
673 BlockEdge Loc(Entry, SuccB, CalleeCtx); 674 675 bool isNew; 676 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew); 677 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G); 678 679 if (isNew) 680 Eng.WList->enqueue(Node); 681} 682 683void CallExitNodeBuilder::generateNode(const ProgramState *state) { 684 // Get the callee's location context. 685 const StackFrameContext *LocCtx 686 = cast<StackFrameContext>(Pred->getLocationContext()); 687 // When exiting an implicit automatic obj dtor call, the callsite is the Stmt 688 // that triggers the dtor. 689 PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent()); 690 bool isNew; 691 ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew); 692 Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G); 693 if (isNew) 694 Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(), 695 LocCtx->getIndex() + 1); 696} 697