CoreEngine.cpp revision 0e89061a399bae32f0eca5b85658ad66a58c504d
//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability engine.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Index/TranslationUnit.h"
#include "clang/AST/Expr.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
// NOTE(review): std::deque is used by the worklist classes below but <deque>
// is not included directly -- we rely on a transitive include. TODO: confirm
// and add the explicit include.
using namespace clang;
using namespace ento;

// Forward declaration of the retain/release transfer-function factory, used
// by CallEnterNodeBuilder::generateNode when analyzing across translation
// units. This should be removed in the future.
namespace clang {
namespace ento {
TransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
                                const LangOptions& lopts);
}
}

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

// Out-of-line so the Visitor vtable is emitted in this translation unit.
WorkList::Visitor::~Visitor() {}

namespace {
// DFS - Depth-first exploration: work units are pushed and popped from the
// back of a stack.
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert (!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  // Visit every pending unit; stop early if the visitor returns true.
  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

// BFS - Intended breadth-first exploration over a deque.
// NOTE(review): enqueue() uses push_front() while dequeue() pops from the
// front, so the most recently enqueued unit is dequeued first -- that is
// LIFO (depth-first) order, not FIFO. Confirm whether push_back() was
// intended here.
class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_front(U);
  }

  virtual WorkListUnit dequeue() {
    // Copy before popping; a reference would dangle after pop_front().
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  // Visit every pending unit; stop early if the visitor returns true.
  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the dtor for WorkList here because the class contains virtual member
// functions, and we want the code for the dtor generated in exactly one
// compilation unit (anchoring the vtable).
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
  // BFSBlockDFSContents - Hybrid strategy: block entrances are processed in
  // queue order while the statements *within* a basic block are explored
  // depth-first (stack order).
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue; // Pending block entrances.
    SmallVector<WorkListUnit,20> Stack; // Pending intra-block work.
  public:
    virtual bool hasWork() const {
      return !Queue.empty() || !Stack.empty();
    }

    virtual void enqueue(const WorkListUnit& U) {
      if (isa<BlockEntrance>(U.getNode()->getLocation()))
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    virtual WorkListUnit dequeue() {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        const WorkListUnit& U = Stack.back();
        Stack.pop_back(); // This technically "invalidates" U, but we are fine.
        return U;
      }

      assert(!Queue.empty());
      // Don't use const reference. The subsequent pop_back() might make it
      // unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }
    // Visit the stack first (intra-block work), then the queue; stop early if
    // the visitor returns true.
    virtual bool visitItemsInWorkList(Visitor &V) {
      for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }

  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
/// A Steps value of 0 means "no limit". Returns true if work remains on the
/// worklist when the loop stops (i.e. the step budget was exhausted).
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 const GRState *InitState) {

  if (G->num_roots() == 0) { // Initialize the analysis by constructing
    // the root if none exists.

    const CFGBlock* Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Get the solitary successor.
    const CFGBlock* Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a steps limit
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0)
        break;
      --Steps;
    }

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode* Node = WU.getNode();

    // Dispatch on the location type.
    switch (Node->getLocation().getKind()) {
      case ProgramPoint::BlockEdgeKind:
        HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockEntranceKind:
        HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockExitKind:
        assert (false && "BlockExit location never occur in forward analysis.");
        break;

      case ProgramPoint::CallEnterKind:
        HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
                        WU.getIndex(), Node);
        break;

      case ProgramPoint::CallExitKind:
        HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
        break;

      default:
        // All remaining kinds are statement-level points.
        assert(isa<PostStmt>(Node->getLocation()) ||
               isa<PostInitializer>(Node->getLocation()));
        HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
        break;
    }
  }

  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

/// ExecuteWorkListWithInitialState - Run the worklist from InitState and
/// collect all end-of-path nodes into Dst.
void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 const GRState *InitState,
                                                 ExplodedNodeSet &Dst) {
  ExecuteWorkList(L, Steps, InitState);
  for (SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
                                                E = G->EndNodes.end(); I != E; ++I) {
    Dst.Add(*I);
  }
}

/// HandleCallEnter - Delegate entering an inlined call to the subengine.
void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
                                 unsigned Index, ExplodedNode *Pred) {
  CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
                               L.getCalleeContext(), Block, Index);
  SubEng.processCallEnter(Builder);
}

/// HandleCallExit - Delegate returning from an inlined call to the subengine.
void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
  CallExitNodeBuilder Builder(*this, Pred);
  SubEng.processCallExit(Builder);
}

/// HandleBlockEdge - Process traversal of a CFG edge: either finish the path
/// (edge into the EXIT block) or enter the destination block.
void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {

  const CFGBlock* Blk = L.getDst();

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    EndOfFunctionNodeBuilder Builder(Blk, Pred, this);
    SubEng.processEndOfFunction(Builder);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the subengine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE);
  SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder);

  if (dstNodes.empty()) {
    if (!nodeBuilder.hasGeneratedNode) {
      // Auto-generate a node and enqueue it to the worklist.
      generateNode(BE, Pred->State, Pred);
    }
  }
  else {
    for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end();
         I != E; ++I) {
      WList->enqueue(*I);
    }
  }

  // Record any sink nodes the subengine produced (e.g. when a block was
  // visited too many times) so they can be reported as exhausted blocks.
  for (SmallVectorImpl<ExplodedNode*>::const_iterator
       I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end();
       I != E; ++I) {
    blocksExhausted.push_back(std::make_pair(L, *I));
  }
}

/// HandleBlockEntrance - Bump the visit count for the block being entered and
/// process its first element (or its exit, if the block is empty).
void CoreEngine::HandleBlockEntrance(const BlockEntrance& L,
                                     ExplodedNode* Pred) {

  // Increment the block counter.
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter,
                             Pred->getLocationContext()->getCurrentStackFrame(),
                                           L.getBlock()->getBlockID());
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (CFGElement E = L.getFirstElement()) {
    StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement(E, Builder);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

/// HandleBlockExit - Dispatch on the block's terminator to process the branch
/// (if any); otherwise fall through to the single successor.
void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) {

  if (const Stmt* Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        assert(false && "Analysis for this terminator not implemented.");
        break;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      // These terminators have a single successor; handled by the
      // fall-through code below the switch.
      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have at most 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

/// HandleBranch - Delegate a two-way branch (condition Cond of terminator
/// Term) to the subengine via a BranchNodeBuilder.
void CoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term,
                              const CFGBlock * B, ExplodedNode* Pred) {
  assert(B->succ_size() == 2);
  BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1),
                            Pred, this);
  SubEng.processBranch(Cond, Term, Builder);
}

/// HandlePostStmt - Process the element at StmtIdx, or the block exit when
/// the index is one past the last element.
void CoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx,
                                ExplodedNode* Pred) {
  assert (!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    StmtNodeBuilder Builder(B, StmtIdx, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement((*B)[StmtIdx], Builder);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist. A null Pred marks the node as a root of
///  the ExplodedGraph.
void CoreEngine::generateNode(const ProgramPoint& Loc,
                              const GRState* State, ExplodedNode* Pred) {

  bool IsNew;
  ExplodedNode* Node = G->getNode(Loc, State, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G->addRoot(Node); // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

/// generateNodeImpl - Create (or find) a node at 'programPoint' and link it
/// to 'pred'. Returns the node only when it is freshly created; returns 0
/// for a cache hit. Sink nodes are recorded in sinksGenerated rather than
/// being enqueued.
ExplodedNode *
GenericNodeBuilderImpl::generateNodeImpl(const GRState *state,
                                         ExplodedNode *pred,
                                         ProgramPoint programPoint,
                                         bool asSink) {

  hasGeneratedNode = true;
  bool isNew;
  ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew);
  if (pred)
    node->addPredecessor(pred, engine.getGraph());
  if (isNew) {
    if (asSink) {
      node->markAsSink();
      sinksGenerated.push_back(node);
    }
    return node;
  }
  return 0;
}

// The predecessor node starts out in Deferred; the destructor auto-generates
// transitions for any deferred nodes the subengine did not process.
StmtNodeBuilder::StmtNodeBuilder(const CFGBlock* b, unsigned idx,
                                 ExplodedNode* N, CoreEngine* e,
                                 GRStateManager &mgr)
  : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr),
    PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false),
    PointKind(ProgramPoint::PostStmtKind), Tag(0) {
  Deferred.insert(N);
}

StmtNodeBuilder::~StmtNodeBuilder() {
  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink())
      GenerateAutoTransition(*I);
}

/// GenerateAutoTransition - Re-enqueue a node the subengine left untouched so
/// that analysis proceeds to the next CFG element.
void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) {
  assert (!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    Eng.WList->enqueue(N, &B, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  PostStmt Loc(getStmt(), N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew);
  Succ->addPredecessor(N, *Eng.G);

  if (IsNew)
    Eng.WList->enqueue(Succ, &B, Idx+1);
}

/// MakeNode - Generate a node for statement S; sink nodes are marked instead
/// of being added to Dst. Returns null for a cache hit.
ExplodedNode* StmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
                                        ExplodedNode* Pred, const GRState* St,
                                        ProgramPoint::Kind K) {

  ExplodedNode* N = generateNode(S, St, Pred, K);

  if (N) {
    if (BuildSinks)
      N->markAsSink();
    else
      Dst.Add(N);
  }

  return N;
}

// GetProgramPoint - Map a ProgramPoint kind to the corresponding concrete
// ProgramPoint subclass for statement S. Unhandled kinds assert and (in
// NDEBUG builds) fall through to PreStmt.
static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K,
                                    const LocationContext *LC, const void *tag){
  switch (K) {
    default:
      assert(false && "Unhandled ProgramPoint kind");
    case ProgramPoint::PreStmtKind:
      return PreStmt(S, LC, tag);
    case ProgramPoint::PostStmtKind:
      return PostStmt(S, LC, tag);
    case ProgramPoint::PreLoadKind:
      return PreLoad(S, LC, tag);
    case ProgramPoint::PostLoadKind:
      return PostLoad(S, LC, tag);
    case ProgramPoint::PreStoreKind:
      return PreStore(S, LC, tag);
    case ProgramPoint::PostStoreKind:
      return PostStore(S, LC, tag);
    case ProgramPoint::PostLValueKind:
      return PostLValue(S, LC, tag);
    case ProgramPoint::PostPurgeDeadSymbolsKind:
      return PostPurgeDeadSymbols(S, LC, tag);
  }
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state,
                                      ExplodedNode* Pred,
                                      ProgramPoint::Kind K,
                                      const void *tag) {

  const ProgramPoint &L = GetProgramPoint(S, K, Pred->getLocationContext(),tag);
  return generateNodeInternal(L, state, Pred);
}

/// generateNodeInternal - Create (or find) a node at Loc, replacing Pred in
/// the Deferred set with the fresh node. Returns NULL for a cache hit.
ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
                                      const GRState* State,
                                      ExplodedNode* Pred) {
  bool IsNew;
  ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew);
  N->addPredecessor(Pred, *Eng.G);
  Deferred.erase(Pred);

  if (IsNew) {
    Deferred.insert(N);
    return N;
  }

  return NULL;
}

// This function generates a new ExplodedNode but not a new branch(block edge).
// Note that it also advances Pred, so subsequent branch nodes chain from the
// new node.
ExplodedNode* BranchNodeBuilder::generateNode(const Stmt* Condition,
                                              const GRState* State) {
  bool IsNew;

  ExplodedNode* Succ
  = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()), State,
                   &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  Pred = Succ;

  if (IsNew)
    return Succ;

  return NULL;
}

/// generateNode - Create a node on the true or false edge of the branch.
/// Returns NULL if the branch was marked infeasible or the node already
/// existed.
ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State,
                                              bool branch) {

  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  bool IsNew;

  ExplodedNode* Succ =
    Eng.G->getNode(BlockEdge(Src,branch ? DstT:DstF,Pred->getLocationContext()),
                   State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (branch)
    GeneratedTrue = true;
  else
    GeneratedFalse = true;

  if (IsNew) {
    Deferred.push_back(Succ);
    return Succ;
  }

  return NULL;
}

// If the subengine did not cover a branch direction, auto-generate it with
// the predecessor's state, then enqueue every deferred non-sink node.
BranchNodeBuilder::~BranchNodeBuilder() {
  if (!GeneratedTrue) generateNode(Pred->State, true);
  if (!GeneratedFalse) generateNode(Pred->State, false);

  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink()) Eng.WList->enqueue(*I);
}


/// generateNode - Create a node on the edge to the indirect-goto target
/// referenced by iterator I. Sink nodes are marked instead of enqueued.
ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St,
                                      bool isSink) {
  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {

    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}


/// generateCaseStmtNode - Create a node on the edge to the case block
/// referenced by iterator I. Returns NULL for a cache hit.
ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St){

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                       Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    Eng.WList->enqueue(Succ);
    return Succ;
  }

  return NULL;
}


/// generateDefaultCaseNode - Create a node on the edge to the switch's
/// default block (by convention the last successor of the switch block).
ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) {

  // Get the block for the default case.
  assert (Src->succ_rbegin() != Src->succ_rend());
  CFGBlock* DefaultBlock = *Src->succ_rbegin();

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                       Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}

EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() {
  // Auto-generate an EOP node if one has not been generated.
  if (!hasGeneratedNode) {
    // If we are in an inlined call, generate CallExit node.
    if (Pred->getLocationContext()->getParent())
      GenerateCallExitNode(Pred->State);
    else
      generateNode(Pred->State);
  }
}

/// generateNode - Create an end-of-path node and register it with the graph.
/// Returns NULL for a cache hit.
/// NOTE(review): the program point used here is a BlockEntrance for the exit
/// block B -- presumably this stands in for a block-exit point; confirm
/// against ProgramPoint's conventions.
ExplodedNode*
EndOfFunctionNodeBuilder::generateNode(const GRState* State,
                                       ExplodedNode* P, const void *tag) {
  hasGeneratedNode = true;
  bool IsNew;

  ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B,
                               Pred->getLocationContext(), tag ? tag : Tag),
                                      State, &IsNew);

  Node->addPredecessor(P ? P : Pred, *Eng.G);

  if (IsNew) {
    Eng.G->addEndOfPath(Node);
    return Node;
  }

  return NULL;
}

/// GenerateCallExitNode - For a path ending inside an inlined call, create a
/// CallExit node so the caller's analysis resumes after the call site.
void EndOfFunctionNodeBuilder::GenerateCallExitNode(const GRState *state) {
  hasGeneratedNode = true;
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(Pred->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(Pred, *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}


/// generateNode - Begin analysis of a callee: either spin up a fresh engine
/// when the callee lives in a different translation unit, or enqueue the
/// callee's entry edge in the current graph.
void CallEnterNodeBuilder::generateNode(const GRState *state) {
  // Check if the callee is in the same translation unit.
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine should not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit.
    // The Diagnostic is actually shared when we create ASTUnits from AST files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(),
                         OldMgr.getLangOptions(),
                         OldMgr.getPathDiagnosticClient(),
                         OldMgr.getStoreManagerCreator(),
                         OldMgr.getConstraintManagerCreator(),
                         OldMgr.getCheckerManager(),
                         OldMgr.getIndexer(),
                         OldMgr.getMaxNodes(), OldMgr.getMaxVisit(),
                         OldMgr.shouldVisualizeGraphviz(),
                         OldMgr.shouldVisualizeUbigraph(),
                         OldMgr.shouldPurgeDead(),
                         OldMgr.shouldEagerlyAssume(),
                         OldMgr.shouldTrimGraph(),
                         OldMgr.shouldInlineCall(),
                     OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(),
                     OldMgr.getAnalysisContextManager().
                                         getCFGBuildOptions().AddImplicitDtors,
                     OldMgr.getAnalysisContextManager().
                                         getCFGBuildOptions().AddInitializers,
                     OldMgr.shouldEagerlyTrimExplodedGraph());
    llvm::OwningPtr<TransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(),
                                                       /* GCEnabled */ false,
                                                       AMgr.getLangOptions()));
    // Create the new engine.
    ExprEngine NewEng(AMgr, TF.take());

    // Create the new LocationContext.
    AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
                                               CalleeCtx->getTranslationUnit());
    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
                                               OldLocCtx->getParent(),
                                               OldLocCtx->getCallSite(),
                                               OldLocCtx->getCallSiteBlock(),
                                               OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const GRState *NewState = NewEng.getStateManager().MarshalState(state,
                                                                    NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

/// generateNode - Resume the caller after an inlined call: enqueue a PostStmt
/// node for the call site in the caller's context, advancing past the call.
void CallExitNodeBuilder::generateNode(const GRState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic obj dtor call, the callsite is the Stmt
  // that triggers the dtor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}