ExplodedGraph.cpp revision 8347d3d45e6f128bba19821f0d2f54cadd4d49bb
//=-- ExplodedGraph.cpp - Local, Path-Sens. "Exploded Graph" -*- C++ -*------=//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the template classes ExplodedNode and ExplodedGraph,
// which represent a path-sensitive, intra-procedural "exploded graph."
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/ParentMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include <vector>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Node auditing.
//===----------------------------------------------------------------------===//

// An out of line virtual method to provide a home for the class vtable.
ExplodedNode::Auditor::~Auditor() {}

#ifndef NDEBUG
// The single registered auditor.  Debug builds only; it is notified of each
// new edge from ExplodedNode::addPredecessor().
static ExplodedNode::Auditor* NodeAuditor = 0;
#endif

/// Install \p A as the global node auditor.  This is a no-op in release
/// (NDEBUG) builds, where auditing support is compiled out entirely.
void ExplodedNode::SetAuditor(ExplodedNode::Auditor* A) {
#ifndef NDEBUG
  NodeAuditor = A;
#endif
}

//===----------------------------------------------------------------------===//
// Cleanup.
48//===----------------------------------------------------------------------===// 49 50static const unsigned CounterTop = 1000; 51 52ExplodedGraph::ExplodedGraph() 53 : NumNodes(0), reclaimNodes(false), reclaimCounter(CounterTop) {} 54 55ExplodedGraph::~ExplodedGraph() {} 56 57//===----------------------------------------------------------------------===// 58// Node reclamation. 59//===----------------------------------------------------------------------===// 60 61bool ExplodedGraph::shouldCollect(const ExplodedNode *node) { 62 // Reclaim all nodes that match *all* the following criteria: 63 // 64 // (1) 1 predecessor (that has one successor) 65 // (2) 1 successor (that has one predecessor) 66 // (3) The ProgramPoint is for a PostStmt. 67 // (4) There is no 'tag' for the ProgramPoint. 68 // (5) The 'store' is the same as the predecessor. 69 // (6) The 'GDM' is the same as the predecessor. 70 // (7) The LocationContext is the same as the predecessor. 71 // (8) The PostStmt isn't for a non-consumed Stmt or Expr. 72 // (9) The successor is not a CallExpr StmtPoint (so that we would be able to 73 // find it when retrying a call with no inlining). 74 // FIXME: It may be safe to reclaim PreCall and PostCall nodes as well. 75 76 // Conditions 1 and 2. 77 if (node->pred_size() != 1 || node->succ_size() != 1) 78 return false; 79 80 const ExplodedNode *pred = *(node->pred_begin()); 81 if (pred->succ_size() != 1) 82 return false; 83 84 const ExplodedNode *succ = *(node->succ_begin()); 85 if (succ->pred_size() != 1) 86 return false; 87 88 // Condition 3. 89 ProgramPoint progPoint = node->getLocation(); 90 if (!isa<PostStmt>(progPoint)) 91 return false; 92 93 // Condition 4. 94 PostStmt ps = cast<PostStmt>(progPoint); 95 if (ps.getTag()) 96 return false; 97 98 // Conditions 5, 6, and 7. 
99 ProgramStateRef state = node->getState(); 100 ProgramStateRef pred_state = pred->getState(); 101 if (state->store != pred_state->store || state->GDM != pred_state->GDM || 102 progPoint.getLocationContext() != pred->getLocationContext()) 103 return false; 104 105 // Condition 8. 106 // Do not collect nodes for non-consumed Stmt or Expr to ensure precise 107 // diagnostic generation; specifically, so that we could anchor arrows 108 // pointing to the beginning of statements (as written in code). 109 if (!isa<Expr>(ps.getStmt())) 110 return false; 111 112 if (const Expr *Ex = dyn_cast<Expr>(ps.getStmt())) { 113 ParentMap &PM = progPoint.getLocationContext()->getParentMap(); 114 if (!PM.isConsumedExpr(Ex)) 115 return false; 116 } 117 118 // Condition 9. 119 const ProgramPoint SuccLoc = succ->getLocation(); 120 if (const StmtPoint *SP = dyn_cast<StmtPoint>(&SuccLoc)) 121 if (CallEvent::isCallStmt(SP->getStmt())) 122 return false; 123 124 return true; 125} 126 127void ExplodedGraph::collectNode(ExplodedNode *node) { 128 // Removing a node means: 129 // (a) changing the predecessors successor to the successor of this node 130 // (b) changing the successors predecessor to the predecessor of this node 131 // (c) Putting 'node' onto freeNodes. 132 assert(node->pred_size() == 1 || node->succ_size() == 1); 133 ExplodedNode *pred = *(node->pred_begin()); 134 ExplodedNode *succ = *(node->succ_begin()); 135 pred->replaceSuccessor(succ); 136 succ->replacePredecessor(pred); 137 FreeNodes.push_back(node); 138 Nodes.RemoveNode(node); 139 --NumNodes; 140 node->~ExplodedNode(); 141} 142 143void ExplodedGraph::reclaimRecentlyAllocatedNodes() { 144 if (ChangedNodes.empty()) 145 return; 146 147 // Only periodically relcaim nodes so that we can build up a set of 148 // nodes that meet the reclamation criteria. Freshly created nodes 149 // by definition have no successor, and thus cannot be reclaimed (see below). 
150 assert(reclaimCounter > 0); 151 if (--reclaimCounter != 0) 152 return; 153 reclaimCounter = CounterTop; 154 155 for (NodeVector::iterator it = ChangedNodes.begin(), et = ChangedNodes.end(); 156 it != et; ++it) { 157 ExplodedNode *node = *it; 158 if (shouldCollect(node)) 159 collectNode(node); 160 } 161 ChangedNodes.clear(); 162} 163 164//===----------------------------------------------------------------------===// 165// ExplodedNode. 166//===----------------------------------------------------------------------===// 167 168// An NodeGroup's storage type is actually very much like a TinyPtrVector: 169// it can be either a pointer to a single ExplodedNode, or a pointer to a 170// BumpVector allocated with the ExplodedGraph's allocator. This allows the 171// common case of single-node NodeGroups to be implemented with no extra memory. 172// 173// Consequently, each of the NodeGroup methods have up to four cases to handle: 174// 1. The flag is set and this group does not actually contain any nodes. 175// 2. The group is empty, in which case the storage value is null. 176// 3. The group contains a single node. 177// 4. The group contains more than one node. 
typedef BumpVector<ExplodedNode *> ExplodedNodeVector;
typedef llvm::PointerUnion<ExplodedNode *, ExplodedNodeVector *> GroupStorage;

/// Link \p V -> this: V is added to this node's predecessor group and,
/// symmetrically, this node is added to V's successor group.  Sink nodes
/// may never acquire successors.
void ExplodedNode::addPredecessor(ExplodedNode *V, ExplodedGraph &G) {
  assert (!V->isSink());
  Preds.addNode(V, G);
  V->Succs.addNode(this, G);
#ifndef NDEBUG
  // Debug builds only: tell the registered auditor about the new edge.
  if (NodeAuditor) NodeAuditor->AddEdge(V, this);
#endif
}

/// Replace the group's sole node with \p node.  Only valid while the group
/// uses the inline single-pointer representation (asserted before and after).
void ExplodedNode::NodeGroup::replaceNode(ExplodedNode *node) {
  assert(!getFlag());

  // P is raw storage reinterpreted as the PointerUnion described above.
  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  assert(Storage.is<ExplodedNode *>());
  Storage = node;
  assert(Storage.is<ExplodedNode *>());
}

/// Append \p N to the group.  The group switches from the inline single-node
/// representation to a graph-allocated BumpVector on the second insertion.
void ExplodedNode::NodeGroup::addNode(ExplodedNode *N, ExplodedGraph &G) {
  assert(!getFlag());

  GroupStorage &Storage = reinterpret_cast<GroupStorage&>(P);
  if (Storage.isNull()) {
    // Empty group: store the node inline, no allocation needed.
    Storage = N;
    assert(Storage.is<ExplodedNode *>());
    return;
  }

  ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>();

  if (!V) {
    // Switch from single-node to multi-node representation.
    ExplodedNode *Old = Storage.get<ExplodedNode *>();

    // The vector lives in the graph's bump allocator; it is constructed
    // with placement new and never individually freed.
    BumpVectorContext &Ctx = G.getNodeAllocator();
    V = G.getAllocator().Allocate<ExplodedNodeVector>();
    new (V) ExplodedNodeVector(Ctx, 4);
    V->push_back(Old, Ctx);

    Storage = V;
    assert(!getFlag());
    assert(Storage.is<ExplodedNodeVector *>());
  }

  V->push_back(N, G.getNodeAllocator());
}

/// Number of nodes in the group; 0 when the flag is set or storage is empty.
unsigned ExplodedNode::NodeGroup::size() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->size();
  // Inline single-node representation.
  return 1;
}

/// Iterator to the first node, or null for an empty group.  For the inline
/// single-node case this is the address of the pointer inside the union.
ExplodedNode * const *ExplodedNode::NodeGroup::begin() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->begin();
  return Storage.getAddrOfPtr1();
}

/// Past-the-end iterator matching begin() above.
ExplodedNode * const *ExplodedNode::NodeGroup::end() const {
  if (getFlag())
    return 0;

  const GroupStorage &Storage = reinterpret_cast<const GroupStorage &>(P);
  if (Storage.isNull())
    return 0;
  if (ExplodedNodeVector *V = Storage.dyn_cast<ExplodedNodeVector *>())
    return V->end();
  return Storage.getAddrOfPtr1() + 1;
}

/// Retrieve the node for (L, State, IsSink), creating it if it does not yet
/// exist.  Nodes are uniqued in a FoldingSet; *IsNew (if non-null) reports
/// whether a fresh node was created.  Storage for new nodes comes from the
/// FreeNodes recycle list when possible, else from the bump allocator.
ExplodedNode *ExplodedGraph::getNode(const ProgramPoint &L,
                                     ProgramStateRef State,
                                     bool IsSink,
                                     bool* IsNew) {
  // Profile 'State' to determine if we already have an existing node.
  llvm::FoldingSetNodeID profile;
  void *InsertPos = 0;

  NodeTy::Profile(profile, L, State, IsSink);
  NodeTy* V = Nodes.FindNodeOrInsertPos(profile, InsertPos);

  if (!V) {
    // Reuse a previously collected node's storage when available.
    if (!FreeNodes.empty()) {
      V = FreeNodes.back();
      FreeNodes.pop_back();
    }
    else {
      // Allocate a new node.
      V = (NodeTy*) getAllocator().Allocate<NodeTy>();
    }

    new (V) NodeTy(L, State, IsSink);

    // Track new nodes as reclamation candidates when reclamation is enabled.
    if (reclaimNodes)
      ChangedNodes.push_back(V);

    // Insert the node into the node set and return it.
    Nodes.InsertNode(V, InsertPos);
    ++NumNodes;

    if (IsNew) *IsNew = true;
  }
  else
    if (IsNew) *IsNew = false;

  return V;
}

/// Public entry for graph trimming.  Returns (null, null) for an empty
/// source range; otherwise forwards to TrimInternal and transfers ownership
/// of both the trimmed graph and the old->new node map to the caller.
std::pair<ExplodedGraph*, InterExplodedGraphMap*>
ExplodedGraph::Trim(const NodeTy* const* NBeg, const NodeTy* const* NEnd,
               llvm::DenseMap<const void*, const void*> *InverseMap) const {

  if (NBeg == NEnd)
    return std::make_pair((ExplodedGraph*) 0,
                          (InterExplodedGraphMap*) 0);

  assert (NBeg < NEnd);

  OwningPtr<InterExplodedGraphMap> M(new InterExplodedGraphMap());

  ExplodedGraph* G = TrimInternal(NBeg, NEnd, M.get(), InverseMap);

  return std::make_pair(static_cast<ExplodedGraph*>(G), M.take());
}

/// Build a trimmed copy of this graph containing only nodes that can reach
/// one of the given source nodes.  Pass 1 walks predecessor edges backwards
/// from the sources to mark reachable nodes and find roots; pass 2 walks
/// forward from those roots, recreating marked nodes in a fresh graph.
/// \p M receives the old->new node mapping; \p InverseMap (optional)
/// receives new->old.  Returns null if no root was reached.
ExplodedGraph*
ExplodedGraph::TrimInternal(const ExplodedNode* const* BeginSources,
                            const ExplodedNode* const* EndSources,
                            InterExplodedGraphMap* M,
                            llvm::DenseMap<const void*, const void*> *InverseMap) const {

  typedef llvm::DenseSet<const ExplodedNode*> Pass1Ty;
  Pass1Ty Pass1;

  typedef llvm::DenseMap<const ExplodedNode*, ExplodedNode*> Pass2Ty;
  Pass2Ty& Pass2 = M->M;

  SmallVector<const ExplodedNode*, 10> WL1, WL2;

  // ===- Pass 1 (reverse DFS) -===
  for (const ExplodedNode* const* I = BeginSources; I != EndSources; ++I) {
    if (*I)
      WL1.push_back(*I);
  }

  // Process the first worklist until it is empty.  Nodes are pushed and
  // popped at the back of the SmallVector, so this is LIFO (depth-first)
  // order.
  while (!WL1.empty()) {
    const ExplodedNode *N = WL1.back();
    WL1.pop_back();

    // Have we already visited this node?  If so, continue to the next one.
    if (Pass1.count(N))
      continue;

    // Otherwise, mark this node as visited.
    Pass1.insert(N);

    // If this is a root enqueue it to the second worklist.
    if (N->Preds.empty()) {
      WL2.push_back(N);
      continue;
    }

    // Visit our predecessors and enqueue them.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I)
      WL1.push_back(*I);
  }

  // We didn't hit a root? Return with a null pointer for the new graph.
  if (WL2.empty())
    return 0;

  // Create an empty graph.
  ExplodedGraph* G = MakeEmptyGraph();

  // ===- Pass 2 (forward DFS to construct the new graph) -===
  while (!WL2.empty()) {
    const ExplodedNode *N = WL2.back();
    WL2.pop_back();

    // Skip this node if we have already processed it.
    if (Pass2.find(N) != Pass2.end())
      continue;

    // Create the corresponding node in the new graph and record the mapping
    // from the old node to the new node.
    ExplodedNode *NewN = G->getNode(N->getLocation(), N->State, N->isSink(), 0);
    Pass2[N] = NewN;

    // Also record the reverse mapping from the new node to the old node.
    if (InverseMap) (*InverseMap)[NewN] = N;

    // If this node is a root, designate it as such in the graph.
    if (N->Preds.empty())
      G->addRoot(NewN);

    // In the case that some of the intended predecessors of NewN have already
    // been created, we should hook them up as predecessors.

    // Walk through the predecessors of 'N' and hook up their corresponding
    // nodes in the new graph (if any) to the freshly created node.
    for (ExplodedNode::pred_iterator I = N->Preds.begin(), E = N->Preds.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI == Pass2.end())
        continue;

      NewN->addPredecessor(PI->second, *G);
    }

    // In the case that some of the intended successors of NewN have already
    // been created, we should hook them up as successors.  Otherwise, enqueue
    // the new nodes from the original graph that should have nodes created
    // in the new graph.
    for (ExplodedNode::succ_iterator I = N->Succs.begin(), E = N->Succs.end();
         I != E; ++I) {
      Pass2Ty::iterator PI = Pass2.find(*I);
      if (PI != Pass2.end()) {
        PI->second->addPredecessor(NewN, *G);
        continue;
      }

      // Enqueue nodes to the worklist that were marked during pass 1.
      if (Pass1.count(*I))
        WL2.push_back(*I);
    }
  }

  return G;
}

// Out-of-line virtual method anchoring the vtable for InterExplodedGraphMap.
void InterExplodedGraphMap::anchor() { }

/// Look up the trimmed-graph counterpart of \p N recorded during Trim();
/// returns null if N has no counterpart.
ExplodedNode*
InterExplodedGraphMap::getMappedNode(const ExplodedNode *N) const {
  llvm::DenseMap<const ExplodedNode*, ExplodedNode*>::const_iterator I =
    M.find(N);

  return I == M.end() ? 0 : I->second;
}