ExprEngineCallAndReturn.cpp revision 84c484545c5906ba55143e212b4a5275ab55889f
//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "ExprEngine"

#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/ParentMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

void ExprEngine::processCallEnter(CallEnter CE, ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  const CFG *CalleeCFG = calleeCtx->getCFG();
  const CFGBlock *Entry = &(CalleeCFG->getEntry());

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node and add it to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew)
    Engine.getWorkList()->enqueue(Node);
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = 0;
  const StackFrameContext *SF =
          Node->getLocation().getLocationContext()->getCurrentStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
      if (const StmtPoint *SP = dyn_cast<StmtPoint>(&PP)) {
        S = SP->getStmt();
        break;
      } else if (const CallExitEnd *CEE = dyn_cast<CallExitEnd>(&PP)) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        const CallEnter *CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      }
    } else if (const CallEnter *CE = dyn_cast<CallEnter>(&PP)) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::pair<const Stmt*, const CFGBlock*>((Stmt*)0, (CFGBlock*)0);

    Node = *Node->pred_begin();
  }

  const CFGBlock *Blk = 0;
  if (S) {
    // Now, get the enclosing basic block.
    while (Node) {
      const ProgramPoint &PP = Node->getLocation();
      if (isa<BlockEdge>(PP) &&
          (PP.getLocationContext()->getCurrentStackFrame() == SF)) {
        BlockEdge &EPP = cast<BlockEdge>(PP);
        Blk = EPP.getDst();
        break;
      }
      if (Node->pred_empty())
        return std::pair<const Stmt*, const CFGBlock*>(S, (CFGBlock*)0);

      Node = *Node->pred_begin();
    }
  }

  return std::pair<const Stmt*, const CFGBlock*>(S, Blk);
}

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!isa<Loc>(V))
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we call the Symbol Reaper with 0 stack context telling it to clean up
  // everything on the stack. We use LastStmt as a diagnostic statement, with
  // which the program point will be associated. However, we only want to use
  // LastStmt as a reference for what to clean up if it's a ReturnStmt;
  // otherwise, everything is dead.
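  // Illustrative note (added commentary, not from the original source): for a
  // function like
  //   int f() { int x = compute(); return x; }
  // LastSt is the ReturnStmt, so the value it references stays live across the
  // purge; if the function instead ends in an ordinary statement (e.g. a void
  // function), nothing needs to be referenced and the whole frame is dead.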
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt),
             Pred->getLocationContext(), LastSt,
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
                                            const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.

  const StackFrameContext *calleeCtx =
      CEBNode->getLocationContext()->getCurrentStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getCurrentStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = 0;
  const CFGBlock *Blk = 0;
  llvm::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);

      // If the constructed object is a prvalue, get its bindings.
      // Note that we have to be careful here because constructors embedded
      // in DeclStmts are not marked as lvalues.
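      // Illustrative sketch (added commentary, not from the original source):
      // when the object was constructed into a temporary, e.g. the temporary
      // in 'const Widget &w = Widget(1);', 'this' is a CXXTempObjectRegion and
      // we bind the object's value; for a DeclStmt constructor like
      // 'Widget w(1);' the expression is not a glvalue either, but 'this' is
      // w's own region, which we bind directly. ('Widget' is hypothetical.)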
      if (!CCE->isGLValue())
        if (const MemRegion *MR = ThisV.getAsRegion())
          if (isa<CXXTempObjectRegion>(MR))
            ThisV = state->getSVal(cast<Loc>(ThisV));

      state = state->BindExpr(CCE, callerCtx, ThisV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine : Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use LastSt as a diagnostic statement, with
    // which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, 0, calleeCtx, LastSt,
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = 0;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                               *UpdatedCall, *this,
                                               /*WasInlined=*/true);

    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
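    // (Clarifying note, added: the call site occupies index getIndex() in the
    // caller's CFG block, so enqueueing at getIndex()+1 resumes the caller at
    // the CFG element immediately after the call.)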
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
         PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      const CFG *CalleeCFG = CalleeADC->getCFG();
      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }

}

static bool IsInStdNamespace(const FunctionDecl *FD) {
  const DeclContext *DC = FD->getEnclosingNamespaceContext();
  const NamespaceDecl *ND = dyn_cast<NamespaceDecl>(DC);
  if (!ND)
    return false;

  while (const DeclContext *Parent = ND->getParent()) {
    if (!isa<NamespaceDecl>(Parent))
      break;
    ND = cast<NamespaceDecl>(Parent);
  }

  return ND->getName() == "std";
}

// Determine if we should inline the call.
bool ExprEngine::shouldInlineDecl(const Decl *D, ExplodedNode *Pred) {
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const CFG *CalleeCFG = CalleeADC->getCFG();

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  if (!CalleeCFG)
    return false;

  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= AMgr.options.InlineMaxStackDepth) &&
       ((CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
         || IsRecursive))
    return false;

  if (Engine.FunctionSummaries->hasReachedMaxBlockCount(D))
    return false;

  if (CalleeCFG->getNumBlockIDs() > AMgr.options.InlineMaxFunctionSize)
    return false;

  // Do not inline variadic calls (for now).
  if (const BlockDecl *BD = dyn_cast<BlockDecl>(D)) {
    if (BD->isVariadic())
      return false;
  }
  else if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    if (FD->isVariadic())
      return false;
  }

  if (getContext().getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Conditionally allow the inlining of template functions.
      if (!getAnalysisManager().options.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally allow the inlining of C++ standard library functions.
      if (!getAnalysisManager().options.mayInlineCXXStandardLibrary())
        if (getContext().getSourceManager().isInSystemHeader(FD->getLocation()))
          if (IsInStdNamespace(FD))
            return false;
    }
  }

  // It is possible that the live variables analysis cannot be
  // run. If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying whether we
// consider this region's information precise or not along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
}
REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  const LocationContext *ParentOfCallee = 0;

  AnalyzerOptions &Opts = getAnalysisManager().options;

  // FIXME: Refactor this check into a hypothetical CallEvent::canInline.
  switch (Call.getKind()) {
  case CE_Function:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return false;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return false;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    const MemRegion *Target = Ctor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return false;

    // FIXME: This is a hack. We don't use the correct region for a new
    // expression, so if we inline the constructor its result will just be
    // thrown away. This short-term hack is tracked in <rdar://problem/12180598>
    // and the longer-term possible fix is discussed in PR12014.
    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
    if (const Stmt *Parent = CurLC->getParentMap().getParent(CtorExpr))
      if (isa<CXXNewExpr>(Parent))
        return false;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return false;

    // FIXME: This is a hack. We don't handle temporary destructors
    // right now, so we shouldn't inline their constructors.
    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete)
      if (!Target || !isa<DeclRegion>(Target))
        return false;

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return false;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    const CXXDestructorCall &Dtor = cast<CXXDestructorCall>(Call);

    // FIXME: We don't handle constructors or destructors for arrays properly.
    const MemRegion *Target = Dtor.getCXXThisVal().getAsRegion();
    if (Target && isa<ElementRegion>(Target))
      return false;

    break;
  }
  case CE_CXXAllocator:
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return false;
  case CE_Block: {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
    break;
  }
  case CE_ObjCMessage:
    if (!Opts.mayInlineObjCMethod())
      return false;
    if (!(getAnalysisManager().options.IPAMode == DynamicDispatch ||
          getAnalysisManager().options.IPAMode == DynamicDispatchBifurcate))
      return false;
    break;
  }

  if (!shouldInlineDecl(D, Pred))
    return false;

  if (!ParentOfCallee)
    ParentOfCallee = CallerSFC;

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return 0;

  assert(ReplayState == (const void*)CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
                                             Call, *this);
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    return State->BindExpr(E, LCtx, C->getCXXThisVal());
  }

  // Conjure a symbol if the return value is unknown.
  QualType ResultTy = Call.getResultType();
  SValBuilder &SVB = getSValBuilder();
  unsigned Count = currBldrCtx->blockCount();
  SVal R = SVB.conjureSymbolVal(0, E, LCtx, ResultTy, Count);
  return State->BindExpr(E, LCtx, R);
}

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate) {
  // Make sure we have the most recent state attached to the call.
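  // (Clarifying note, added: the incoming CallTemplate may carry an older
  // state (see the WARNING in evalCall() above), so we re-clone it with
  // Pred's current state before deciding how to evaluate the call.)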
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  if (!getAnalysisManager().shouldInlineCall()) {
    conservativeEvalCall(*Call, Bldr, Pred, State);
    return;
  }
  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();
  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);

  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (D) {
      if (RD.mayHaveOtherDefinitions()) {
        // Explore with and without inlining the call.
        if (getAnalysisManager().options.IPAMode == DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (getAnalysisManager().options.IPAMode != DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
        State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inline failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
  return;
}


void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {

  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}
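// Illustrative sketch (added commentary, not part of the original file): with
// IPAMode == DynamicDispatchBifurcate, a message whose receiver's dynamic type
// is only inferred, e.g.
//   MyBase *obj = getObject();   // runtime type merely assumed to be MyBase
//   [obj doWork];
// is explored along two paths the first time 'obj' is dispatched on: one that
// inlines the candidate -doWork definition (DynamicDispatchModeInlined) and
// one that evaluates the call conservatively (DynamicDispatchModeConservative),
// so neither path's coverage is lost. ('MyBase', 'getObject', and '-doWork'
// are hypothetical names used only for illustration.)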