ExprEngineCallAndReturn.cpp (clang 8.0.0svn)
//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
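// For example (illustrative, not part of the original comment): for a callee
// whose body ends in "return x;", this yields that ReturnStmt together with
// the CFGBlock that contains it; if no suitable statement can be found in the
// callee's stack frame, one or both entries of the returned pair are null.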
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
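///
/// Illustrative example (not part of the original comment; hypothetical
/// types):
/// \code
///   struct Base    { virtual Base *clone(); };
///   struct Derived : Base { Derived *clone() override; }; // covariant return
/// \endcode
/// If a call written against Base* is devirtualized to Derived::clone, the
/// returned Derived* is adjusted with a derived-to-base cast so that it
/// matches the type the caller's expression expects.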
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
                                            const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of the call exit sequence)
/// 2. Bind the return value
/// 3. Run RemoveDeadBindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
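///
/// Sketch of the resulting node chain (illustrative, not from the original
/// comment):
/// \code
///   CEBNode --bind return value--> BindedRetNode --removeDead--> CleanedNodes
///   CleanedNodes --CallExitEnd--> CEENode --post-call/post-stmt checks--> Dst
/// \endcode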
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1: CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*WasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().mayInlineCXXAllocator())) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
         PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      const CFG *CalleeCFG = CalleeADC->getCFG();
      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a flag, specifying whether we
// consider this region's type information precise along the given path.
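//
// Illustrative scenario (not part of the original comment): for an
// Objective-C message such as [obj foo] where the dynamic type of 'obj' is
// only inferred, the engine may both inline -foo (marking obj's region as
// DynamicDispatchModeInlined) and evaluate the message conservatively
// (DynamicDispatchModeConservative), so neither outcome is lost.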
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (Optional<SVal> V =
            getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
                 ->getStackFrame()->getParent()
                 ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}

void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
                                            ExplodedNode *Pred,
                                            const CallEvent &Call) {
  ProgramStateRef State = Pred->getState();
  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
  if (CleanedState == State) {
    Dst.insert(Pred);
    return;
  }

  const Expr *E = Call.getOriginExpr();
  const LocationContext *LC = Call.getLocationContext();
  NodeBuilder B(Pred, Dst, *currBldrCtx);
  static SimpleProgramPointTag Tag("ExprEngine",
                                   "Finish argument construction");
  PreStmt PP(E, LC, &Tag);
  B.generateNode(PP, CleanedState, Pred);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
                                            Call, *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (auto I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, Call);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstArgumentCleanup,
                                             Call, *this);
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
                                     RTC->getConstructionContext(), CallOpts);
    assert(Target.getAsRegion());
    // Invalidate the region so that it doesn't look uninitialized. Don't
    // notify the checkers.
    State = State->invalidateRegions(Target.getAsRegion(), E, Count, LCtx,
                                     /* CausedByPointerEscape=*/false, nullptr,
                                     &Call, nullptr);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
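// Illustrative effect (not part of the original comment): for a call such as
// foo(&x) that is not inlined, the region bound to 'x' is invalidated (its
// prior bindings are dropped) and the call expression is bound to a freshly
// conjured symbol of the declared result type.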
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.mayInlineCXXAllocator())
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.mayInlineCXXTemporaryDtors())
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor && !Opts.mayInlineCXXTemporaryDtors())
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.mayInlineCXXAllocator())
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.mayInlineObjCMethod())
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
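///
/// For example (illustrative only, hypothetical type; not from the original
/// comment), a class such as
/// \code
///   class MyVec { public: int *begin(); using iterator = int *; };
/// \endcode
/// matches this heuristic and is treated as a container.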
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
      return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
static bool mayInlineDecl(AnalysisManager &AMgr,
                          AnalysisDeclContext *CalleeADC) {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.mayInlineCXXStandardLibrary())
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.mayInlineCXXContainerMethods())
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.mayInlineCXXSharedPtrDtor())
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
    return false;

  // It is possible that the live variables analysis cannot be
  // run. If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(getAnalysisManager(), CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  const CFG *CalleeCFG = CalleeADC->getCFG();

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
       || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.getMaxTimesInlineLarge()) &&
      CalleeCFG->getNumBlockIDs() >=
        Opts.getMinCFGSizeTreatFunctionsAsLarge()) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal &&
      (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
       || IsRecursive))
    return false;

  return true;
}

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
      State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on the "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inlining failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}