clang  8.0.0svn
ExprEngineCallAndReturn.cpp
1 //=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines ExprEngine's support for calls and returns.
11 //
12 //===----------------------------------------------------------------------===//
13 
17 #include "clang/AST/DeclCXX.h"
22 #include "llvm/ADT/SmallSet.h"
23 #include "llvm/ADT/Statistic.h"
24 #include "llvm/Support/SaveAndRestore.h"
25 
26 using namespace clang;
27 using namespace ento;
28 
29 #define DEBUG_TYPE "ExprEngine"
30 
31 STATISTIC(NumOfDynamicDispatchPathSplits,
32  "The # of times we split the path due to imprecise dynamic dispatch info");
33 
34 STATISTIC(NumInlinedCalls,
35  "The # of times we inlined a call");
36 
37 STATISTIC(NumReachedInlineCountMax,
38  "The # of times we reached inline count maximum");
39 
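/// Generate the entry node of the callee: build the BlockEdge into the
/// callee's CFG, notify checkers that the function is being entered, and
/// enqueue the resulting nodes onto the worklist.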
40 void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
41  ExplodedNode *Pred) {
42  // Get the entry block in the CFG of the callee.
43  const StackFrameContext *calleeCtx = CE.getCalleeContext();
44  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
45  const CFGBlock *Entry = CE.getEntry();
46 
47  // Validate the CFG.
48  assert(Entry->empty());
49  assert(Entry->succ_size() == 1);
50 
51  // Get the solitary successor.
52  const CFGBlock *Succ = *(Entry->succ_begin());
53 
54  // Construct an edge representing the starting location in the callee.
55  BlockEdge Loc(Entry, Succ, calleeCtx);
56 
57  ProgramStateRef state = Pred->getState();
58 
59  // Construct a new node, notify checkers that analysis of the function has
60  // begun, and add the resultant nodes to the worklist.
61  bool isNew;
62  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63  Node->addPredecessor(Pred, G);
64  if (isNew) {
65  ExplodedNodeSet DstBegin;
66  processBeginOfFunction(BC, Node, DstBegin, Loc);
67  Engine.enqueue(DstBegin);
68  }
69 }
70 
71 // Find the last statement on the path to the exploded node and the
72 // corresponding Block.
73 static std::pair<const Stmt*,
74  const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
75  const Stmt *S = nullptr;
76  const CFGBlock *Blk = nullptr;
77  const StackFrameContext *SF = Node->getStackFrame();
78 
79  // Back up through the ExplodedGraph until we reach a statement node in this
80  // stack frame.
81  while (Node) {
82  const ProgramPoint &PP = Node->getLocation();
83 
84  if (PP.getStackFrame() == SF) {
85  if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
86  S = SP->getStmt();
87  break;
88  } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
89  S = CEE->getCalleeContext()->getCallSite();
90  if (S)
91  break;
92 
93  // If there is no statement, this is an implicitly-generated call.
94  // We'll walk backwards over it and then continue the loop to find
95  // an actual statement.
96  Optional<CallEnter> CE;
97  do {
98  Node = Node->getFirstPred();
99  CE = Node->getLocationAs<CallEnter>();
100  } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
101 
102  // Continue searching the graph.
103  } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
104  Blk = BE->getSrc();
105  }
106  } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
107  // If we reached the CallEnter for this function, it has no statements.
108  if (CE->getCalleeContext() == SF)
109  break;
110  }
111 
112  if (Node->pred_empty())
113  return std::make_pair(nullptr, nullptr);
114 
115  Node = *Node->pred_begin();
116  }
117 
118  return std::make_pair(S, Blk);
119 }
120 
121 /// Adjusts a return value when the called function's return type does not
122 /// match the caller's expression type. This can happen when a dynamic call
123 /// is devirtualized, and the overriding method has a covariant (more specific)
124 /// return type than the parent's method. For C++ objects, this means we need
125 /// to add base casts.
126 static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
127  StoreManager &StoreMgr) {
128  // For now, the only adjustments we handle apply only to locations.
129  if (!V.getAs<Loc>())
130  return V;
131 
132  // If the types already match, don't do any unnecessary work.
133  ExpectedTy = ExpectedTy.getCanonicalType();
134  ActualTy = ActualTy.getCanonicalType();
135  if (ExpectedTy == ActualTy)
136  return V;
137 
138  // No adjustment is needed between Objective-C pointer types.
139  if (ExpectedTy->isObjCObjectPointerType() &&
140  ActualTy->isObjCObjectPointerType())
141  return V;
142 
143  // C++ object pointers may need "derived-to-base" casts.
144  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
145  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
146  if (ExpectedClass && ActualClass) {
147  CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
148  /*DetectVirtual=*/false);
149  if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
150  !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
151  return StoreMgr.evalDerivedToBase(V, Paths.front());
152  }
153  }
154 
155  // Unfortunately, Objective-C does not enforce that overridden methods have
156  // covariant return types, so we can't assert that that never happens.
157  // Be safe and return UnknownVal().
158  return UnknownVal();
159 }
160 
161 void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
162  ExplodedNode *Pred,
163  ExplodedNodeSet &Dst) {
164  // Find the last statement in the function and the corresponding basic block.
165  const Stmt *LastSt = nullptr;
166  const CFGBlock *Blk = nullptr;
167  std::tie(LastSt, Blk) = getLastStmt(Pred);
168  if (!Blk || !LastSt) {
169  Dst.Add(Pred);
170  return;
171  }
172 
173  // Here, we destroy the current location context. We use the current
174  // function's entire body as a diagnostic statement, with which the program
175  // point will be associated. However, we only want to use LastStmt as a
176  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
177  // is dead.
178  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
179  const LocationContext *LCtx = Pred->getLocationContext();
180  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
181  LCtx->getAnalysisDeclContext()->getBody(),
182  ProgramPoint::PostStmtPurgeDeadSymbolsKind);
183 }
184 
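/// Returns true if the callee that was actually inlined (the runtime
/// definition in the callee's stack frame) is a different declaration than
/// the one the call refers to statically, or if the static declaration is
/// unavailable.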
185 static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
186  const StackFrameContext *calleeCtx) {
187  const Decl *RuntimeCallee = calleeCtx->getDecl();
188  const Decl *StaticDecl = Call->getDecl();
189  assert(RuntimeCallee);
190  if (!StaticDecl)
191  return true;
192  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
193 }
194 
195 /// The call exit is simulated with a sequence of nodes, which occur between
196 /// CallExitBegin and CallExitEnd. The following operations occur between the
197 /// two program points:
198 /// 1. CallExitBegin (triggers the start of call exit sequence)
199 /// 2. Bind the return value
200 /// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
201 /// 4. CallExitEnd (switch to the caller context)
202 /// 5. PostStmt<CallExpr>
203 void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
204  // Step 1 CEBNode was generated before the call.
205  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
206  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
207 
208  // The parent context might not be a stack frame, so make sure we
209  // look up the first enclosing stack frame.
210  const StackFrameContext *callerCtx =
211  calleeCtx->getParent()->getStackFrame();
212 
213  const Stmt *CE = calleeCtx->getCallSite();
214  ProgramStateRef state = CEBNode->getState();
215  // Find the last statement in the function and the corresponding basic block.
216  const Stmt *LastSt = nullptr;
217  const CFGBlock *Blk = nullptr;
218  std::tie(LastSt, Blk) = getLastStmt(CEBNode);
219 
220  // Generate a CallEvent /before/ cleaning the state, so that we can get the
221  // correct value for 'this' (if necessary).
222  CallEventManager &CEMgr = getStateManager().getCallEventManager();
223  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
224 
225  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
226 
227  // If the callee returns an expression, bind its value to CallExpr.
228  if (CE) {
229  if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
230  const LocationContext *LCtx = CEBNode->getLocationContext();
231  SVal V = state->getSVal(RS, LCtx);
232 
233  // Ensure that the return type matches the type of the returned Expr.
234  if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
235  QualType ReturnedTy =
236  CallEvent::getDeclaredResultType(calleeCtx->getDecl());
237  if (!ReturnedTy.isNull()) {
238  if (const Expr *Ex = dyn_cast<Expr>(CE)) {
239  V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
240  getStoreManager());
241  }
242  }
243  }
244 
245  state = state->BindExpr(CE, callerCtx, V);
246  }
247 
248  // Bind the constructed object value to CXXConstructExpr.
249  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
250  loc::MemRegionVal This =
251  svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
252  SVal ThisV = state->getSVal(This);
253  ThisV = state->getSVal(ThisV.castAs<Loc>());
254  state = state->BindExpr(CCE, callerCtx, ThisV);
255  }
256 
257  if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
258  // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
259  // while to reach the actual CXXNewExpr element from here, so keep the
260  // region for later use.
261  // Additionally cast the return value of the inlined operator new
262  // (which is of type 'void *') to the correct object type.
263  SVal AllocV = state->getSVal(CNE, callerCtx);
264  AllocV = svalBuilder.evalCast(
265  AllocV, CNE->getType(),
266  getContext().getPointerType(getContext().VoidTy));
267 
268  state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
269  AllocV);
270  }
271  }
272 
273  // Step 3: BindedRetNode -> CleanedNodes
274  // If we can find a statement and a block in the inlined function, run remove
275  // dead bindings before returning from the call. This is important to ensure
276  // that we report issues such as leaks in the stack contexts in which
277  // they occurred.
278  ExplodedNodeSet CleanedNodes;
279  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
280  static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
281  PostStmt Loc(LastSt, calleeCtx, &retValBind);
282  bool isNew;
283  ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
284  BindedRetNode->addPredecessor(CEBNode, G);
285  if (!isNew)
286  return;
287 
288  NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
289  currBldrCtx = &Ctx;
290  // Here, we call the Symbol Reaper with 0 statement and callee location
291  // context, telling it to clean up everything in the callee's context
292  // (and its children). We use the callee's function body as a diagnostic
293  // statement, with which the program point will be associated.
294  removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
295  calleeCtx->getAnalysisDeclContext()->getBody(),
296  ProgramPoint::PostStmtPurgeDeadSymbolsKind);
297  currBldrCtx = nullptr;
298  } else {
299  CleanedNodes.Add(CEBNode);
300  }
301 
302  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
303  E = CleanedNodes.end(); I != E; ++I) {
304 
305  // Step 4: Generate the CallExit and leave the callee's context.
306  // CleanedNodes -> CEENode
307  CallExitEnd Loc(calleeCtx, callerCtx);
308  bool isNew;
309  ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
310 
311  ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
312  CEENode->addPredecessor(*I, G);
313  if (!isNew)
314  return;
315 
316  // Step 5: Perform the post-condition check of the CallExpr and enqueue the
317  // result onto the work list.
318  // CEENode -> Dst -> WorkList
319  NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
320  SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
321  &Ctx);
322  SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());
323 
324  CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
325 
326  ExplodedNodeSet DstPostCall;
327  if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
328  ExplodedNodeSet DstPostPostCallCallback;
329  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
330  CEENode, *UpdatedCall, *this,
331  /*WasInlined=*/true);
332  for (auto I : DstPostPostCallCallback) {
333  getCheckerManager().runCheckersForNewAllocator(
334  CNE,
335  *getObjectUnderConstruction(I->getState(), CNE,
336  calleeCtx->getParent()),
337  DstPostCall, I, *this,
338  /*WasInlined=*/true);
339  }
340  } else {
341  getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
342  *UpdatedCall, *this,
343  /*WasInlined=*/true);
344  }
345  ExplodedNodeSet Dst;
346  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
347  getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
348  *this,
349  /*WasInlined=*/true);
350  } else if (CE &&
351  !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
352  AMgr.getAnalyzerOptions().mayInlineCXXAllocator())) {
353  getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
354  *this, /*WasInlined=*/true);
355  } else {
356  Dst.insert(DstPostCall);
357  }
358 
359  // Enqueue the next element in the block.
360  for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
361  PSI != PSE; ++PSI) {
362  Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
363  calleeCtx->getIndex()+1);
364  }
365  }
366 }
367 
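/// Walk up the location context chain, setting IsRecursive if \p D already
/// appears in an enclosing stack frame and counting into StackDepth every
/// frame whose callee is larger than the always-inline size (recursive
/// frames are always counted).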
368 void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
369  bool &IsRecursive, unsigned &StackDepth) {
370  IsRecursive = false;
371  StackDepth = 0;
372 
373  while (LCtx) {
374  if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
375  const Decl *DI = SFC->getDecl();
376 
377  // Mark recursive (and mutually recursive) functions and always count
378  // them when measuring the stack depth.
379  if (DI == D) {
380  IsRecursive = true;
381  ++StackDepth;
382  LCtx = LCtx->getParent();
383  continue;
384  }
385 
386  // Do not count the small functions when determining the stack depth.
387  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
388  const CFG *CalleeCFG = CalleeADC->getCFG();
389  if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
390  ++StackDepth;
391  }
392  LCtx = LCtx->getParent();
393  }
394 }
395 
396 // The GDM component containing the dynamic dispatch bifurcation info. When
397 // the exact type of the receiver is not known, we want to explore both paths -
398 // one on which we do inline it and the other one on which we don't. This is
399 // done to ensure we do not drop coverage.
400 // This is a map from the receiver region to a bool, specifying whether we
401 // consider this region's information precise or not along the given path.
402 namespace {
403  enum DynamicDispatchMode {
404  DynamicDispatchModeInlined = 1,
405  DynamicDispatchModeConservative
406  };
407 } // end anonymous namespace
408 
409 REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
410  CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
411  unsigned))
412 
413 bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
414  NodeBuilder &Bldr, ExplodedNode *Pred,
415  ProgramStateRef State) {
416  assert(D);
417 
418  const LocationContext *CurLC = Pred->getLocationContext();
419  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
420  const LocationContext *ParentOfCallee = CallerSFC;
421  if (Call.getKind() == CE_Block &&
422  !cast<BlockCall>(Call).isConversionFromLambda()) {
423  const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
424  assert(BR && "If we have the block definition we should have its region");
425  AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
426  ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
427  cast<BlockDecl>(D),
428  BR);
429  }
430 
431  // This may be NULL, but that's fine.
432  const Expr *CallE = Call.getOriginExpr();
433 
434  // Construct a new stack frame for the callee.
435  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
436  const StackFrameContext *CalleeSFC =
437  CalleeADC->getStackFrame(ParentOfCallee, CallE,
438  currBldrCtx->getBlock(),
439  currStmtIdx);
440 
441  CallEnter Loc(CallE, CalleeSFC, CurLC);
442 
443  // Construct a new state which contains the mapping from actual to
444  // formal arguments.
445  State = State->enterStackFrame(Call, CalleeSFC);
446 
447  bool isNew;
448  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
449  N->addPredecessor(Pred, G);
450  if (isNew)
451  Engine.getWorkList()->enqueue(N);
452  }
453 
454  // If we decided to inline the call, the successor has been manually
455  // added onto the work list so remove it from the node builder.
456  Bldr.takeNodes(Pred);
457 
458  NumInlinedCalls++;
459  Engine.FunctionSummaries->bumpNumTimesInlined(D);
460 
461  // Mark the decl as visited.
462  if (VisitedCallees)
463  VisitedCallees->insert(D);
464 
465  return true;
466 }
467 
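/// If the state carries a ReplayWithoutInlining note for this call site,
/// return a copy of the state with the note removed; otherwise return null.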
468 static ProgramStateRef getInlineFailedState(ProgramStateRef State,
469  const Stmt *CallE) {
470  const void *ReplayState = State->get<ReplayWithoutInlining>();
471  if (!ReplayState)
472  return nullptr;
473 
474  assert(ReplayState == CallE && "Backtracked to the wrong call.");
475  (void)CallE;
476 
477  return State->remove<ReplayWithoutInlining>();
478 }
479 
480 void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
481  ExplodedNodeSet &dst) {
482  // Perform the previsit of the CallExpr.
483  ExplodedNodeSet dstPreVisit;
484  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
485 
486  // Get the call in its initial state. We use this as a template to perform
487  // all the checks.
488  CallEventManager &CEMgr = getStateManager().getCallEventManager();
489  CallEventRef<> CallTemplate
490  = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
491 
492  // Evaluate the function call. We try each of the checkers
493  // to see if they can evaluate the function call.
494  ExplodedNodeSet dstCallEvaluated;
495  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
496  I != E; ++I) {
497  evalCall(dstCallEvaluated, *I, *CallTemplate);
498  }
499 
500  // Finally, perform the post-condition check of the CallExpr and store
501  // the created nodes in 'Dst'.
502  // Note that if the call was inlined, dstCallEvaluated will be empty.
503  // The post-CallExpr check will occur in processCallExit.
504  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
505  *this);
506 }
507 
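/// Mark any argument constructors of the given call whose target objects are
/// still tracked as "under construction" as finished, so that the
/// corresponding temporaries are no longer considered mid-construction.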
508 ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
509  const CallEvent &Call) {
510  const Expr *E = Call.getOriginExpr();
511  // FIXME: Constructors to placement arguments of operator new
512  // are not supported yet.
513  if (!E || isa<CXXNewExpr>(E))
514  return State;
515 
516  const LocationContext *LC = Call.getLocationContext();
517  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
518  unsigned I = Call.getASTArgumentIndex(CallI);
519  if (Optional<SVal> V =
520  getObjectUnderConstruction(State, {E, I}, LC)) {
521  SVal VV = *V;
522  (void)VV;
523  assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
524  ->getStackFrame()->getParent()
525  ->getStackFrame() == LC->getStackFrame());
526  State = finishObjectConstruction(State, {E, I}, LC);
527  }
528  }
529 
530  return State;
531 }
532 
533 void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
534  ExplodedNode *Pred,
535  const CallEvent &Call) {
536  ProgramStateRef State = Pred->getState();
537  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
538  if (CleanedState == State) {
539  Dst.insert(Pred);
540  return;
541  }
542 
543  const Expr *E = Call.getOriginExpr();
544  const LocationContext *LC = Call.getLocationContext();
545  NodeBuilder B(Pred, Dst, *currBldrCtx);
546  static SimpleProgramPointTag Tag("ExprEngine",
547  "Finish argument construction");
548  PreStmt PP(E, LC, &Tag);
549  B.generateNode(PP, CleanedState, Pred);
550 }
551 
552 void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
553  const CallEvent &Call) {
554  // WARNING: At this time, the state attached to 'Call' may be older than the
555  // state in 'Pred'. This is a minor optimization since CheckerManager will
556  // use an updated CallEvent instance when calling checkers, but if 'Call' is
557  // ever used directly in this function all callers should be updated to pass
558  // the most recent state. (It is probably not worth doing the work here since
559  // for some callers this will not be necessary.)
560 
561  // Run any pre-call checks using the generic call interface.
562  ExplodedNodeSet dstPreVisit;
563  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
564  Call, *this);
565 
566  // Actually evaluate the function call. We try each of the checkers
567  // to see if they can evaluate the function call, and get a callback at
568  // defaultEvalCall if all of them fail.
569  ExplodedNodeSet dstCallEvaluated;
570  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
571  Call, *this);
572 
573  // If there were other constructors called for object-type arguments
574  // of this call, clean them up.
575  ExplodedNodeSet dstArgumentCleanup;
576  for (auto I : dstCallEvaluated)
577  finishArgumentConstruction(dstArgumentCleanup, I, Call);
578 
579  // Finally, run any post-call checks.
580  getCheckerManager().runCheckersForPostCall(Dst, dstArgumentCleanup,
581  Call, *this);
582 }
583 
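/// Bind the return value of the call to its origin expression: some
/// Objective-C method families return their receiver, constructors return
/// the constructed object, calls that return a C++ object by value get a
/// conjured temporary region, and everything else gets a conjured symbol.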
584 ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
585  const LocationContext *LCtx,
586  ProgramStateRef State) {
587  const Expr *E = Call.getOriginExpr();
588  if (!E)
589  return State;
590 
591  // Some method families have known return values.
592  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
593  switch (Msg->getMethodFamily()) {
594  default:
595  break;
596  case OMF_autorelease:
597  case OMF_retain:
598  case OMF_self: {
599  // These methods return their receivers.
600  return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
601  }
602  }
603  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
604  SVal ThisV = C->getCXXThisVal();
605  ThisV = State->getSVal(ThisV.castAs<Loc>());
606  return State->BindExpr(E, LCtx, ThisV);
607  }
608 
609  SVal R;
610  QualType ResultTy = Call.getResultType();
611  unsigned Count = currBldrCtx->blockCount();
612  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
613  // Conjure a temporary if the function returns an object by value.
614  SVal Target;
615  assert(RTC->getStmt() == Call.getOriginExpr());
616  EvalCallOptions CallOpts; // FIXME: We won't really need those.
617  std::tie(State, Target) =
618  prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
619  RTC->getConstructionContext(), CallOpts);
620  assert(Target.getAsRegion());
621  // Invalidate the region so that it doesn't look uninitialized. Don't notify
622  // the checkers.
623  State = State->invalidateRegions(Target.getAsRegion(), E, Count, LCtx,
624  /* CausedByPointerEscape=*/false, nullptr,
625  &Call, nullptr);
626 
627  R = State->getSVal(Target.castAs<Loc>(), E->getType());
628  } else {
629  // Conjure a symbol if the return value is unknown.
630 
631  // See if we need to conjure a heap pointer instead of
632  // a regular unknown pointer.
633  bool IsHeapPointer = false;
634  if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
635  if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
636  // FIXME: Delegate this to evalCall in MallocChecker?
637  IsHeapPointer = true;
638  }
639 
640  R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
641  : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
642  Count);
643  }
644  return State->BindExpr(E, LCtx, R);
645 }
646 
647 // Conservatively evaluate call by invalidating regions and binding
648 // a conjured return value.
649 void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
650  ExplodedNode *Pred,
651  ProgramStateRef State) {
652  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
653  State = bindReturnValue(Call, Pred->getLocationContext(), State);
654 
655  // And make the result node.
656  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
657 }
658 
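/// Decide, based on the call kind and the analyzer options, whether this call
/// may be inlined at all: CIP_Allowed, CIP_DisallowedOnce (do not inline on
/// this path), or CIP_DisallowedAlways (never inline this callee).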
659 ExprEngine::CallInlinePolicy
660 ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
661  AnalyzerOptions &Opts,
662  const ExprEngine::EvalCallOptions &CallOpts) {
663  const LocationContext *CurLC = Pred->getLocationContext();
664  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
665  switch (Call.getKind()) {
666  case CE_Function:
667  case CE_Block:
668  break;
669  case CE_CXXMember:
670  case CE_CXXMemberOperator:
671  if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
672  return CIP_DisallowedAlways;
673  break;
674  case CE_CXXConstructor: {
675  if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
676  return CIP_DisallowedAlways;
677 
678  const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
679 
680  const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
681 
682  auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
683  const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
684  : nullptr;
685 
686  if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
687  !Opts.mayInlineCXXAllocator())
688  return CIP_DisallowedOnce;
689 
690  // FIXME: We don't handle constructors or destructors for arrays properly.
691  // Even once we do, we still need to be careful about implicitly-generated
692  // initializers for array fields in default move/copy constructors.
693  // We still allow construction into ElementRegion targets when they don't
694  // represent array elements.
695  if (CallOpts.IsArrayCtorOrDtor)
696  return CIP_DisallowedOnce;
697 
698  // Inlining constructors requires including initializers in the CFG.
699  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
700  assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
701  (void)ADC;
702 
703  // If the destructor is trivial, it's always safe to inline the constructor.
704  if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
705  break;
706 
707  // For other types, only inline constructors if destructor inlining is
708  // also enabled.
709  if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
710  return CIP_DisallowedAlways;
711 
712  if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
713  // If we don't handle temporary destructors, we shouldn't inline
714  // their constructors.
715  if (CallOpts.IsTemporaryCtorOrDtor &&
716  !Opts.includeTemporaryDtorsInCFG())
717  return CIP_DisallowedOnce;
718 
719  // If we did not find the correct this-region, it would be pointless
720  // to inline the constructor. Instead we will simply invalidate
721  // the fake temporary target.
722  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
723  return CIP_DisallowedOnce;
724 
725  // If the temporary is lifetime-extended by binding it to a reference-type
726  // field within an aggregate, automatic destructors don't work properly.
727  if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
728  return CIP_DisallowedOnce;
729  }
730 
731  break;
732  }
733  case CE_CXXDestructor: {
734  if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
735  return CIP_DisallowedAlways;
736 
737  // Inlining destructors requires building the CFG correctly.
738  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
739  assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
740  (void)ADC;
741 
742  // FIXME: We don't handle constructors or destructors for arrays properly.
743  if (CallOpts.IsArrayCtorOrDtor)
744  return CIP_DisallowedOnce;
745 
746  // Allow disabling temporary destructor inlining with a separate option.
747  if (CallOpts.IsTemporaryCtorOrDtor && !Opts.mayInlineCXXTemporaryDtors())
748  return CIP_DisallowedOnce;
749 
750  // If we did not find the correct this-region, it would be pointless
751  // to inline the destructor. Instead we will simply invalidate
752  // the fake temporary target.
753  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
754  return CIP_DisallowedOnce;
755  break;
756  }
757  case CE_CXXAllocator:
758  if (Opts.mayInlineCXXAllocator())
759  break;
760  // Do not inline allocators until we model deallocators.
761  // This is unfortunate, but basically necessary for smart pointers and such.
762  return CIP_DisallowedAlways;
763  case CE_ObjCMessage:
764  if (!Opts.mayInlineObjCMethod())
765  return CIP_DisallowedAlways;
766  if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
767  Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
768  return CIP_DisallowedAlways;
769  break;
770  }
771 
772  return CIP_Allowed;
773 }
774 
775 /// Returns true if the given C++ class contains a member with the given name.
776 static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
777  StringRef Name) {
778  const IdentifierInfo &II = Ctx.Idents.get(Name);
779  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
780  if (!RD->lookup(DeclName).empty())
781  return true;
782 
783  CXXBasePaths Paths(false, false, false);
784  if (RD->lookupInBases(
785  [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
786  return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
787  },
788  Paths))
789  return true;
790 
791  return false;
792 }
793 
794 /// Returns true if the given C++ class is a container or iterator.
795 ///
796 /// Our heuristic for this is whether it contains a method named 'begin()' or a
797 /// nested type named 'iterator' or 'iterator_category'.
798 static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
799  return hasMember(Ctx, RD, "begin") ||
800  hasMember(Ctx, RD, "iterator") ||
801  hasMember(Ctx, RD, "iterator_category");
802 }
803 
804 /// Returns true if the given function refers to a method of a C++ container
805 /// or iterator.
806 ///
807 /// We generally do a poor job modeling most containers right now, and might
808 /// prefer not to inline their methods.
809 static bool isContainerMethod(const ASTContext &Ctx,
810  const FunctionDecl *FD) {
811  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
812  return isContainerClass(Ctx, MD->getParent());
813  return false;
814 }
815 
816 /// Returns true if the given function is the destructor of a class named
817 /// "shared_ptr".
818 static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
819  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
820  if (!Dtor)
821  return false;
822 
823  const CXXRecordDecl *RD = Dtor->getParent();
824  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
825  if (II->isStr("shared_ptr"))
826  return true;
827 
828  return false;
829 }
830 
831 /// Returns true if the function in \p CalleeADC may be inlined in general.
832 ///
833 /// This checks static properties of the function, such as its signature and
834 /// CFG, to determine whether the analyzer should ever consider inlining it,
835 /// in any context.
836 static bool mayInlineDecl(AnalysisManager &AMgr,
837  AnalysisDeclContext *CalleeADC) {
838  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
839  // FIXME: Do not inline variadic calls.
840  if (CallEvent::isVariadic(CalleeADC->getDecl()))
841  return false;
842 
843  // Check certain C++-related inlining policies.
844  ASTContext &Ctx = CalleeADC->getASTContext();
845  if (Ctx.getLangOpts().CPlusPlus) {
846  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
847  // Conditionally control the inlining of template functions.
848  if (!Opts.mayInlineTemplateFunctions())
849  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
850  return false;
851 
852  // Conditionally control the inlining of C++ standard library functions.
853  if (!Opts.mayInlineCXXStandardLibrary())
854  if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
855  if (AnalysisDeclContext::isInStdNamespace(FD))
856  return false;
857 
858  // Conditionally control the inlining of methods on objects that look
859  // like C++ containers.
860  if (!Opts.mayInlineCXXContainerMethods())
861  if (!AMgr.isInCodeFile(FD->getLocation()))
862  if (isContainerMethod(Ctx, FD))
863  return false;
864 
865  // Conditionally control the inlining of the destructor of C++ shared_ptr.
866  // We don't currently do a good job modeling shared_ptr because we can't
867  // see the reference count, so treating as opaque is probably the best
868  // idea.
869  if (!Opts.mayInlineCXXSharedPtrDtor())
870  if (isCXXSharedPtrDtor(FD))
871  return false;
872  }
873  }
874 
875  // It is possible that the CFG cannot be constructed.
876  // Be safe, and check if the CalleeCFG is valid.
877  const CFG *CalleeCFG = CalleeADC->getCFG();
878  if (!CalleeCFG)
879  return false;
880 
881  // Do not inline large functions.
882  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
883  return false;
884 
885  // It is possible that the live variables analysis cannot be
886  // run. If so, bail out.
887  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
888  return false;
889 
890  return true;
891 }
892 
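/// Decide whether the given call should be inlined on the current path,
/// combining static properties of the callee cached in the function summary
/// with per-path checks such as the call kind, stack depth, recursion, and
/// how many times a large callee has already been inlined.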
893 bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
894  const ExplodedNode *Pred,
895  const EvalCallOptions &CallOpts) {
896  if (!D)
897  return false;
898 
899  AnalysisManager &AMgr = getAnalysisManager();
900  AnalyzerOptions &Opts = AMgr.options;
901  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
902  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
903 
904  // The auto-synthesized bodies are essential to inline as they are
905  // usually small and commonly used. Note: we should do this check early on to
906  // ensure we always inline these calls.
907  if (CalleeADC->isBodyAutosynthesized())
908  return true;
909 
910  if (!AMgr.shouldInlineCall())
911  return false;
912 
913  // Check if this function has been marked as non-inlinable.
914  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
915  if (MayInline.hasValue()) {
916  if (!MayInline.getValue())
917  return false;
918 
919  } else {
920  // We haven't actually checked the static properties of this function yet.
921  // Do that now, and record our decision in the function summaries.
922  if (mayInlineDecl(getAnalysisManager(), CalleeADC)) {
923  Engine.FunctionSummaries->markMayInline(D);
924  } else {
925  Engine.FunctionSummaries->markShouldNotInline(D);
926  return false;
927  }
928  }
929 
930  // Check if we should inline a call based on its kind.
931  // FIXME: this checks both static and dynamic properties of the call, which
932  // means we're redoing a bit of work that could be cached in the function
933  // summary.
934  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
935  if (CIP != CIP_Allowed) {
936  if (CIP == CIP_DisallowedAlways) {
937  assert(!MayInline.hasValue() || MayInline.getValue());
938  Engine.FunctionSummaries->markShouldNotInline(D);
939  }
940  return false;
941  }
942 
943  const CFG *CalleeCFG = CalleeADC->getCFG();
944 
945  // Do not inline if recursive or we've reached max stack frame count.
946  bool IsRecursive = false;
947  unsigned StackDepth = 0;
948  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
949  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
950  ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
951  || IsRecursive))
952  return false;
953 
954  // Do not inline large functions too many times.
955  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
956  Opts.getMaxTimesInlineLarge()) &&
957  CalleeCFG->getNumBlockIDs() >=
958  Opts.getMinCFGSizeTreatFunctionsAsLarge()) {
959  NumReachedInlineCountMax++;
960  return false;
961  }
962 
963  if (HowToInline == Inline_Minimal &&
964  (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
965  || IsRecursive))
966  return false;
967 
968  return true;
969 }
970 
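/// Returns true if the call is a trivial copy or move assignment operator,
/// which defaultEvalCall models as a simple copy instead of inlining it.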
971 static bool isTrivialObjectAssignment(const CallEvent &Call) {
972  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
973  if (!ICall)
974  return false;
975 
976  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
977  if (!MD)
978  return false;
979  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
980  return false;
981 
982  return MD->isTrivial();
983 }
984 
985 void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
986  const CallEvent &CallTemplate,
987  const EvalCallOptions &CallOpts) {
988  // Make sure we have the most recent state attached to the call.
989  ProgramStateRef State = Pred->getState();
990  CallEventRef<> Call = CallTemplate.cloneWithState(State);
991 
992  // Special-case trivial assignment operators.
993  if (isTrivialObjectAssignment(*Call)) {
994  performTrivialCopy(Bldr, Pred, *Call);
995  return;
996  }
997 
998  // Try to inline the call.
999  // The origin expression here is just used as a kind of checksum;
1000  // this should still be safe even for CallEvents that don't come from exprs.
1001  const Expr *E = Call->getOriginExpr();
1002 
1003  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1004  if (InlinedFailedState) {
1005  // If we already tried once and failed, make sure we don't retry later.
1006  State = InlinedFailedState;
1007  } else {
1008  RuntimeDefinition RD = Call->getRuntimeDefinition();
1009  const Decl *D = RD.getDecl();
1010  if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
1011  if (RD.mayHaveOtherDefinitions()) {
1012  AnalyzerOptions &Options = getAnalysisManager().options;
1013 
1014  // Explore with and without inlining the call.
1015  if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1016  BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
1017  return;
1018  }
1019 
1020  // Don't inline if we're not in any dynamic dispatch mode.
1021  if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1022  conservativeEvalCall(*Call, Bldr, Pred, State);
1023  return;
1024  }
1025  }
1026 
1027  // We are not bifurcating and we do have a Decl, so just inline.
1028  if (inlineCall(*Call, D, Bldr, Pred, State))
1029  return;
1030  }
1031  }
1032 
1033  // If we can't inline it, handle the return value and invalidate the regions.
1034  conservativeEvalCall(*Call, Bldr, Pred, State);
1035 }
1036 
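/// Split the path for a dynamically dispatched call whose receiver's exact
/// type is unknown: inline the candidate definition on one branch and
/// evaluate the call conservatively on the other, recording the decision per
/// receiver region so each region is split at most once.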
1037 void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1038  const CallEvent &Call, const Decl *D,
1039  NodeBuilder &Bldr, ExplodedNode *Pred) {
1040  assert(BifurReg);
1041  BifurReg = BifurReg->StripCasts();
1042 
1043  // Check if we've performed the split already - note, we only want
1044  // to split the path once per memory region.
1045  ProgramStateRef State = Pred->getState();
1046  const unsigned *BState =
1047  State->get<DynamicDispatchBifurcationMap>(BifurReg);
1048  if (BState) {
1049  // If we are on "inline path", keep inlining if possible.
1050  if (*BState == DynamicDispatchModeInlined)
1051  if (inlineCall(Call, D, Bldr, Pred, State))
1052  return;
1053  // If inline failed, or we are on the path where we assume we
1054  // don't have enough info about the receiver to inline, conjure the
1055  // return value and invalidate the regions.
1056  conservativeEvalCall(Call, Bldr, Pred, State);
1057  return;
1058  }
1059 
1060  // If we got here, this is the first time we process a message to this
1061  // region, so split the path.
1062  ProgramStateRef IState =
1063  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1064  DynamicDispatchModeInlined);
1065  inlineCall(Call, D, Bldr, Pred, IState);
1066 
1067  ProgramStateRef NoIState =
1068  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1069  DynamicDispatchModeConservative);
1070  conservativeEvalCall(Call, Bldr, Pred, NoIState);
1071 
1072  NumOfDynamicDispatchPathSplits++;
1073 }
1074 
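/// Transfer function for return statements: run the pre-statement checker
/// callbacks and, if a value is returned, generate a node for it.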
1075 void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
1076  ExplodedNodeSet &Dst) {
1077  ExplodedNodeSet dstPreVisit;
1078  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1079 
1080  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1081 
1082  if (RS->getRetValue()) {
1083  for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1084  ei = dstPreVisit.end(); it != ei; ++it) {
1085  B.generateNode(RS, *it, (*it)->getState());
1086  }
1087  }
1088 }