//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ExprEngine's support for calls and returns.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumOfDynamicDispatchPathSplits,
  "The # of times we split the path due to imprecise dynamic dispatch info");

STATISTIC(NumInlinedCalls,
  "The # of times we inlined a call");

STATISTIC(NumReachedInlineCountMax,
  "The # of times we reached inline count maximum");

void ExprEngine::processCallEnter(NodeBuilderContext& BC, CallEnter CE,
                                  ExplodedNode *Pred) {
  // Get the entry block in the CFG of the callee.
  const StackFrameContext *calleeCtx = CE.getCalleeContext();
  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, calleeCtx);

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(BC, Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}

// Find the last statement on the path to the exploded node and the
// corresponding Block.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrameContext *SF =
      Node->getLocationContext()->getCurrentStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getLocationContext()->getCurrentStackFrame() == SF) {
      if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        Optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        Blk = BE->getSrc();
      }
    } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}
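
// Illustrative sketch (hypothetical path, not from the original source): for
// an exploded path such as
//
//   CallEnter (SF) -> ... -> StmtPoint 'return x;' (SF) -> CallExitBegin (SF)
//
// getLastStmt(), starting at the CallExitBegin node, walks predecessors until
// it hits the StmtPoint in the same stack frame SF and returns the
// 'return x;' statement together with the CFG block recorded from the last
// BlockEdge it saw. Implicitly-generated calls (a CallExitEnd with no
// call-site statement) are skipped by backing up to their matching CallEnter
// first.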

/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!V.getAs<Loc>())
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}
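
// Illustrative example (hypothetical types, not part of the original source):
//
//   struct Base    { virtual Base *clone(); };
//   struct Derived : Base { Derived *clone() override; };
//
//   Base *b = makeDerived();
//   Base *copy = b->clone();   // devirtualized to Derived::clone()
//
// Here the inlined callee returns a 'Derived *' (ActualTy) while the call
// expression has type 'Base *' (ExpectedTy), so adjustReturnValue() applies a
// derived-to-base cast via StoreManager::evalDerivedToBase() before the value
// is bound to the call expression in the caller.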

void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext& BC,
                                           ExplodedNode *Pred,
                                           ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    Dst.Add(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             LCtx->getAnalysisDeclContext()->getBody(),
             ProgramPoint::PostStmtPurgeDeadSymbolsKind);
}

static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
    const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}

/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run remove-dead-bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd (switch to the caller context)
/// 5. PostStmt<CallExpr>
void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
  // Step 1 CEBNode was generated before the call.
  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
  const StackFrameContext *calleeCtx =
      CEBNode->getLocationContext()->getCurrentStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *callerCtx =
    calleeCtx->getParent()->getCurrentStackFrame();

  const Stmt *CE = calleeCtx->getCallSite();
  ProgramStateRef state = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(CEBNode);

  // Generate a CallEvent /before/ cleaning the state, so that we can get the
  // correct value for 'this' (if necessary).
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);

  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      SVal V = state->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
        QualType ReturnedTy =
          CallEvent::getDeclaredResultType(calleeCtx->getDecl());
        if (!ReturnedTy.isNull()) {
          if (const Expr *Ex = dyn_cast<Expr>(CE)) {
            V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
                                  getStoreManager());
          }
        }
      }

      state = state->BindExpr(CE, callerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      loc::MemRegionVal This =
        svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
      SVal ThisV = state->getSVal(This);
      ThisV = state->getSVal(ThisV.castAs<Loc>());
      state = state->BindExpr(CCE, callerCtx, ThisV);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = state->getSVal(CNE, callerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Step 3: BindedRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
    PostStmt Loc(LastSt, calleeCtx, &retValBind);
    bool isNew;
    ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
    BindedRetNode->addPredecessor(CEBNode, G);
    if (!isNew)
      return;

    NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
    currBldrCtx = &Ctx;
    // Here, we call the Symbol Reaper with 0 statement and callee location
    // context, telling it to clean up everything in the callee's context
    // (and its children). We use the callee's function body as a diagnostic
    // statement, with which the program point will be associated.
    removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
               calleeCtx->getAnalysisDeclContext()->getBody(),
               ProgramPoint::PostStmtPurgeDeadSymbolsKind);
    currBldrCtx = nullptr;
  } else {
    CleanedNodes.Add(CEBNode);
  }

  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
                                 E = CleanedNodes.end(); I != E; ++I) {

    // Step 4: Generate the CallExit and leave the callee's context.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(calleeCtx, callerCtx);
    bool isNew;
    ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();

    ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
    CEENode->addPredecessor(*I, G);
    if (!isNew)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList
    NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
    SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
                                                      &Ctx);
    SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());

    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostCall;
    if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
      ExplodedNodeSet DstPostPostCallCallback;
      getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
                                                 CEENode, *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
      for (auto I : DstPostPostCallCallback) {
        getCheckerManager().runCheckersForNewAllocator(
            CNE,
            *getObjectUnderConstruction(I->getState(), CNE,
                                        calleeCtx->getParent()),
            DstPostCall, I, *this,
            /*WasInlined=*/true);
      }
    } else {
      getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
                                                 *UpdatedCall, *this,
                                                 /*WasInlined=*/true);
    }
    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*WasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().mayInlineCXXAllocator())) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*WasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
         PSI != PSE; ++PSI) {
      Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
                                    calleeCtx->getIndex()+1);
    }
  }
}
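
// Illustrative sketch of the node sequence for an inlined call such as
// 'int y = foo();' (hypothetical caller code, not part of the original
// source):
//
//   ... callee nodes ...
//   CallExitBegin                               (CEBNode, step 1)
//   PostStmt<ReturnStmt> "Bind Return Value"    (return value bound, step 2)
//   PostStmtPurgeDeadSymbols                    (dead bindings removed, step 3)
//   CallExitEnd                                 (caller's stack frame, step 4)
//   PostStmt<CallExpr>                          (post-call checks, step 5)
//
// after which the caller's next CFG element is enqueued on the work list.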

void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
                                    bool &IsRecursive, unsigned &StackDepth) {
  IsRecursive = false;
  StackDepth = 0;

  while (LCtx) {
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
      const Decl *DI = SFC->getDecl();

      // Mark recursive (and mutually recursive) functions and always count
      // them when measuring the stack depth.
      if (DI == D) {
        IsRecursive = true;
        ++StackDepth;
        LCtx = LCtx->getParent();
        continue;
      }

      // Do not count the small functions when determining the stack depth.
      AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
      const CFG *CalleeCFG = CalleeADC->getCFG();
      if (CalleeCFG->getNumBlockIDs() > AMgr.options.getAlwaysInlineSize())
        ++StackDepth;
    }
    LCtx = LCtx->getParent();
  }
}

// The GDM component containing the dynamic dispatch bifurcation info. When
// the exact type of the receiver is not known, we want to explore both paths -
// one on which we do inline it and the other one on which we don't. This is
// done to ensure we do not drop coverage.
// This is the map from the receiver region to a bool, specifying whether we
// consider this region's information precise along the given path.
namespace {
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

REGISTER_TRAIT_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                                 CLANG_ENTO_PROGRAMSTATE_MAP(const MemRegion *,
                                                             unsigned))
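
// Illustrative sketch (hypothetical Objective-C code, not part of the original
// source): for a message send like
//
//   [obj doWork];   // dynamic type of 'obj' is not known precisely
//
// the receiver's region is recorded in DynamicDispatchBifurcationMap and the
// path is split once: one successor state is tagged
// DynamicDispatchModeInlined and inlines the most likely definition, the
// other is tagged DynamicDispatchModeConservative and evaluates the call
// conservatively. See ExprEngine::BifurcateCall() below.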

bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
                            NodeBuilder &Bldr, ExplodedNode *Pred,
                            ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
                                                         cast<BlockDecl>(D),
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
    CalleeADC->getStackFrame(ParentOfCallee, CallE,
                             currBldrCtx->getBlock(),
                             currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      Engine.getWorkList()->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Mark the decl as visited.
  if (VisitedCallees)
    VisitedCallees->insert(D);

  return true;
}

static ProgramStateRef getInlineFailedState(ProgramStateRef State,
                                            const Stmt *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}

void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
                               ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> CallTemplate
    = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());

  // Evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
       I != E; ++I) {
    evalCall(dstCallEvaluated, *I, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}

void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
                          const CallEvent &Call) {
  // WARNING: At this time, the state attached to 'Call' may be older than the
  // state in 'Pred'. This is a minor optimization since CheckerManager will
  // use an updated CallEvent instance when calling checkers, but if 'Call' is
  // ever used directly in this function all callers should be updated to pass
  // the most recent state. (It is probably not worth doing the work here since
  // for some callers this will not be necessary.)

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, Call, *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if they can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
                                             Call, *this);

  // Finally, run any post-call checks.
  getCheckerManager().runCheckersForPostCall(Dst, dstCallEvaluated,
                                             Call, *this);
}

ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
                                            const LocationContext *LCtx,
                                            ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = currBldrCtx->blockCount();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) =
        prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
                                     RTC->getConstructionContext(), CallOpts);
    assert(Target.getAsRegion());
    // Invalidate the region so that it doesn't look uninitialized. Don't
    // notify the checkers.
    State = State->invalidateRegions(Target.getAsRegion(), E, Count, LCtx,
                                     /* CausedByPointerEscape=*/false, nullptr,
                                     &Call, nullptr);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    bool IsHeapPointer = false;
    if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
      if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
        // FIXME: Delegate this to evalCall in MallocChecker?
        IsHeapPointer = true;
      }

    R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
                      : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
                                                     Count);
  }
  return State->BindExpr(E, LCtx, R);
}
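
// Illustrative example (hypothetical caller code, not part of the original
// source): for a call that is not inlined, e.g.
//
//   int n = external();
//
// bindReturnValue() binds a fresh conjured symbol of type 'int' to the call
// expression, so later constraints such as 'n > 0' can still be recorded
// against it. For a 'new T' whose replaceable global operator new is modeled
// here, the symbol is conjured on the heap instead
// (getConjuredHeapSymbolVal).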

// Conservatively evaluate call by invalidating regions and binding
// a conjured return value.
void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
                                      ExplodedNode *Pred,
                                      ProgramStateRef State) {
  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
  State = bindReturnValue(Call, Pred->getLocationContext(), State);

  // And make the result node.
  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
}

ExprEngine::CallInlinePolicy
ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
                              AnalyzerOptions &Opts,
                              const ExprEngine::EvalCallOptions &CallOpts) {
  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getCurrentStackFrame();
  switch (Call.getKind()) {
  case CE_Function:
  case CE_Block:
    break;
  case CE_CXXMember:
  case CE_CXXMemberOperator:
    if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
      return CIP_DisallowedAlways;
    break;
  case CE_CXXConstructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
      return CIP_DisallowedAlways;

    const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);

    const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();

    auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
    const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
                                        : nullptr;

    if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
        !Opts.mayInlineCXXAllocator())
      return CIP_DisallowedOnce;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    // Even once we do, we still need to be careful about implicitly-generated
    // initializers for array fields in default move/copy constructors.
    // We still allow construction into ElementRegion targets when they don't
    // represent array elements.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Inlining constructors requires including initializers in the CFG.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
    (void)ADC;

    // If the destructor is trivial, it's always safe to inline the constructor.
    if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
      break;

    // For other types, only inline constructors if destructor inlining is
    // also enabled.
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
      // If we don't handle temporary destructors, we shouldn't inline
      // their constructors.
      if (CallOpts.IsTemporaryCtorOrDtor &&
          !Opts.includeTemporaryDtorsInCFG())
        return CIP_DisallowedOnce;

      // If we did not find the correct this-region, it would be pointless
      // to inline the constructor. Instead we will simply invalidate
      // the fake temporary target.
      if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
        return CIP_DisallowedOnce;

      // If the temporary is lifetime-extended by binding it to a reference-type
      // field within an aggregate, automatic destructors don't work properly.
      if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
        return CIP_DisallowedOnce;
    }

    break;
  }
  case CE_CXXDestructor: {
    if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
      return CIP_DisallowedAlways;

    // Inlining destructors requires building the CFG correctly.
    const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
    assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
    (void)ADC;

    // FIXME: We don't handle constructors or destructors for arrays properly.
    if (CallOpts.IsArrayCtorOrDtor)
      return CIP_DisallowedOnce;

    // Allow disabling temporary destructor inlining with a separate option.
    if (CallOpts.IsTemporaryCtorOrDtor && !Opts.mayInlineCXXTemporaryDtors())
      return CIP_DisallowedOnce;

    // If we did not find the correct this-region, it would be pointless
    // to inline the destructor. Instead we will simply invalidate
    // the fake temporary target.
    if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
      return CIP_DisallowedOnce;
    break;
  }
  case CE_CXXAllocator:
    if (Opts.mayInlineCXXAllocator())
      break;
    // Do not inline allocators until we model deallocators.
    // This is unfortunate, but basically necessary for smart pointers and such.
    return CIP_DisallowedAlways;
  case CE_ObjCMessage:
    if (!Opts.mayInlineObjCMethod())
      return CIP_DisallowedAlways;
    if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
          Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
      return CIP_DisallowedAlways;
    break;
  }

  return CIP_Allowed;
}

/// Returns true if the given C++ class contains a member with the given name.
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
                      StringRef Name) {
  const IdentifierInfo &II = Ctx.Idents.get(Name);
  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
  if (!RD->lookup(DeclName).empty())
    return true;

  CXXBasePaths Paths(false, false, false);
  if (RD->lookupInBases(
          [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
            return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
          },
          Paths))
    return true;

  return false;
}

/// Returns true if the given C++ class is a container or iterator.
///
/// Our heuristic for this is whether it contains a method named 'begin()' or a
/// nested type named 'iterator' or 'iterator_category'.
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
  return hasMember(Ctx, RD, "begin") ||
         hasMember(Ctx, RD, "iterator") ||
         hasMember(Ctx, RD, "iterator_category");
}
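
// Illustrative example (hypothetical class, not part of the original source):
//
//   class MyVec {
//   public:
//     using iterator = int *;
//     iterator begin();
//   };
//
// is treated as a container by this heuristic because it declares both
// 'begin' and a nested 'iterator' name; any one of the three names checked
// above would be enough on its own.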

/// Returns true if the given function refers to a method of a C++ container
/// or iterator.
///
/// We generally do a poor job modeling most containers right now, and might
/// prefer not to inline their methods.
static bool isContainerMethod(const ASTContext &Ctx,
                              const FunctionDecl *FD) {
  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
    return isContainerClass(Ctx, MD->getParent());
  return false;
}

/// Returns true if the given function is the destructor of a class named
/// "shared_ptr".
static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
  if (!Dtor)
    return false;

  const CXXRecordDecl *RD = Dtor->getParent();
  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
    if (II->isStr("shared_ptr"))
      return true;

  return false;
}

/// Returns true if the function in \p CalleeADC may be inlined in general.
///
/// This checks static properties of the function, such as its signature and
/// CFG, to determine whether the analyzer should ever consider inlining it,
/// in any context.
static bool mayInlineDecl(AnalysisManager &AMgr,
                          AnalysisDeclContext *CalleeADC) {
  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
  // FIXME: Do not inline variadic calls.
  if (CallEvent::isVariadic(CalleeADC->getDecl()))
    return false;

  // Check certain C++-related inlining policies.
  ASTContext &Ctx = CalleeADC->getASTContext();
  if (Ctx.getLangOpts().CPlusPlus) {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
      // Conditionally control the inlining of template functions.
      if (!Opts.mayInlineTemplateFunctions())
        if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
          return false;

      // Conditionally control the inlining of C++ standard library functions.
      if (!Opts.mayInlineCXXStandardLibrary())
        if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
          if (AnalysisDeclContext::isInStdNamespace(FD))
            return false;

      // Conditionally control the inlining of methods on objects that look
      // like C++ containers.
      if (!Opts.mayInlineCXXContainerMethods())
        if (!AMgr.isInCodeFile(FD->getLocation()))
          if (isContainerMethod(Ctx, FD))
            return false;

      // Conditionally control the inlining of the destructor of C++ shared_ptr.
      // We don't currently do a good job modeling shared_ptr because we can't
      // see the reference count, so treating it as opaque is probably the best
      // idea.
      if (!Opts.mayInlineCXXSharedPtrDtor())
        if (isCXXSharedPtrDtor(FD))
          return false;
    }
  }

  // It is possible that the CFG cannot be constructed.
  // Be safe, and check if the CalleeCFG is valid.
  const CFG *CalleeCFG = CalleeADC->getCFG();
  if (!CalleeCFG)
    return false;

  // Do not inline large functions.
  if (CalleeCFG->getNumBlockIDs() > Opts.getMaxInlinableSize())
    return false;

  // It is possible that the live variables analysis cannot be
  // run. If so, bail out.
  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
    return false;

  return true;
}
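
// Illustrative examples of declarations rejected by mayInlineDecl()
// (hypothetical code, not part of the original source):
//
//   void log(const char *fmt, ...);   // variadic: never inlined
//   std::string to_string(int);       // 'std' function in a system header,
//                                     // skipped unless
//                                     // mayInlineCXXStandardLibrary() is set
//
// Functions whose CFG exceeds getMaxInlinableSize(), or for which the live
// variables analysis cannot be computed, are rejected as well.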

bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
                                  const ExplodedNode *Pred,
                                  const EvalCallOptions &CallOpts) {
  if (!D)
    return false;

  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);

  // The auto-synthesized bodies are essential to inline as they are
  // usually small and commonly used. Note: we should do this check early on to
  // ensure we always inline these calls.
  if (CalleeADC->isBodyAutosynthesized())
    return true;

  if (!AMgr.shouldInlineCall())
    return false;

  // Check if this function has been marked as non-inlinable.
  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
  if (MayInline.hasValue()) {
    if (!MayInline.getValue())
      return false;

  } else {
    // We haven't actually checked the static properties of this function yet.
    // Do that now, and record our decision in the function summaries.
    if (mayInlineDecl(getAnalysisManager(), CalleeADC)) {
      Engine.FunctionSummaries->markMayInline(D);
    } else {
      Engine.FunctionSummaries->markShouldNotInline(D);
      return false;
    }
  }

  // Check if we should inline a call based on its kind.
  // FIXME: this checks both static and dynamic properties of the call, which
  // means we're redoing a bit of work that could be cached in the function
  // summary.
  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
  if (CIP != CIP_Allowed) {
    if (CIP == CIP_DisallowedAlways) {
      assert(!MayInline.hasValue() || MayInline.getValue());
      Engine.FunctionSummaries->markShouldNotInline(D);
    }
    return false;
  }

  const CFG *CalleeCFG = CalleeADC->getCFG();

  // Do not inline if recursive or we've reached max stack frame count.
  bool IsRecursive = false;
  unsigned StackDepth = 0;
  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
      ((CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize())
       || IsRecursive))
    return false;

  // Do not inline large functions too many times.
  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
       Opts.getMaxTimesInlineLarge()) &&
      CalleeCFG->getNumBlockIDs() >=
      Opts.getMinCFGSizeTreatFunctionsAsLarge()) {
    NumReachedInlineCountMax++;
    return false;
  }

  if (HowToInline == Inline_Minimal &&
      (CalleeCFG->getNumBlockIDs() > Opts.getAlwaysInlineSize()
       || IsRecursive))
    return false;

  return true;
}
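
// Illustrative sketch (hypothetical call chain, not part of the original
// source): with an InlineMaxStackDepth of, say, 5, a chain like
//
//   a() -> b() -> c() -> d() -> e() -> f()
//
// stops being inlined once examineStackFrames() reports a depth at the limit,
// unless the callee is small enough (getAlwaysInlineSize()) and not recursive.
// Large callees are additionally capped by getMaxTimesInlineLarge() once
// their CFG reaches getMinCFGSizeTreatFunctionsAsLarge().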

static bool isTrivialObjectAssignment(const CallEvent &Call) {
  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  if (!ICall)
    return false;

  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
  if (!MD)
    return false;
  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
    return false;

  return MD->isTrivial();
}

void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
                                 const CallEvent &CallTemplate,
                                 const EvalCallOptions &CallOpts) {
  // Make sure we have the most recent state attached to the call.
  ProgramStateRef State = Pred->getState();
  CallEventRef<> Call = CallTemplate.cloneWithState(State);

  // Special-case trivial assignment operators.
  if (isTrivialObjectAssignment(*Call)) {
    performTrivialCopy(Bldr, Pred, *Call);
    return;
  }

  // Try to inline the call.
  // The origin expression here is just used as a kind of checksum;
  // this should still be safe even for CallEvents that don't come from exprs.
  const Expr *E = Call->getOriginExpr();

  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
  if (InlinedFailedState) {
    // If we already tried once and failed, make sure we don't retry later.
    State = InlinedFailedState;
  } else {
    RuntimeDefinition RD = Call->getRuntimeDefinition();
    const Decl *D = RD.getDecl();
    if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
      if (RD.mayHaveOtherDefinitions()) {
        AnalyzerOptions &Options = getAnalysisManager().options;

        // Explore with and without inlining the call.
        if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
          BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
          return;
        }

        // Don't inline if we're not in any dynamic dispatch mode.
        if (Options.getIPAMode() != IPAK_DynamicDispatch) {
          conservativeEvalCall(*Call, Bldr, Pred, State);
          return;
        }
      }

      // We are not bifurcating and we do have a Decl, so just inline.
      if (inlineCall(*Call, D, Bldr, Pred, State))
        return;
    }
  }

  // If we can't inline it, handle the return value and invalidate the regions.
  conservativeEvalCall(*Call, Bldr, Pred, State);
}

void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
                               const CallEvent &Call, const Decl *D,
                               NodeBuilder &Bldr, ExplodedNode *Pred) {
  assert(BifurReg);
  BifurReg = BifurReg->StripCasts();

  // Check if we've performed the split already - note, we only want
  // to split the path once per memory region.
  ProgramStateRef State = Pred->getState();
  const unsigned *BState =
      State->get<DynamicDispatchBifurcationMap>(BifurReg);
  if (BState) {
    // If we are on "inline path", keep inlining if possible.
    if (*BState == DynamicDispatchModeInlined)
      if (inlineCall(Call, D, Bldr, Pred, State))
        return;
    // If inline failed, or we are on the path where we assume we
    // don't have enough info about the receiver to inline, conjure the
    // return value and invalidate the regions.
    conservativeEvalCall(Call, Bldr, Pred, State);
    return;
  }

  // If we got here, this is the first time we process a message to this
  // region, so split the path.
  ProgramStateRef IState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeInlined);
  inlineCall(Call, D, Bldr, Pred, IState);

  ProgramStateRef NoIState =
      State->set<DynamicDispatchBifurcationMap>(BifurReg,
                                                DynamicDispatchModeConservative);
  conservativeEvalCall(Call, Bldr, Pred, NoIState);

  NumOfDynamicDispatchPathSplits++;
}

void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);

  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);

  if (RS->getRetValue()) {
    for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
                                   ei = dstPreVisit.end(); it != ei; ++it) {
      B.generateNode(RS, *it, (*it)->getState());
    }
  }
}