clang 10.0.0svn
ExprEngineCallAndReturn.cpp
1 //=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines ExprEngine's support for calls and returns.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
14 #include "PrettyStackTraceLocationContext.h"
15 #include "clang/AST/CXXInheritance.h"
16 #include "clang/AST/DeclCXX.h"
17 #include "clang/Analysis/Analyses/LiveVariables.h"
18 #include "clang/Analysis/ConstructionContext.h"
19 #include "clang/StaticAnalyzer/Core/CheckerManager.h"
20 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
21 #include "llvm/ADT/SmallSet.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/Support/SaveAndRestore.h"
24 
25 using namespace clang;
26 using namespace ento;
27 
28 #define DEBUG_TYPE "ExprEngine"
29 
30 STATISTIC(NumOfDynamicDispatchPathSplits,
31  "The # of times we split the path due to imprecise dynamic dispatch info");
32 
33 STATISTIC(NumInlinedCalls,
34  "The # of times we inlined a call");
35 
36 STATISTIC(NumReachedInlineCountMax,
37  "The # of times we reached inline count maximum");
38 
39 void ExprEngine::processCallEnter(NodeBuilderContext &BC, CallEnter CE,
40  ExplodedNode *Pred) {
41  // Get the entry block in the CFG of the callee.
42  const StackFrameContext *calleeCtx = CE.getCalleeContext();
43  PrettyStackTraceLocationContext CrashInfo(calleeCtx);
44  const CFGBlock *Entry = CE.getEntry();
45 
46  // Validate the CFG.
47  assert(Entry->empty());
48  assert(Entry->succ_size() == 1);
49 
50  // Get the solitary successor.
51  const CFGBlock *Succ = *(Entry->succ_begin());
52 
53  // Construct an edge representing the starting location in the callee.
54  BlockEdge Loc(Entry, Succ, calleeCtx);
55 
56  ProgramStateRef state = Pred->getState();
57 
58  // Construct a new node, notify checkers that analysis of the function has
59  // begun, and add the resultant nodes to the worklist.
60  bool isNew;
61  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
62  Node->addPredecessor(Pred, G);
63  if (isNew) {
64  ExplodedNodeSet DstBegin;
65  processBeginOfFunction(BC, Node, DstBegin, Loc);
66  Engine.enqueue(DstBegin);
67  }
68 }
69 
70 // Find the last statement on the path to the exploded node and the
71 // corresponding Block.
72 static std::pair<const Stmt*,
73  const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
74  const Stmt *S = nullptr;
75  const CFGBlock *Blk = nullptr;
76  const StackFrameContext *SF = Node->getStackFrame();
77 
78  // Back up through the ExplodedGraph until we reach a statement node in this
79  // stack frame.
80  while (Node) {
81  const ProgramPoint &PP = Node->getLocation();
82 
83  if (PP.getStackFrame() == SF) {
84  if (Optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
85  S = SP->getStmt();
86  break;
87  } else if (Optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
88  S = CEE->getCalleeContext()->getCallSite();
89  if (S)
90  break;
91 
92  // If there is no statement, this is an implicitly-generated call.
93  // We'll walk backwards over it and then continue the loop to find
94  // an actual statement.
95  Optional<CallEnter> CE;
96  do {
97  Node = Node->getFirstPred();
98  CE = Node->getLocationAs<CallEnter>();
99  } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
100 
101  // Continue searching the graph.
102  } else if (Optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
103  Blk = BE->getSrc();
104  }
105  } else if (Optional<CallEnter> CE = PP.getAs<CallEnter>()) {
106  // If we reached the CallEnter for this function, it has no statements.
107  if (CE->getCalleeContext() == SF)
108  break;
109  }
110 
111  if (Node->pred_empty())
112  return std::make_pair(nullptr, nullptr);
113 
114  Node = *Node->pred_begin();
115  }
116 
117  return std::make_pair(S, Blk);
118 }
119 
120 /// Adjusts a return value when the called function's return type does not
121 /// match the caller's expression type. This can happen when a dynamic call
122 /// is devirtualized, and the overriding method has a covariant (more specific)
123 /// return type than the parent's method. For C++ objects, this means we need
124 /// to add base casts.
125 static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
126  StoreManager &StoreMgr) {
127  // For now, the only adjustments we handle apply only to locations.
128  if (!V.getAs<Loc>())
129  return V;
130 
131  // If the types already match, don't do any unnecessary work.
132  ExpectedTy = ExpectedTy.getCanonicalType();
133  ActualTy = ActualTy.getCanonicalType();
134  if (ExpectedTy == ActualTy)
135  return V;
136 
137  // No adjustment is needed between Objective-C pointer types.
138  if (ExpectedTy->isObjCObjectPointerType() &&
139  ActualTy->isObjCObjectPointerType())
140  return V;
141 
142  // C++ object pointers may need "derived-to-base" casts.
143  const CXXRecordDecl *ExpectedClass = ExpectedTy->getPointeeCXXRecordDecl();
144  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
145  if (ExpectedClass && ActualClass) {
146  CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
147  /*DetectVirtual=*/false);
148  if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
149  !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
150  return StoreMgr.evalDerivedToBase(V, Paths.front());
151  }
152  }
153 
154  // Unfortunately, Objective-C does not enforce that overridden methods have
155  // covariant return types, so we can't assert that that never happens.
156  // Be safe and return UnknownVal().
157  return UnknownVal();
158 }
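// For illustration, a hedged sketch of the caller-side pattern this
// adjustment targets; the types Base/Derived and duplicate() are
// hypothetical, not part of the analyzer:
//
//   struct Base    { virtual Base *clone() const; };
//   struct Derived : Base { Derived *clone() const override; };
//
//   Base *duplicate(const Base &B) {
//     return B.clone(); // If devirtualized to Derived::clone(), the callee's
//                       // value is typed 'Derived *' while this CallExpr is
//                       // typed 'Base *'; evalDerivedToBase() reconciles them.
//   }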
159 
160 void ExprEngine::removeDeadOnEndOfFunction(NodeBuilderContext &BC,
161  ExplodedNode *Pred,
162  ExplodedNodeSet &Dst) {
163  // Find the last statement in the function and the corresponding basic block.
164  const Stmt *LastSt = nullptr;
165  const CFGBlock *Blk = nullptr;
166  std::tie(LastSt, Blk) = getLastStmt(Pred);
167  if (!Blk || !LastSt) {
168  Dst.Add(Pred);
169  return;
170  }
171 
172  // Here, we destroy the current location context. We use the current
173  // function's entire body as a diagnostic statement, with which the program
174  // point will be associated. However, we only want to use LastStmt as a
175  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
176  // is dead.
177  SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
178  const LocationContext *LCtx = Pred->getLocationContext();
179  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
180  LCtx->getAnalysisDeclContext()->getBody(),
181  ProgramPoint::PostStmtPurgeDeadSymbolsKind);
182 }
183 
184 static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call,
185  const StackFrameContext *calleeCtx) {
186  const Decl *RuntimeCallee = calleeCtx->getDecl();
187  const Decl *StaticDecl = Call->getDecl();
188  assert(RuntimeCallee);
189  if (!StaticDecl)
190  return true;
191  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
192 }
193 
194 /// The call exit is simulated with a sequence of nodes, which occur between
195 /// CallExitBegin and CallExitEnd. The following operations occur between the
196 /// two program points:
197 /// 1. CallExitBegin (triggers the start of call exit sequence)
198 /// 2. Bind the return value
199 /// 3. Run remove-dead-bindings to clean up the dead symbols from the callee.
200 /// 4. CallExitEnd (switch to the caller context)
201 /// 5. PostStmt<CallExpr>
202 void ExprEngine::processCallExit(ExplodedNode *CEBNode) {
203  // Step 1 CEBNode was generated before the call.
204  PrettyStackTraceLocationContext CrashInfo(CEBNode->getLocationContext());
205  const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
206 
207  // The parent context might not be a stack frame, so make sure we
208  // look up the first enclosing stack frame.
209  const StackFrameContext *callerCtx =
210  calleeCtx->getParent()->getStackFrame();
211 
212  const Stmt *CE = calleeCtx->getCallSite();
213  ProgramStateRef state = CEBNode->getState();
214  // Find the last statement in the function and the corresponding basic block.
215  const Stmt *LastSt = nullptr;
216  const CFGBlock *Blk = nullptr;
217  std::tie(LastSt, Blk) = getLastStmt(CEBNode);
218 
219  // Generate a CallEvent /before/ cleaning the state, so that we can get the
220  // correct value for 'this' (if necessary).
221  CallEventManager &CEMgr = getStateManager().getCallEventManager();
222  CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
223 
224  // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
225 
226  // If the callee returns an expression, bind its value to CallExpr.
227  if (CE) {
228  if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
229  const LocationContext *LCtx = CEBNode->getLocationContext();
230  SVal V = state->getSVal(RS, LCtx);
231 
232  // Ensure that the return type matches the type of the returned Expr.
233  if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
234  QualType ReturnedTy =
235  CallEvent::getDeclaredResultType(calleeCtx->getDecl());
236  if (!ReturnedTy.isNull()) {
237  if (const Expr *Ex = dyn_cast<Expr>(CE)) {
238  V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
239  getStoreManager());
240  }
241  }
242  }
243 
244  state = state->BindExpr(CE, callerCtx, V);
245  }
246 
247  // Bind the constructed object value to CXXConstructExpr.
248  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
249  loc::MemRegionVal This =
250  svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
251  SVal ThisV = state->getSVal(This);
252  ThisV = state->getSVal(ThisV.castAs<Loc>());
253  state = state->BindExpr(CCE, callerCtx, ThisV);
254  }
255 
256  if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
257  // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
258  // while to reach the actual CXXNewExpr element from here, so keep the
259  // region for later use.
260  // Additionally cast the return value of the inlined operator new
261  // (which is of type 'void *') to the correct object type.
262  SVal AllocV = state->getSVal(CNE, callerCtx);
263  AllocV = svalBuilder.evalCast(
264  AllocV, CNE->getType(),
265  getContext().getPointerType(getContext().VoidTy));
266 
267  state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
268  AllocV);
269  }
270  }
271 
272  // Step 3: BindedRetNode -> CleanedNodes
273  // If we can find a statement and a block in the inlined function, run remove
274  // dead bindings before returning from the call. This is important to ensure
275  // that we report issues such as leaks in the stack contexts in which
276  // they occurred.
277  ExplodedNodeSet CleanedNodes;
278  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
279  static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
280  PostStmt Loc(LastSt, calleeCtx, &retValBind);
281  bool isNew;
282  ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
283  BindedRetNode->addPredecessor(CEBNode, G);
284  if (!isNew)
285  return;
286 
287  NodeBuilderContext Ctx(getCoreEngine(), Blk, BindedRetNode);
288  currBldrCtx = &Ctx;
289  // Here, we call the Symbol Reaper with 0 statement and callee location
290  // context, telling it to clean up everything in the callee's context
291  // (and its children). We use the callee's function body as a diagnostic
292  // statement, with which the program point will be associated.
293  removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
294  calleeCtx->getAnalysisDeclContext()->getBody(),
295  ProgramPoint::PostStmtPurgeDeadSymbolsKind);
296  currBldrCtx = nullptr;
297  } else {
298  CleanedNodes.Add(CEBNode);
299  }
300 
301  for (ExplodedNodeSet::iterator I = CleanedNodes.begin(),
302  E = CleanedNodes.end(); I != E; ++I) {
303 
304  // Step 4: Generate the CallExit and leave the callee's context.
305  // CleanedNodes -> CEENode
306  CallExitEnd Loc(calleeCtx, callerCtx);
307  bool isNew;
308  ProgramStateRef CEEState = (*I == CEBNode) ? state : (*I)->getState();
309 
310  ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
311  CEENode->addPredecessor(*I, G);
312  if (!isNew)
313  return;
314 
315  // Step 5: Perform the post-condition check of the CallExpr and enqueue the
316  // result onto the work list.
317  // CEENode -> Dst -> WorkList
318  NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
319  SaveAndRestore<const NodeBuilderContext*> NBCSave(currBldrCtx,
320  &Ctx);
321  SaveAndRestore<unsigned> CBISave(currStmtIdx, calleeCtx->getIndex());
322 
323  CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
324 
325  ExplodedNodeSet DstPostCall;
326  if (const CXXNewExpr *CNE = dyn_cast_or_null<CXXNewExpr>(CE)) {
327  ExplodedNodeSet DstPostPostCallCallback;
328  getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
329  CEENode, *UpdatedCall, *this,
330  /*wasInlined=*/true);
331  for (auto I : DstPostPostCallCallback) {
332  getCheckerManager().runCheckersForNewAllocator(
333  CNE,
334  *getObjectUnderConstruction(I->getState(), CNE,
335  calleeCtx->getParent()),
336  DstPostCall, I, *this,
337  /*wasInlined=*/true);
338  }
339  } else {
340  getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
341  *UpdatedCall, *this,
342  /*wasInlined=*/true);
343  }
344  ExplodedNodeSet Dst;
345  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
346  getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
347  *this,
348  /*wasInlined=*/true);
349  } else if (CE &&
350  !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
351  AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
352  getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
353  *this, /*wasInlined=*/true);
354  } else {
355  Dst.insert(DstPostCall);
356  }
357 
358  // Enqueue the next element in the block.
359  for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
360  PSI != PSE; ++PSI) {
361  Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(),
362  calleeCtx->getIndex()+1);
363  }
364  }
365 }
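// For illustration, an approximate node trace for returning from an inlined
// call in the hypothetical caller 'int y = get();':
//
//   CallExitBegin (CEBNode, still in get()'s stack frame)
//     -> PostStmt on get()'s ReturnStmt, tagged "Bind Return Value"
//        (the value is bound to the 'get()' CallExpr in the caller's context)
//     -> PostStmtPurgeDeadSymbols nodes (CleanedNodes, callee bindings gone)
//     -> CallExitEnd (back in the caller's stack frame)
//     -> post-call / post-statement checker nodes
//     -> work-list entries at the call site's block, at index + 1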
366 
367 bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
368  // When there are no branches in the function, it means that there's no
369  // exponential complexity introduced by inlining such a function.
370  // Such functions also don't trigger various fundamental problems
371  // with our inlining mechanism, such as the problem of
372  // inlined defensive checks. Hence isLinear().
373  const CFG *Cfg = ADC->getCFG();
374  return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
375 }
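// For illustration, the "inlined defensive check" problem mentioned above,
// using hypothetical code:
//
//   void reset(int *p) { if (p) *p = 0; }   // defensive null check
//
//   void caller(int *q) {
//     reset(q);  // inlining splits the state into 'q == NULL' / 'q != NULL'
//     *q = 1;    // the null path now looks like a null dereference
//   }
//
// A branch-free (isLinear) callee cannot introduce such a split, which is one
// reason it is always treated as small.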
376 
377 bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
378  const CFG *Cfg = ADC->getCFG();
379  return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
380 }
381 
382 bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
383  const CFG *Cfg = ADC->getCFG();
384  return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
385 }
386 
387 void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
388  bool &IsRecursive, unsigned &StackDepth) {
389  IsRecursive = false;
390  StackDepth = 0;
391 
392  while (LCtx) {
393  if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
394  const Decl *DI = SFC->getDecl();
395 
396  // Mark recursive (and mutually recursive) functions and always count
397  // them when measuring the stack depth.
398  if (DI == D) {
399  IsRecursive = true;
400  ++StackDepth;
401  LCtx = LCtx->getParent();
402  continue;
403  }
404 
405  // Do not count the small functions when determining the stack depth.
406  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
407  if (!isSmall(CalleeADC))
408  ++StackDepth;
409  }
410  LCtx = LCtx->getParent();
411  }
412 }
413 
414 // The GDM component containing the dynamic dispatch bifurcation info. When
415 // the exact type of the receiver is not known, we want to explore both paths -
416 // one on which we do inline it and the other one on which we don't. This is
417 // done to ensure we do not drop coverage.
418 // This is the map from the receiver region to a bool, specifying whether we
419 // consider this region's information precise or not along the given path.
420 namespace {
421  enum DynamicDispatchMode {
422  DynamicDispatchModeInlined = 1,
423  DynamicDispatchModeConservative
424  };
425 } // end anonymous namespace
426 
427 REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
428  const MemRegion *, unsigned)
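// For illustration (hypothetical code): given
//
//   void touch(Base &b) { b.mutate(); }   // dynamic type of 'b' imprecise
//
// under IPAK_DynamicDispatchBifurcate the path is split once per receiver
// region: one successor records DynamicDispatchModeInlined for b's region and
// inlines the inferred definition of mutate(), while the other records
// DynamicDispatchModeConservative and evaluates the call conservatively.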
429 
430 bool ExprEngine::inlineCall(const CallEvent &Call, const Decl *D,
431  NodeBuilder &Bldr, ExplodedNode *Pred,
432  ProgramStateRef State) {
433  assert(D);
434 
435  const LocationContext *CurLC = Pred->getLocationContext();
436  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
437  const LocationContext *ParentOfCallee = CallerSFC;
438  if (Call.getKind() == CE_Block &&
439  !cast<BlockCall>(Call).isConversionFromLambda()) {
440  const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
441  assert(BR && "If we have the block definition we should have its region");
442  AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
443  ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
444  cast<BlockDecl>(D),
445  BR);
446  }
447 
448  // This may be NULL, but that's fine.
449  const Expr *CallE = Call.getOriginExpr();
450 
451  // Construct a new stack frame for the callee.
452  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
453  const StackFrameContext *CalleeSFC =
454  CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
455  currBldrCtx->blockCount(), currStmtIdx);
456 
457  CallEnter Loc(CallE, CalleeSFC, CurLC);
458 
459  // Construct a new state which contains the mapping from actual to
460  // formal arguments.
461  State = State->enterStackFrame(Call, CalleeSFC);
462 
463  bool isNew;
464  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
465  N->addPredecessor(Pred, G);
466  if (isNew)
467  Engine.getWorkList()->enqueue(N);
468  }
469 
470  // If we decided to inline the call, the successor has been manually
471  // added onto the work list so remove it from the node builder.
472  Bldr.takeNodes(Pred);
473 
474  NumInlinedCalls++;
475  Engine.FunctionSummaries->bumpNumTimesInlined(D);
476 
477  // Mark the decl as visited.
478  if (VisitedCallees)
479  VisitedCallees->insert(D);
480 
481  return true;
482 }
483 
484 static ProgramStateRef getInlineFailedState(ProgramStateRef State,
485  const Stmt *CallE) {
486  const void *ReplayState = State->get<ReplayWithoutInlining>();
487  if (!ReplayState)
488  return nullptr;
489 
490  assert(ReplayState == CallE && "Backtracked to the wrong call.");
491  (void)CallE;
492 
493  return State->remove<ReplayWithoutInlining>();
494 }
495 
496 void ExprEngine::VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred,
497  ExplodedNodeSet &dst) {
498  // Perform the previsit of the CallExpr.
499  ExplodedNodeSet dstPreVisit;
500  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
501 
502  // Get the call in its initial state. We use this as a template to perform
503  // all the checks.
504  CallEventManager &CEMgr = getStateManager().getCallEventManager();
505  CallEventRef<> CallTemplate
506  = CEMgr.getSimpleCall(CE, Pred->getState(), Pred->getLocationContext());
507 
508  // Evaluate the function call. We try each of the checkers
509  // to see if they can evaluate the function call.
510  ExplodedNodeSet dstCallEvaluated;
511  for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end();
512  I != E; ++I) {
513  evalCall(dstCallEvaluated, *I, *CallTemplate);
514  }
515 
516  // Finally, perform the post-condition check of the CallExpr and store
517  // the created nodes in 'Dst'.
518  // Note that if the call was inlined, dstCallEvaluated will be empty.
519  // The post-CallExpr check will occur in processCallExit.
520  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
521  *this);
522 }
523 
524 ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
525  const CallEvent &Call) {
526  const Expr *E = Call.getOriginExpr();
527  // FIXME: Constructors to placement arguments of operator new
528  // are not supported yet.
529  if (!E || isa<CXXNewExpr>(E))
530  return State;
531 
532  const LocationContext *LC = Call.getLocationContext();
533  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
534  unsigned I = Call.getASTArgumentIndex(CallI);
535  if (Optional<SVal> V =
536  getObjectUnderConstruction(State, {E, I}, LC)) {
537  SVal VV = *V;
538  (void)VV;
539  assert(cast<VarRegion>(VV.castAs<loc::MemRegionVal>().getRegion())
540  ->getStackFrame()->getParent()
541  ->getStackFrame() == LC->getStackFrame());
542  State = finishObjectConstruction(State, {E, I}, LC);
543  }
544  }
545 
546  return State;
547 }
548 
549 void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
550  ExplodedNode *Pred,
551  const CallEvent &Call) {
552  ProgramStateRef State = Pred->getState();
553  ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
554  if (CleanedState == State) {
555  Dst.insert(Pred);
556  return;
557  }
558 
559  const Expr *E = Call.getOriginExpr();
560  const LocationContext *LC = Call.getLocationContext();
561  NodeBuilder B(Pred, Dst, *currBldrCtx);
562  static SimpleProgramPointTag Tag("ExprEngine",
563  "Finish argument construction");
564  PreStmt PP(E, LC, &Tag);
565  B.generateNode(PP, CleanedState, Pred);
566 }
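// For illustration (hypothetical code): for a call such as
//
//   void consume(std::string s);
//   ...
//   consume(std::string("hi"));
//
// the argument is constructed directly into the prospective callee's
// parameter region and tracked as an object under construction; once the call
// itself has been evaluated, that bookkeeping is retired here so later
// statements do not see a stale construction entry.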
567 
568 void ExprEngine::evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred,
569  const CallEvent &Call) {
570  // WARNING: At this time, the state attached to 'Call' may be older than the
571  // state in 'Pred'. This is a minor optimization since CheckerManager will
572  // use an updated CallEvent instance when calling checkers, but if 'Call' is
573  // ever used directly in this function all callers should be updated to pass
574  // the most recent state. (It is probably not worth doing the work here since
575  // for some callers this will not be necessary.)
576 
577  // Run any pre-call checks using the generic call interface.
578  ExplodedNodeSet dstPreVisit;
579  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred,
580  Call, *this);
581 
582  // Actually evaluate the function call. We try each of the checkers
583  // to see if the can evaluate the function call, and get a callback at
584  // defaultEvalCall if all of them fail.
585  ExplodedNodeSet dstCallEvaluated;
586  getCheckerManager().runCheckersForEvalCall(dstCallEvaluated, dstPreVisit,
587  Call, *this);
588 
589  // If there were other constructors called for object-type arguments
590  // of this call, clean them up.
591  ExplodedNodeSet dstArgumentCleanup;
592  for (auto I : dstCallEvaluated)
593  finishArgumentConstruction(dstArgumentCleanup, I, Call);
594 
595  // Finally, run any post-call checks.
596  getCheckerManager().runCheckersForPostCall(Dst, dstArgumentCleanup,
597  Call, *this);
598 }
599 
600 ProgramStateRef ExprEngine::bindReturnValue(const CallEvent &Call,
601  const LocationContext *LCtx,
602  ProgramStateRef State) {
603  const Expr *E = Call.getOriginExpr();
604  if (!E)
605  return State;
606 
607  // Some method families have known return values.
608  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
609  switch (Msg->getMethodFamily()) {
610  default:
611  break;
612  case OMF_autorelease:
613  case OMF_retain:
614  case OMF_self: {
615  // These methods return their receivers.
616  return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
617  }
618  }
619  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
620  SVal ThisV = C->getCXXThisVal();
621  ThisV = State->getSVal(ThisV.castAs<Loc>());
622  return State->BindExpr(E, LCtx, ThisV);
623  }
624 
625  SVal R;
626  QualType ResultTy = Call.getResultType();
627  unsigned Count = currBldrCtx->blockCount();
628  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
629  // Conjure a temporary if the function returns an object by value.
630  SVal Target;
631  assert(RTC->getStmt() == Call.getOriginExpr());
632  EvalCallOptions CallOpts; // FIXME: We won't really need those.
633  std::tie(State, Target) =
634  prepareForObjectConstruction(Call.getOriginExpr(), State, LCtx,
635  RTC->getConstructionContext(), CallOpts);
636  const MemRegion *TargetR = Target.getAsRegion();
637  assert(TargetR);
638  // Invalidate the region so that it doesn't look uninitialized. If this is
639  // a field or element constructor, we do not want to invalidate
640  // the whole structure. Pointer escape is meaningless because
641  // the structure is a product of conservative evaluation
642  // and therefore contains nothing interesting at this point.
643  RegionAndSymbolInvalidationTraits ITraits;
644  ITraits.setTrait(TargetR,
645  RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
646  State = State->invalidateRegions(TargetR, E, Count, LCtx,
647  /* CausesPointerEscape=*/false, nullptr,
648  &Call, &ITraits);
649 
650  R = State->getSVal(Target.castAs<Loc>(), E->getType());
651  } else {
652  // Conjure a symbol if the return value is unknown.
653 
654  // See if we need to conjure a heap pointer instead of
655  // a regular unknown pointer.
656  bool IsHeapPointer = false;
657  if (const auto *CNE = dyn_cast<CXXNewExpr>(E))
658  if (CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
659  // FIXME: Delegate this to evalCall in MallocChecker?
660  IsHeapPointer = true;
661  }
662 
663  R = IsHeapPointer ? svalBuilder.getConjuredHeapSymbolVal(E, LCtx, Count)
664  : svalBuilder.conjureSymbolVal(nullptr, E, LCtx, ResultTy,
665  Count);
666  }
667  return State->BindExpr(E, LCtx, R);
668 }
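// For illustration (hypothetical code): when
//
//   int v = compute();   // 'compute' is not inlined
//
// is evaluated conservatively, the 'compute()' expression is bound to a fresh
// conjured symbol, so 'v' stays distinguishable from other unknowns on the
// same path. A call to a replaceable global 'operator new' gets a heap-based
// symbol instead, and a call that constructs into a region
// (CFGCXXRecordTypedCall) yields the value of the freshly invalidated target.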
669 
670 // Conservatively evaluate call by invalidating regions and binding
671 // a conjured return value.
672 void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
673  ExplodedNode *Pred,
674  ProgramStateRef State) {
675  State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
676  State = bindReturnValue(Call, Pred->getLocationContext(), State);
677 
678  // And make the result node.
679  Bldr.generateNode(Call.getProgramPoint(), State, Pred);
680 }
681 
682 ExprEngine::CallInlinePolicy
683 ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
684  AnalyzerOptions &Opts,
685  const ExprEngine::EvalCallOptions &CallOpts) {
686  const LocationContext *CurLC = Pred->getLocationContext();
687  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
688  switch (Call.getKind()) {
689  case CE_Function:
690  case CE_Block:
691  break;
692  case CE_CXXMember:
693  case CE_CXXMemberOperator:
694  if (!Opts.mayInlineCXXMemberFunction(CIMK_MemberFunctions))
695  return CIP_DisallowedAlways;
696  break;
697  case CE_CXXConstructor: {
698  if (!Opts.mayInlineCXXMemberFunction(CIMK_Constructors))
699  return CIP_DisallowedAlways;
700 
701  const CXXConstructorCall &Ctor = cast<CXXConstructorCall>(Call);
702 
703  const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
704 
705  auto CCE = getCurrentCFGElement().getAs<CFGConstructor>();
706  const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
707  : nullptr;
708 
709  if (CC && isa<NewAllocatedObjectConstructionContext>(CC) &&
710  !Opts.MayInlineCXXAllocator)
711  return CIP_DisallowedOnce;
712 
713  // FIXME: We don't handle constructors or destructors for arrays properly.
714  // Even once we do, we still need to be careful about implicitly-generated
715  // initializers for array fields in default move/copy constructors.
716  // We still allow construction into ElementRegion targets when they don't
717  // represent array elements.
718  if (CallOpts.IsArrayCtorOrDtor)
719  return CIP_DisallowedOnce;
720 
721  // Inlining constructors requires including initializers in the CFG.
722  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
723  assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
724  (void)ADC;
725 
726  // If the destructor is trivial, it's always safe to inline the constructor.
727  if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
728  break;
729 
730  // For other types, only inline constructors if destructor inlining is
731  // also enabled.
732  if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
733  return CIP_DisallowedAlways;
734 
735  if (CtorExpr->getConstructionKind() == CXXConstructExpr::CK_Complete) {
736  // If we don't handle temporary destructors, we shouldn't inline
737  // their constructors.
738  if (CallOpts.IsTemporaryCtorOrDtor &&
739  !Opts.ShouldIncludeTemporaryDtorsInCFG)
740  return CIP_DisallowedOnce;
741 
742  // If we did not find the correct this-region, it would be pointless
743  // to inline the constructor. Instead we will simply invalidate
744  // the fake temporary target.
745  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
746  return CIP_DisallowedOnce;
747 
748  // If the temporary is lifetime-extended by binding it to a reference-type
749  // field within an aggregate, automatic destructors don't work properly.
750  if (CallOpts.IsTemporaryLifetimeExtendedViaAggregate)
751  return CIP_DisallowedOnce;
752  }
753 
754  break;
755  }
756  case CE_CXXDestructor: {
757  if (!Opts.mayInlineCXXMemberFunction(CIMK_Destructors))
758  return CIP_DisallowedAlways;
759 
760  // Inlining destructors requires building the CFG correctly.
761  const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
762  assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
763  (void)ADC;
764 
765  // FIXME: We don't handle constructors or destructors for arrays properly.
766  if (CallOpts.IsArrayCtorOrDtor)
767  return CIP_DisallowedOnce;
768 
769  // Allow disabling temporary destructor inlining with a separate option.
770  if (CallOpts.IsTemporaryCtorOrDtor &&
771  !Opts.MayInlineCXXTemporaryDtors)
772  return CIP_DisallowedOnce;
773 
774  // If we did not find the correct this-region, it would be pointless
775  // to inline the destructor. Instead we will simply invalidate
776  // the fake temporary target.
777  if (CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion)
778  return CIP_DisallowedOnce;
779  break;
780  }
781  case CE_CXXAllocator:
782  if (Opts.MayInlineCXXAllocator)
783  break;
784  // Do not inline allocators until we model deallocators.
785  // This is unfortunate, but basically necessary for smart pointers and such.
786  return CIP_DisallowedAlways;
787  case CE_ObjCMessage:
788  if (!Opts.MayInlineObjCMethod)
789  return CIP_DisallowedAlways;
790  if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
791  Opts.getIPAMode() == IPAK_DynamicDispatchBifurcate))
792  return CIP_DisallowedAlways;
793  break;
794  }
795 
796  return CIP_Allowed;
797 }
798 
799 /// Returns true if the given C++ class contains a member with the given name.
800 static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
801  StringRef Name) {
802  const IdentifierInfo &II = Ctx.Idents.get(Name);
803  DeclarationName DeclName = Ctx.DeclarationNames.getIdentifier(&II);
804  if (!RD->lookup(DeclName).empty())
805  return true;
806 
807  CXXBasePaths Paths(false, false, false);
808  if (RD->lookupInBases(
809  [DeclName](const CXXBaseSpecifier *Specifier, CXXBasePath &Path) {
810  return CXXRecordDecl::FindOrdinaryMember(Specifier, Path, DeclName);
811  },
812  Paths))
813  return true;
814 
815  return false;
816 }
817 
818 /// Returns true if the given C++ class is a container or iterator.
819 ///
820 /// Our heuristic for this is whether it contains a method named 'begin()' or a
821 /// nested type named 'iterator' or 'iterator_category'.
822 static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
823  return hasMember(Ctx, RD, "begin") ||
824  hasMember(Ctx, RD, "iterator") ||
825  hasMember(Ctx, RD, "iterator_category");
826 }
827 
828 /// Returns true if the given function refers to a method of a C++ container
829 /// or iterator.
830 ///
831 /// We generally do a poor job modeling most containers right now, and might
832 /// prefer not to inline their methods.
833 static bool isContainerMethod(const ASTContext &Ctx,
834  const FunctionDecl *FD) {
835  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
836  return isContainerClass(Ctx, MD->getParent());
837  return false;
838 }
839 
840 /// Returns true if the given function is the destructor of a class named
841 /// "shared_ptr".
842 static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
843  const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
844  if (!Dtor)
845  return false;
846 
847  const CXXRecordDecl *RD = Dtor->getParent();
848  if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
849  if (II->isStr("shared_ptr"))
850  return true;
851 
852  return false;
853 }
854 
855 /// Returns true if the function in \p CalleeADC may be inlined in general.
856 ///
857 /// This checks static properties of the function, such as its signature and
858 /// CFG, to determine whether the analyzer should ever consider inlining it,
859 /// in any context.
860 bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
861  AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
862  // FIXME: Do not inline variadic calls.
863  if (CallEvent::isVariadic(CalleeADC->getDecl()))
864  return false;
865 
866  // Check certain C++-related inlining policies.
867  ASTContext &Ctx = CalleeADC->getASTContext();
868  if (Ctx.getLangOpts().CPlusPlus) {
869  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
870  // Conditionally control the inlining of template functions.
871  if (!Opts.MayInlineTemplateFunctions)
872  if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
873  return false;
874 
875  // Conditionally control the inlining of C++ standard library functions.
876  if (!Opts.MayInlineCXXStandardLibrary)
877  if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
878  if (AnalysisDeclContext::isInStdNamespace(FD))
879  return false;
880 
881  // Conditionally control the inlining of methods on objects that look
882  // like C++ containers.
883  if (!Opts.MayInlineCXXContainerMethods)
884  if (!AMgr.isInCodeFile(FD->getLocation()))
885  if (isContainerMethod(Ctx, FD))
886  return false;
887 
888  // Conditionally control the inlining of the destructor of C++ shared_ptr.
889  // We don't currently do a good job modeling shared_ptr because we can't
890  // see the reference count, so treating it as opaque is probably the best
891  // idea.
892  if (!Opts.MayInlineCXXSharedPtrDtor)
893  if (isCXXSharedPtrDtor(FD))
894  return false;
895  }
896  }
897 
898  // It is possible that the CFG cannot be constructed.
899  // Be safe, and check if the CalleeCFG is valid.
900  const CFG *CalleeCFG = CalleeADC->getCFG();
901  if (!CalleeCFG)
902  return false;
903 
904  // Do not inline large functions.
905  if (isHuge(CalleeADC))
906  return false;
907 
908  // It is possible that the live variables analysis cannot be
909  // run. If so, bail out.
910  if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
911  return false;
912 
913  return true;
914 }
915 
916 bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
917  const ExplodedNode *Pred,
918  const EvalCallOptions &CallOpts) {
919  if (!D)
920  return false;
921 
922  AnalysisManager &AMgr = getAnalysisManager();
923  AnalyzerOptions &Opts = AMgr.options;
924  AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
925  AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
926 
927  // The auto-synthesized bodies are essential to inline as they are
928  // usually small and commonly used. Note: we should do this check early on to
929  // ensure we always inline these calls.
930  if (CalleeADC->isBodyAutosynthesized())
931  return true;
932 
933  if (!AMgr.shouldInlineCall())
934  return false;
935 
936  // Check if this function has been marked as non-inlinable.
937  Optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
938  if (MayInline.hasValue()) {
939  if (!MayInline.getValue())
940  return false;
941 
942  } else {
943  // We haven't actually checked the static properties of this function yet.
944  // Do that now, and record our decision in the function summaries.
945  if (mayInlineDecl(CalleeADC)) {
946  Engine.FunctionSummaries->markMayInline(D);
947  } else {
948  Engine.FunctionSummaries->markShouldNotInline(D);
949  return false;
950  }
951  }
952 
953  // Check if we should inline a call based on its kind.
954  // FIXME: this checks both static and dynamic properties of the call, which
955  // means we're redoing a bit of work that could be cached in the function
956  // summary.
957  CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
958  if (CIP != CIP_Allowed) {
959  if (CIP == CIP_DisallowedAlways) {
960  assert(!MayInline.hasValue() || MayInline.getValue());
961  Engine.FunctionSummaries->markShouldNotInline(D);
962  }
963  return false;
964  }
965 
966  // Do not inline if recursive or we've reached max stack frame count.
967  bool IsRecursive = false;
968  unsigned StackDepth = 0;
969  examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
970  if ((StackDepth >= Opts.InlineMaxStackDepth) &&
971  (!isSmall(CalleeADC) || IsRecursive))
972  return false;
973 
974  // Do not inline large functions too many times.
975  if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
976  Opts.MaxTimesInlineLarge) &&
977  isLarge(CalleeADC)) {
978  NumReachedInlineCountMax++;
979  return false;
980  }
981 
982  if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
983  return false;
984 
985  return true;
986 }
987 
988 static bool isTrivialObjectAssignment(const CallEvent &Call) {
989  const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
990  if (!ICall)
991  return false;
992 
993  const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
994  if (!MD)
995  return false;
996  if (!(MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()))
997  return false;
998 
999  return MD->isTrivial();
1000 }
1001 
1002 void ExprEngine::defaultEvalCall(NodeBuilder &Bldr, ExplodedNode *Pred,
1003  const CallEvent &CallTemplate,
1004  const EvalCallOptions &CallOpts) {
1005  // Make sure we have the most recent state attached to the call.
1006  ProgramStateRef State = Pred->getState();
1007  CallEventRef<> Call = CallTemplate.cloneWithState(State);
1008 
1009  // Special-case trivial assignment operators.
1010  if (isTrivialObjectAssignment(*Call)) {
1011  performTrivialCopy(Bldr, Pred, *Call);
1012  return;
1013  }
1014 
1015  // Try to inline the call.
1016  // The origin expression here is just used as a kind of checksum;
1017  // this should still be safe even for CallEvents that don't come from exprs.
1018  const Expr *E = Call->getOriginExpr();
1019 
1020  ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1021  if (InlinedFailedState) {
1022  // If we already tried once and failed, make sure we don't retry later.
1023  State = InlinedFailedState;
1024  } else {
1025  RuntimeDefinition RD = Call->getRuntimeDefinition();
1026  const Decl *D = RD.getDecl();
1027  if (shouldInlineCall(*Call, D, Pred, CallOpts)) {
1028  if (RD.mayHaveOtherDefinitions()) {
1029  AnalyzerOptions &Options = getAnalysisManager().options;
1030 
1031  // Explore with and without inlining the call.
1032  if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1033  BifurcateCall(RD.getDispatchRegion(), *Call, D, Bldr, Pred);
1034  return;
1035  }
1036 
1037  // Don't inline if we're not in any dynamic dispatch mode.
1038  if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1039  conservativeEvalCall(*Call, Bldr, Pred, State);
1040  return;
1041  }
1042  }
1043 
1044  // We are not bifurcating and we do have a Decl, so just inline.
1045  if (inlineCall(*Call, D, Bldr, Pred, State))
1046  return;
1047  }
1048  }
1049 
1050  // If we can't inline it, handle the return value and invalidate the regions.
1051  conservativeEvalCall(*Call, Bldr, Pred, State);
1052 }
1053 
1054 void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1055  const CallEvent &Call, const Decl *D,
1056  NodeBuilder &Bldr, ExplodedNode *Pred) {
1057  assert(BifurReg);
1058  BifurReg = BifurReg->StripCasts();
1059 
1060  // Check if we've performed the split already - note, we only want
1061  // to split the path once per memory region.
1062  ProgramStateRef State = Pred->getState();
1063  const unsigned *BState =
1064  State->get<DynamicDispatchBifurcationMap>(BifurReg);
1065  if (BState) {
1066  // If we are on "inline path", keep inlining if possible.
1067  if (*BState == DynamicDispatchModeInlined)
1068  if (inlineCall(Call, D, Bldr, Pred, State))
1069  return;
1070  // If inline failed, or we are on the path where we assume we
1071  // don't have enough info about the receiver to inline, conjure the
1072  // return value and invalidate the regions.
1073  conservativeEvalCall(Call, Bldr, Pred, State);
1074  return;
1075  }
1076 
1077  // If we got here, this is the first time we process a message to this
1078  // region, so split the path.
1079  ProgramStateRef IState =
1080  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1081  DynamicDispatchModeInlined);
1082  inlineCall(Call, D, Bldr, Pred, IState);
1083 
1084  ProgramStateRef NoIState =
1085  State->set<DynamicDispatchBifurcationMap>(BifurReg,
1086  DynamicDispatchModeConservative);
1087  conservativeEvalCall(Call, Bldr, Pred, NoIState);
1088 
1089  NumOfDynamicDispatchPathSplits++;
1090 }
1091 
1092 void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode *Pred,
1093  ExplodedNodeSet &Dst) {
1094  ExplodedNodeSet dstPreVisit;
1095  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1096 
1097  StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1098 
1099  if (RS->getRetValue()) {
1100  for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1101  ei = dstPreVisit.end(); it != ei; ++it) {
1102  B.generateNode(RS, *it, (*it)->getState());
1103  }
1104  }
1105 }