clang 23.0.0git
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
23#include "llvm/Support/Casting.h"
24#include "llvm/Support/Compiler.h"
25#include "llvm/Support/SaveAndRestore.h"
26#include <optional>
27
28using namespace clang;
29using namespace ento;
30
31#define DEBUG_TYPE "ExprEngine"
32
// Statistic counters for this file: path splits caused by imprecise dynamic
// dispatch, the number of inlined calls, and hits of the inline-count maximum
// (descriptions are given in the strings below).
34 NumOfDynamicDispatchPathSplits,
35 "The # of times we split the path due to imprecise dynamic dispatch info");
36
37STAT_COUNTER(NumInlinedCalls, "The # of times we inlined a call");
38
39STAT_COUNTER(NumReachedInlineCountMax,
40 "The # of times we reached inline count maximum");
41
43 // Get the entry block in the CFG of the callee.
44 const CFGBlock *Entry = CE.getEntry();
45
46 // Validate the CFG.
47 assert(Entry->empty());
48 assert(Entry->succ_size() == 1);
49
50 // Get the solitary successor.
51 const CFGBlock *Succ = *(Entry->succ_begin());
52
53 // Construct an edge representing the starting location in the callee.
54 BlockEdge Loc(Entry, Succ, CE.getCalleeContext());
55
56 ProgramStateRef state = Pred->getState();
57
58 // Construct a new node, notify checkers that analysis of the function has
59 // begun, and add the resultant nodes to the worklist.
 // getNode returns the existing node for (Loc, state) if one exists;
 // isNew tells us whether it was just created, so checkers are notified
 // and work is enqueued only the first time this entry is reached.
60 bool isNew;
61 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
62 Node->addPredecessor(Pred, G);
63 if (isNew) {
64 // FIXME: In the `processBeginOfFunction` callback
65 // `ExprEngine::getCurrLocationContext()` can be different from the
66 // `LocationContext` queried from e.g. the `ExplodedNode`s. I'm not
67 // touching this now because this commit is NFC; but in the future it would
68 // be nice to avoid this inconsistency.
69 ExplodedNodeSet DstBegin;
70 processBeginOfFunction(Node, DstBegin, Loc);
71 Engine.enqueue(DstBegin);
72 }
73}
74
75// Find the last statement on the path to the exploded node and the
76// corresponding Block.
77static std::pair<const Stmt*,
78 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
79 const Stmt *S = nullptr;
80 const CFGBlock *Blk = nullptr;
81 const StackFrameContext *SF = Node->getStackFrame();
82
83 // Back up through the ExplodedGraph until we reach a statement node in this
84 // stack frame.
85 while (Node) {
86 const ProgramPoint &PP = Node->getLocation();
87
88 if (PP.getStackFrame() == SF) {
89 if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
90 S = SP->getStmt();
91 break;
92 } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
93 S = CEE->getCalleeContext()->getCallSite();
94 if (S)
95 break;
96
97 // If there is no statement, this is an implicitly-generated call.
98 // We'll walk backwards over it and then continue the loop to find
99 // an actual statement.
100 std::optional<CallEnter> CE;
101 do {
102 Node = Node->getFirstPred();
103 CE = Node->getLocationAs<CallEnter>();
104 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
105
106 // Continue searching the graph.
107 } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
108 Blk = BE->getSrc();
109 }
110 } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
111 // If we reached the CallEnter for this function, it has no statements.
112 if (CE->getCalleeContext() == SF)
113 break;
114 }
115
116 if (Node->pred_empty())
117 return std::make_pair(nullptr, nullptr);
118
119 Node = *Node->pred_begin();
120 }
121
122 return std::make_pair(S, Blk);
123}
124
125/// Adjusts a return value when the called function's return type does not
126/// match the caller's expression type. This can happen when a dynamic call
127/// is devirtualized, and the overriding method has a covariant (more specific)
128/// return type than the parent's method. For C++ objects, this means we need
129/// to add base casts.
130static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
131 StoreManager &StoreMgr) {
132 // For now, the only adjustments we handle apply only to locations.
133 if (!isa<Loc>(V))
134 return V;
135
136 // If the types already match, don't do any unnecessary work.
137 ExpectedTy = ExpectedTy.getCanonicalType();
138 ActualTy = ActualTy.getCanonicalType();
139 if (ExpectedTy == ActualTy)
140 return V;
141
142 // No adjustment is needed between Objective-C pointer types.
143 if (ExpectedTy->isObjCObjectPointerType() &&
144 ActualTy->isObjCObjectPointerType())
145 return V;
146
147 // C++ object pointers may need "derived-to-base" casts.
149 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
150 if (ExpectedClass && ActualClass) {
151 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
152 /*DetectVirtual=*/false);
153 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
154 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
155 return StoreMgr.evalDerivedToBase(V, Paths.front());
156 }
157 }
158
159 // Unfortunately, Objective-C does not enforce that overridden methods have
160 // covariant return types, so we can't assert that that never happens.
161 // Be safe and return UnknownVal().
162 return UnknownVal();
163}
164
166 ExplodedNodeSet &Dst) {
167 // Find the last statement in the function and the corresponding basic block.
168 const Stmt *LastSt = nullptr;
169 const CFGBlock *Blk = nullptr;
170 std::tie(LastSt, Blk) = getLastStmt(Pred);
171 if (!Blk || !LastSt) {
172 Dst.insert(Pred);
173 return;
174 }
175
176 // Here, we destroy the current location context. We use the current
177 // function's entire body as a diagnostic statement, with which the program
178 // point will be associated. However, we only want to use LastStmt as a
179 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
180 // is dead.
181 const LocationContext *LCtx = Pred->getLocationContext();
182 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
185}
186
188 const StackFrameContext *calleeCtx) {
189 const Decl *RuntimeCallee = calleeCtx->getDecl();
190 const Decl *StaticDecl = Call->getDecl();
191 assert(RuntimeCallee);
192 if (!StaticDecl)
193 return true;
194 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
195}
196
197 // Returns the number of elements in the array currently being destructed.
198 // If the element count is not found 0 will be returned.
200 const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
202 "The call event is not a destructor call!");
203
204 const auto &DtorCall = cast<CXXDestructorCall>(Call);
205
206 auto ThisVal = DtorCall.getCXXThisVal();
207
 // 'this' points at an element of the array; recover the enclosing array
 // region and query its dynamic element count.
208 if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
209 auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
210 auto ElementType = ThisElementRegion->getElementType();
211
212 auto ElementCount =
213 getDynamicElementCount(State, ArrayRegion, SVB, ElementType);
214
 // A symbolic (non-constant) count cannot be used to drive the unrolled
 // destruction; report 0 as documented above.
215 if (!ElementCount.isConstant())
216 return 0;
217
218 return ElementCount.getAsInteger()->getLimitedValue();
219 }
220
221 return 0;
222}
223
224ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
225 ProgramStateRef State, const CXXConstructExpr *E,
226 const LocationContext *LCtx) {
227
228 assert(LCtx && "Location context must be provided!");
229
230 if (E) {
231 if (getPendingInitLoop(State, E, LCtx))
232 State = removePendingInitLoop(State, E, LCtx);
233
234 if (getIndexOfElementToConstruct(State, E, LCtx))
235 State = removeIndexOfElementToConstruct(State, E, LCtx);
236 }
237
238 if (getPendingArrayDestruction(State, LCtx))
239 State = removePendingArrayDestruction(State, LCtx);
240
241 return State;
242}
243
244/// The call exit is simulated with a sequence of nodes, which occur between
245/// CallExitBegin and CallExitEnd. The following operations occur between the
246/// two program points:
247/// 1. CallExitBegin (triggers the start of call exit sequence)
248/// 2. Bind the return value
249/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
250/// 4. CallExitEnd
251/// 5. PostStmt<CallExpr>
252/// Steps 1-3. happen in the callee context; but there is a context switch and
253/// steps 4-5. happen in the caller context.
255 // Step 1 CEBNode was generated before the call.
256 const StackFrameContext *CalleeCtx = CEBNode->getStackFrame();
257
258 // The parent context might not be a stack frame, so make sure we
259 // look up the first enclosing stack frame.
260 const StackFrameContext *CallerCtx = CalleeCtx->getParent()->getStackFrame();
261
262 const Stmt *CE = CalleeCtx->getCallSite();
263 ProgramStateRef State = CEBNode->getState();
264 // Find the last statement in the function and the corresponding basic block.
265 auto [LastSt, Blk] = getLastStmt(CEBNode);
266
267 const CFGBlock *PrePurgeBlock =
268 isa_and_nonnull<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
269 // The first half of this process happens in the callee context:
270 setCurrLocationContextAndBlock(CalleeCtx, PrePurgeBlock);
271
272 // Generate a CallEvent /before/ cleaning the State, so that we can get the
273 // correct value for 'this' (if necessary).
275 CallEventRef<> Call = CEMgr.getCaller(CalleeCtx, State);
276
277 // Step 2: generate node with bound return value: CEBNode -> BoundRetNode.
278
279 // If this variable is set to 'true' the analyzer will evaluate the call
280 // statement we are about to exit again, instead of continuing the execution
281 // from the statement after the call. This is useful for non-POD type array
282 // construction where the CXXConstructExpr is referenced only once in the CFG,
283 // but we want to evaluate it as many times as many elements the array has.
284 bool ShouldRepeatCall = false;
285
 // Array-element destructor: drop the binding of the element that was just
 // destroyed, and repeat the destructor call while elements remain.
286 if (const auto *DtorDecl =
287 dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
288 if (auto Idx = getPendingArrayDestruction(State, CallerCtx)) {
289 ShouldRepeatCall = *Idx > 0;
290
291 auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), CalleeCtx);
292 State = State->killBinding(ThisVal);
293 }
294 }
295
296 // If the callee returns an expression, bind its value to CallExpr.
297 if (CE) {
298 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
299 const LocationContext *LCtx = CEBNode->getLocationContext();
300 SVal V = State->getSVal(RS, LCtx);
301
302 // Ensure that the return type matches the type of the returned Expr.
303 if (wasDifferentDeclUsedForInlining(Call, CalleeCtx)) {
304 QualType ReturnedTy =
306 if (!ReturnedTy.isNull()) {
307 if (const Expr *Ex = dyn_cast<Expr>(CE)) {
308 V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
310 }
311 }
312 }
313
314 State = State->BindExpr(CE, CallerCtx, V);
315 }
316
317 // Bind the constructed object value to CXXConstructExpr.
318 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
320 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), CalleeCtx);
321 SVal ThisV = State->getSVal(This);
322 ThisV = State->getSVal(ThisV.castAs<Loc>());
323 State = State->BindExpr(CCE, CallerCtx, ThisV);
324
325 ShouldRepeatCall = shouldRepeatCtorCall(State, CCE, CallerCtx);
326 }
327
328 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
329 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
330 // while to reach the actual CXXNewExpr element from here, so keep the
331 // region for later use.
332 // Additionally cast the return value of the inlined operator new
333 // (which is of type 'void *') to the correct object type.
334 SVal AllocV = State->getSVal(CNE, CallerCtx);
335 AllocV = svalBuilder.evalCast(
336 AllocV, CNE->getType(),
337 getContext().getPointerType(getContext().VoidTy));
338
339 State = addObjectUnderConstruction(State, CNE, CalleeCtx->getParent(),
340 AllocV);
341 }
342 }
343
344 if (!ShouldRepeatCall) {
345 State = removeStateTraitsUsedForArrayEvaluation(
346 State, dyn_cast_or_null<CXXConstructExpr>(CE), CallerCtx);
347 }
348
349 // Step 3: BoundRetNode -> CleanedNodes
350 // If we can find a statement and a block in the inlined function, run remove
351 // dead bindings before returning from the call. This is important to ensure
352 // that we report the issues such as leaks in the stack contexts in which
353 // they occurred.
354 ExplodedNodeSet CleanedNodes;
355 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
356 static SimpleProgramPointTag RetValBind("ExprEngine", "Bind Return Value");
357 auto Loc = isa<ReturnStmt>(LastSt)
358 ? ProgramPoint{PostStmt(LastSt, CalleeCtx, &RetValBind)}
359 : ProgramPoint{EpsilonPoint(CalleeCtx, /*Data1=*/nullptr,
360 /*Data2=*/nullptr, &RetValBind)};
361
362 ExplodedNode *BoundRetNode = Engine.makeNode(Loc, State, CEBNode);
363 if (!BoundRetNode)
364 return;
365
366 // We call removeDead in the context of the callee.
368 BoundRetNode, CleanedNodes, /*ReferenceStmt=*/nullptr, CalleeCtx,
369 /*DiagnosticStmt=*/CalleeCtx->getAnalysisDeclContext()->getBody(),
371 } else {
372 CleanedNodes.insert(CEBNode);
373 }
374
375 // The second half of this process happens in the caller context. This is an
376 // exception to the general rule that the current LocationContext and Block
377 // stay the same within a single call to dispatchWorkItem.
379 setCurrLocationContextAndBlock(CallerCtx, CalleeCtx->getCallSiteBlock());
380 SaveAndRestore CBISave(currStmtIdx, CalleeCtx->getIndex());
381
 // Fan out: each cleaned node gets its own CallExitEnd node and post-call
 // checker callbacks.
382 for (ExplodedNode *N : CleanedNodes) {
383 // Step 4: Generate the CallExitEnd node.
384 // CleanedNodes -> CEENode
385 CallExitEnd Loc(CalleeCtx, CallerCtx);
386 ProgramStateRef CEEState = (N == CEBNode) ? State : N->getState();
387
388 ExplodedNode *CEENode = Engine.makeNode(Loc, CEEState, N);
389 if (!CEENode)
390 return;
391
392 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
393 // result onto the work list.
394 // CEENode -> Dst -> WorkList
395
396 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
397
398 ExplodedNodeSet DstPostPostCallCallback;
399 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, CEENode,
400 *UpdatedCall, *this,
401 /*wasInlined=*/true);
402 ExplodedNodeSet DstPostCall;
403 if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
404 for (ExplodedNode *I : DstPostPostCallCallback) {
406 cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
407 /*wasInlined=*/true);
408 }
409 } else {
410 DstPostCall.insert(DstPostPostCallCallback);
411 }
412
413 ExplodedNodeSet Dst;
414 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
415 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
416 *this,
417 /*wasInlined=*/true);
418 } else if (CE &&
419 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
420 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
421 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
422 *this, /*wasInlined=*/true);
423 } else {
424 Dst.insert(DstPostCall);
425 }
426
427 // Enqueue the next element in the block.
 // When repeating the call (array construction/destruction), re-enqueue the
 // same CFG element index instead of advancing to the next one.
428 for (ExplodedNode *DstNode : Dst) {
429 unsigned Idx = CalleeCtx->getIndex() + (ShouldRepeatCall ? 0 : 1);
430
431 Engine.getWorkList()->enqueue(DstNode, CalleeCtx->getCallSiteBlock(),
432 Idx);
433 }
434 }
435}
436
437bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
438 // When there are no branches in the function, it means that there's no
439 // exponential complexity introduced by inlining such function.
440 // Such functions also don't trigger various fundamental problems
441 // with our inlining mechanism, such as the problem of
442 // inlined defensive checks. Hence isLinear().
443 const CFG *Cfg = ADC->getCFG();
444 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
445}
446
447bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
448 const CFG *Cfg = ADC->getCFG();
449 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
450}
451
452bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
453 const CFG *Cfg = ADC->getCFG();
454 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
455}
456
457void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
458 bool &IsRecursive, unsigned &StackDepth) {
459 IsRecursive = false;
460 StackDepth = 0;
461
462 while (LCtx) {
463 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
464 const Decl *DI = SFC->getDecl();
465
466 // Mark recursive (and mutually recursive) functions and always count
467 // them when measuring the stack depth.
468 if (DI == D) {
469 IsRecursive = true;
470 ++StackDepth;
471 LCtx = LCtx->getParent();
472 continue;
473 }
474
475 // Do not count the small functions when determining the stack depth.
476 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
477 if (!isSmall(CalleeADC))
478 ++StackDepth;
479 }
480 LCtx = LCtx->getParent();
481 }
482}
483
484// The GDM component containing the dynamic dispatch bifurcation info. When
485// the exact type of the receiver is not known, we want to explore both paths -
486// one on which we do inline it and the other one on which we don't. This is
487// done to ensure we do not drop coverage.
488// This is the map from the receiver region to a bool, specifying either we
489// consider this region's information precise or not along the given path.
490namespace {
491 enum DynamicDispatchMode {
492 DynamicDispatchModeInlined = 1,
493 DynamicDispatchModeConservative
494 };
495} // end anonymous namespace
496
497REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
498 const MemRegion *, unsigned)
// Records whether the current path has already bifurcated on a foreign (CTU)
// call; read and set in ctuBifurcate().
499REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
500
501void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
502 NodeBuilder &Bldr, ExplodedNode *Pred,
503 ProgramStateRef State) {
504 ProgramStateRef ConservativeEvalState = nullptr;
 // Foreign (other-TU) calls during CTU phase 1: either inline them directly
 // (per the configured inlining kind), or bifurcate — enqueue the call for
 // the second (CTU) phase while evaluating it conservatively here.
505 if (Call.isForeign() && !isSecondPhaseCTU()) {
506 const auto IK = AMgr.options.getCTUPhase1Inlining();
508 const bool DoInline = IK == CTUPhase1InliningKind::All ||
509 isSmall(AMgr.getAnalysisDeclContext(D)));
510 if (DoInline) {
511 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
512 return;
513 }
514 const bool BState = State->get<CTUDispatchBifurcation>();
515 if (!BState) { // This is the first time we see this foreign function.
516 // Enqueue it to be analyzed in the second (ctu) phase.
517 inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
518 // Conservatively evaluate in the first phase.
519 ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
520 conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
521 } else {
522 conservativeEvalCall(Call, Bldr, Pred, State);
523 }
524 return;
525 }
 // Non-foreign calls (or the second CTU phase) are simply inlined.
526 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
527}
528
529void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
530 const Decl *D, NodeBuilder &Bldr,
531 ExplodedNode *Pred, ProgramStateRef State) {
532 assert(D);
533
534 const LocationContext *CurLC = Pred->getLocationContext();
535 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
536 const LocationContext *ParentOfCallee = CallerSFC;
 // Blocks (other than lambda conversions) get a dedicated block invocation
 // context derived from their BlockDataRegion.
537 if (Call.getKind() == CE_Block &&
538 !cast<BlockCall>(Call).isConversionFromLambda()) {
539 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
540 assert(BR && "If we have the block definition we should have its region");
541 AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
542 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
544 BR);
545 }
546
547 // This may be NULL, but that's fine.
548 const Expr *CallE = Call.getOriginExpr();
549
550 // Construct a new stack frame for the callee.
551 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
552 const StackFrameContext *CalleeSFC =
553 CalleeADC->getStackFrame(ParentOfCallee, CallE, getCurrBlock(),
554 getNumVisitedCurrent(), currStmtIdx);
555
556 CallEnter Loc(CallE, CalleeSFC, CurLC);
557
558 // Construct a new state which contains the mapping from actual to
559 // formal arguments.
560 State = State->enterStackFrame(Call, CalleeSFC);
561
 // Reuse an existing CallEnter node when (Loc, State) was already seen;
 // only newly created nodes are enqueued on the work list.
562 bool isNew;
563 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
564 N->addPredecessor(Pred, G);
565 if (isNew)
566 WList->enqueue(N);
567 }
568
569 // If we decided to inline the call, the successor has been manually
570 // added onto the work list so remove it from the node builder.
571 Bldr.takeNodes(Pred);
572
573 NumInlinedCalls++;
574 Engine.FunctionSummaries->bumpNumTimesInlined(D);
575
576 // Do not mark as visited in the 2nd run (CTUWList), so the function will
577 // be visited as top-level, this way we won't loose reports in non-ctu
578 // mode. Considering the case when a function in a foreign TU calls back
579 // into the main TU.
580 // Note, during the 1st run, it doesn't matter if we mark the foreign
581 // functions as visited (or not) because they can never appear as a top level
582 // function in the main TU.
583 if (!isSecondPhaseCTU())
584 // Mark the decl as visited.
585 if (VisitedCallees)
586 VisitedCallees->insert(D);
587}
588
590 const Stmt *CallE) {
 // If we are replaying this call without inlining, consume the
 // ReplayWithoutInlining marker and return the updated state; otherwise
 // return null to signal that no replay is in progress.
591 const void *ReplayState = State->get<ReplayWithoutInlining>();
592 if (!ReplayState)
593 return nullptr;
594
595 assert(ReplayState == CallE && "Backtracked to the wrong call.");
596 (void)CallE;
597
598 return State->remove<ReplayWithoutInlining>();
599}
600
602 ExplodedNodeSet &dst) {
603 // Perform the previsit of the CallExpr.
604 ExplodedNodeSet dstPreVisit;
605 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
606
607 // Get the call in its initial state. We use this as a template to perform
608 // all the checks.
610 CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
611 CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
612
613 // Evaluate the function call. We try each of the checkers
614 // to see if the can evaluate the function call.
615 ExplodedNodeSet dstCallEvaluated;
616 for (ExplodedNode *N : dstPreVisit) {
617 evalCall(dstCallEvaluated, N, *CallTemplate);
618 }
619
620 // Finally, perform the post-condition check of the CallExpr and store
621 // the created nodes in 'Dst'.
622 // Note that if the call was inlined, dstCallEvaluated will be empty.
623 // The post-CallExpr check will occur in processCallExit.
624 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
625 *this);
626}
627
628ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
629 const CallEvent &Call) {
630 // WARNING: The state attached to 'Call' may be obsolete, do not call any
631 // methods that rely on it!
632 const Expr *E = Call.getOriginExpr();
633 // FIXME: Constructors to placement arguments of operator new
634 // are not supported yet.
635 if (!E || isa<CXXNewExpr>(E))
636 return State;
637
 // For every argument that still has an object under construction, mark
 // that construction as finished in the state.
638 const LocationContext *LC = Call.getLocationContext();
639 for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
640 unsigned I = Call.getASTArgumentIndex(CallI);
641 if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
642 SVal VV = *V;
643 (void)VV;
645 ->getStackFrame()->getParent()
646 ->getStackFrame() == LC->getStackFrame());
647 State = finishObjectConstruction(State, {E, I}, LC);
648 }
649 }
650
651 return State;
652}
653
654void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
655 ExplodedNode *Pred,
656 const CallEvent &Call) {
657 // WARNING: The state attached to 'Call' may be obsolete, do not call any
658 // methods that rely on it!
659 ProgramStateRef State = Pred->getState();
660 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
661 if (CleanedState == State) {
662 Dst.insert(Pred);
663 return;
664 }
665
666 const Expr *E = Call.getOriginExpr();
667 const LocationContext *LC = Call.getLocationContext();
668 NodeBuilder B(Pred, Dst, *currBldrCtx);
669 static SimpleProgramPointTag Tag("ExprEngine",
670 "Finish argument construction");
671 PreStmt PP(E, LC, &Tag);
672 B.generateNode(PP, CleanedState, Pred);
673}
674
676 const CallEvent &CallTemplate) {
677 // NOTE: CallTemplate is called a "template" because its attached state may
678 // be obsolete (compared to the state of Pred). The state-dependent methods
679 // of CallEvent should be used only after a `cloneWithState` call that
680 // attaches the up-to-date state to this template object.
681
682 // Run any pre-call checks using the generic call interface.
683 ExplodedNodeSet dstPreVisit;
684 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, CallTemplate,
685 *this);
686
687 // Actually evaluate the function call. We try each of the checkers
688 // to see if the can evaluate the function call, and get a callback at
689 // defaultEvalCall if all of them fail.
690 ExplodedNodeSet dstCallEvaluated;
692 dstCallEvaluated, dstPreVisit, CallTemplate, *this, EvalCallOptions());
693
694 // If there were other constructors called for object-type arguments
695 // of this call, clean them up.
696 ExplodedNodeSet dstArgumentCleanup;
697 for (ExplodedNode *I : dstCallEvaluated)
698 finishArgumentConstruction(dstArgumentCleanup, I, CallTemplate);
699
700 ExplodedNodeSet dstPostCall;
701 getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
702 CallTemplate, *this);
703
704 // Escaping symbols conjured during invalidating the regions above.
705 // Note that, for inlined calls the nodes were put back into the worklist,
706 // so we can assume that every node belongs to a conservative call at this
707 // point.
708
709 // Run pointerEscape callback with the newly conjured symbols.
711 for (ExplodedNode *I : dstPostCall) {
712 ProgramStateRef State = I->getState();
713 CallEventRef<> Call = CallTemplate.cloneWithState(State);
714 NodeBuilder B(I, Dst, *currBldrCtx);
715 Escaped.clear();
716 {
 // Collect (pointer, pointee-value) pairs for every non-const,
 // non-void pointer/reference parameter — these regions may have been
 // written to by the conservatively evaluated call.
717 unsigned Arg = -1;
718 for (const ParmVarDecl *PVD : Call->parameters()) {
719 ++Arg;
720 QualType ParamTy = PVD->getType();
721 if (ParamTy.isNull() ||
722 (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
723 continue;
724 QualType Pointee = ParamTy->getPointeeType();
725 if (Pointee.isConstQualified() || Pointee->isVoidType())
726 continue;
727 if (const MemRegion *MR = Call->getArgSVal(Arg).getAsRegion())
728 Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
729 }
730 }
731
732 State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
734
 // Only build a new node if the escape processing actually changed state.
735 if (State == I->getState())
736 Dst.insert(I);
737 else
738 B.generateNode(I->getLocation(), State, I);
739 }
740}
741
743 const LocationContext *LCtx,
744 ProgramStateRef State) {
 // Bind a value for the call's origin expression: known receiver/'this'
 // values for some call kinds, otherwise a conjured symbol (heap-based for
 // replaceable global operator new).
745 const Expr *E = Call.getOriginExpr();
746 const ConstCFGElementRef &Elem = Call.getCFGElementRef();
747 if (!E)
748 return State;
749
750 // Some method families have known return values.
751 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
752 switch (Msg->getMethodFamily()) {
753 default:
754 break;
755 case OMF_autorelease:
756 case OMF_retain:
757 case OMF_self: {
758 // These methods return their receivers.
759 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
760 }
761 }
762 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
763 SVal ThisV = C->getCXXThisVal();
764 ThisV = State->getSVal(ThisV.castAs<Loc>());
765 return State->BindExpr(E, LCtx, ThisV);
766 }
767
768 SVal R;
769 QualType ResultTy = Call.getResultType();
770 unsigned Count = getNumVisitedCurrent();
771 if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
772 // Conjure a temporary if the function returns an object by value.
773 SVal Target;
774 assert(RTC->getStmt() == Call.getOriginExpr());
775 EvalCallOptions CallOpts; // FIXME: We won't really need those.
776 std::tie(State, Target) = handleConstructionContext(
777 Call.getOriginExpr(), State, currBldrCtx, LCtx,
778 RTC->getConstructionContext(), CallOpts);
779 const MemRegion *TargetR = Target.getAsRegion();
780 assert(TargetR);
781 // Invalidate the region so that it didn't look uninitialized. If this is
782 // a field or element constructor, we do not want to invalidate
783 // the whole structure. Pointer escape is meaningless because
784 // the structure is a product of conservative evaluation
785 // and therefore contains nothing interesting at this point.
787 ITraits.setTrait(TargetR,
789 State = State->invalidateRegions(TargetR, Elem, Count, LCtx,
790 /* CausesPointerEscape=*/false, nullptr,
791 &Call, &ITraits);
792
793 R = State->getSVal(Target.castAs<Loc>(), E->getType());
794 } else {
795 // Conjure a symbol if the return value is unknown.
796
797 // See if we need to conjure a heap pointer instead of
798 // a regular unknown pointer.
799 const auto *CNE = dyn_cast<CXXNewExpr>(E);
800 if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
801 R = svalBuilder.getConjuredHeapSymbolVal(Elem, LCtx, E->getType(), Count);
802 const MemRegion *MR = R.getAsRegion()->StripCasts();
803
804 // Store the extent of the allocated object(s).
805 SVal ElementCount;
806 if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
807 ElementCount = State->getSVal(SizeExpr, LCtx);
808 } else {
809 ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
810 }
811
812 SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);
813
 // Extent = element count * element size, in the array index type.
814 SVal Size =
815 svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
816 svalBuilder.getArrayIndexType());
817
818 // FIXME: This line is to prevent a crash. For more details please check
819 // issue #56264.
820 if (Size.isUndef())
821 Size = UnknownVal();
822
823 State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
824 } else {
825 R = svalBuilder.conjureSymbolVal(Elem, LCtx, ResultTy, Count);
826 }
827 }
828 return State->BindExpr(E, LCtx, R);
829}
830
831// Conservatively evaluate call by invalidating regions and binding
832// a conjured return value.
833void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
834 ExplodedNode *Pred, ProgramStateRef State) {
835 State = Call.invalidateRegions(getNumVisitedCurrent(), State);
836 State = bindReturnValue(Call, Pred->getLocationContext(), State);
837
838 // And make the result node.
839 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
840 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
841}
842
843ExprEngine::CallInlinePolicy
844ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
845 AnalyzerOptions &Opts,
846 const EvalCallOptions &CallOpts) {
847 const LocationContext *CurLC = Pred->getLocationContext();
848 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
849 switch (Call.getKind()) {
850 case CE_Function:
852 case CE_Block:
853 break;
854 case CE_CXXMember:
857 return CIP_DisallowedAlways;
858 break;
859 case CE_CXXConstructor: {
861 return CIP_DisallowedAlways;
862
864
865 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
866
868 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
869 : nullptr;
870
871 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
872 !Opts.MayInlineCXXAllocator)
873 return CIP_DisallowedOnce;
874
875 if (CallOpts.IsArrayCtorOrDtor) {
876 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
877 return CIP_DisallowedOnce;
878 }
879
880 // Inlining constructors requires including initializers in the CFG.
881 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
882 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
883 (void)ADC;
884
885 // If the destructor is trivial, it's always safe to inline the constructor.
886 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
887 break;
888
889 // For other types, only inline constructors if destructor inlining is
890 // also enabled.
892 return CIP_DisallowedAlways;
893
895 // If we don't handle temporary destructors, we shouldn't inline
896 // their constructors.
897 if (CallOpts.IsTemporaryCtorOrDtor &&
898 !Opts.ShouldIncludeTemporaryDtorsInCFG)
899 return CIP_DisallowedOnce;
900
901 // If we did not find the correct this-region, it would be pointless
902 // to inline the constructor. Instead we will simply invalidate
903 // the fake temporary target.
905 return CIP_DisallowedOnce;
906
907 // If the temporary is lifetime-extended by binding it to a reference-type
908 // field within an aggregate, automatic destructors don't work properly.
910 return CIP_DisallowedOnce;
911 }
912
913 break;
914 }
916 // This doesn't really increase the cost of inlining ever, because
917 // the stack frame of the inherited constructor is trivial.
918 return CIP_Allowed;
919 }
920 case CE_CXXDestructor: {
922 return CIP_DisallowedAlways;
923
924 // Inlining destructors requires building the CFG correctly.
925 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
926 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
927 (void)ADC;
928
929 if (CallOpts.IsArrayCtorOrDtor) {
930 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
931 Call, Pred->getState(), svalBuilder))) {
932 return CIP_DisallowedOnce;
933 }
934 }
935
936 // Allow disabling temporary destructor inlining with a separate option.
937 if (CallOpts.IsTemporaryCtorOrDtor &&
938 !Opts.MayInlineCXXTemporaryDtors)
939 return CIP_DisallowedOnce;
940
941 // If we did not find the correct this-region, it would be pointless
942 // to inline the destructor. Instead we will simply invalidate
943 // the fake temporary target.
945 return CIP_DisallowedOnce;
946 break;
947 }
949 [[fallthrough]];
950 case CE_CXXAllocator:
951 if (Opts.MayInlineCXXAllocator)
952 break;
953 // Do not inline allocators until we model deallocators.
954 // This is unfortunate, but basically necessary for smart pointers and such.
955 return CIP_DisallowedAlways;
956 case CE_ObjCMessage:
957 if (!Opts.MayInlineObjCMethod)
958 return CIP_DisallowedAlways;
959 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
961 return CIP_DisallowedAlways;
962 break;
963 }
964
965 return CIP_Allowed;
966}
967
968/// Returns true if the given C++ class contains a member with the given name.
969static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
970 StringRef Name) {
971 const IdentifierInfo &II = Ctx.Idents.get(Name);
972 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
973}
974
975/// Returns true if the given C++ class is a container or iterator.
976///
977/// Our heuristic for this is whether it contains a method named 'begin()' or a
978/// nested type named 'iterator' or 'iterator_category'.
979static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
980 return hasMember(Ctx, RD, "begin") ||
981 hasMember(Ctx, RD, "iterator") ||
982 hasMember(Ctx, RD, "iterator_category");
983}
984
985/// Returns true if the given function refers to a method of a C++ container
986/// or iterator.
987///
988/// We generally do a poor job modeling most containers right now, and might
989/// prefer not to inline their methods.
990static bool isContainerMethod(const ASTContext &Ctx,
991 const FunctionDecl *FD) {
992 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
993 return isContainerClass(Ctx, MD->getParent());
994 return false;
995}
996
997/// Returns true if the given function is the destructor of a class named
998/// "shared_ptr".
999static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
1000 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
1001 if (!Dtor)
1002 return false;
1003
1004 const CXXRecordDecl *RD = Dtor->getParent();
1005 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
1006 if (II->isStr("shared_ptr"))
1007 return true;
1008
1009 return false;
1010}
1011
1012/// Returns true if the function in \p CalleeADC may be inlined in general.
1013///
1014/// This checks static properties of the function, such as its signature and
1015/// CFG, to determine whether the analyzer should ever consider inlining it,
1016/// in any context.
1017bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1018 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1019 // FIXME: Do not inline variadic calls.
1020 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1021 return false;
1022
1023 // Check certain C++-related inlining policies.
1024 ASTContext &Ctx = CalleeADC->getASTContext();
1025 if (Ctx.getLangOpts().CPlusPlus) {
1026 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1027 // Conditionally control the inlining of template functions.
1028 if (!Opts.MayInlineTemplateFunctions)
1029 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1030 return false;
1031
1032 // Conditionally control the inlining of C++ standard library functions.
1033 if (!Opts.MayInlineCXXStandardLibrary)
1034 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
1036 return false;
1037
1038 // Conditionally control the inlining of methods on objects that look
1039 // like C++ containers.
1040 if (!Opts.MayInlineCXXContainerMethods)
1041 if (!AMgr.isInCodeFile(FD->getLocation()))
1042 if (isContainerMethod(Ctx, FD))
1043 return false;
1044
1045 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1046 // We don't currently do a good job modeling shared_ptr because we can't
1047 // see the reference count, so treating as opaque is probably the best
1048 // idea.
1049 if (!Opts.MayInlineCXXSharedPtrDtor)
1050 if (isCXXSharedPtrDtor(FD))
1051 return false;
1052 }
1053 }
1054
1055 // It is possible that the CFG cannot be constructed.
1056 // Be safe, and check if the CalleeCFG is valid.
1057 const CFG *CalleeCFG = CalleeADC->getCFG();
1058 if (!CalleeCFG)
1059 return false;
1060
1061 // Do not inline large functions.
1062 if (isHuge(CalleeADC))
1063 return false;
1064
1065 // It is possible that the live variables analysis cannot be
1066 // run. If so, bail out.
1067 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1068 return false;
1069
1070 return true;
1071}
1072
1073bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1074 const ExplodedNode *Pred,
1075 const EvalCallOptions &CallOpts) {
1076 if (!D)
1077 return false;
1078
1079 AnalysisManager &AMgr = getAnalysisManager();
1080 AnalyzerOptions &Opts = AMgr.options;
1081 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
1082 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1083
1084 // The auto-synthesized bodies are essential to inline as they are
1085 // usually small and commonly used. Note: we should do this check early on to
1086 // ensure we always inline these calls.
1087 if (CalleeADC->isBodyAutosynthesized())
1088 return true;
1089
1090 if (!AMgr.shouldInlineCall())
1091 return false;
1092
1093 // Check if this function has been marked as non-inlinable.
1094 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1095 if (MayInline) {
1096 if (!*MayInline)
1097 return false;
1098
1099 } else {
1100 // We haven't actually checked the static properties of this function yet.
1101 // Do that now, and record our decision in the function summaries.
1102 if (mayInlineDecl(CalleeADC)) {
1103 Engine.FunctionSummaries->markMayInline(D);
1104 } else {
1105 Engine.FunctionSummaries->markShouldNotInline(D);
1106 return false;
1107 }
1108 }
1109
1110 // Check if we should inline a call based on its kind.
1111 // FIXME: this checks both static and dynamic properties of the call, which
1112 // means we're redoing a bit of work that could be cached in the function
1113 // summary.
1114 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1115 if (CIP != CIP_Allowed) {
1116 if (CIP == CIP_DisallowedAlways) {
1117 assert(!MayInline || *MayInline);
1118 Engine.FunctionSummaries->markShouldNotInline(D);
1119 }
1120 return false;
1121 }
1122
1123 // Do not inline if recursive or we've reached max stack frame count.
1124 bool IsRecursive = false;
1125 unsigned StackDepth = 0;
1126 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1127 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1128 (!isSmall(CalleeADC) || IsRecursive))
1129 return false;
1130
1131 // Do not inline large functions too many times.
1132 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1133 Opts.MaxTimesInlineLarge) &&
1134 isLarge(CalleeADC)) {
1135 NumReachedInlineCountMax++;
1136 return false;
1137 }
1138
1139 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1140 return false;
1141
1142 return true;
1143}
1144
1145bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1146 const CXXConstructExpr *CE,
1147 const LocationContext *LCtx) {
1148 if (!CE)
1149 return false;
1150
1151 // FIXME: Handle other arrays types.
1152 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1153 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1154
1155 // This might seem conter-intuitive at first glance, but the functions are
1156 // closely related. Reasoning about destructors depends only on the type
1157 // of the expression that initialized the memory region, which is the
1158 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1159 // to the function that reasons about destructor inlining. Also note that
1160 // if the constructors of the array elements are inlined, the destructors
1161 // can also be inlined and if the destructors can be inline, it's safe to
1162 // inline the constructors.
1163 return shouldInlineArrayDestruction(ArrSize);
1164 }
1165
1166 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1167 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1168 return shouldInlineArrayDestruction(*Size);
1169
1170 return false;
1171}
1172
1173bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1174
1175 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1176
1177 // Declaring a 0 element array is also possible.
1178 return Size <= maxAllowedSize && Size > 0;
1179}
1180
1181bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1182 const CXXConstructExpr *E,
1183 const LocationContext *LCtx) {
1184
1185 if (!E)
1186 return false;
1187
1188 auto Ty = E->getType();
1189
1190 // FIXME: Handle non constant array types
1191 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
1193 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1194 }
1195
1196 if (auto Size = getPendingInitLoop(State, E, LCtx))
1197 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1198
1199 return false;
1200}
1201
1203 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
1204 if (!ICall)
1205 return false;
1206
1207 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1208 if (!MD)
1209 return false;
1211 return false;
1212
1213 return MD->isTrivial();
1214}
1215
1217 const CallEvent &Call,
1218 const EvalCallOptions &CallOpts) {
1219 // Make sure we have the most recent state attached to the call.
1220 ProgramStateRef State = Pred->getState();
1221
1222 // Special-case trivial assignment operators.
1224 performTrivialCopy(Bldr, Pred, Call);
1225 return;
1226 }
1227
1228 // Try to inline the call.
1229 // The origin expression here is just used as a kind of checksum;
1230 // this should still be safe even for CallEvents that don't come from exprs.
1231 const Expr *E = Call.getOriginExpr();
1232
1233 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1234 if (InlinedFailedState) {
1235 // If we already tried once and failed, make sure we don't retry later.
1236 State = InlinedFailedState;
1237 } else {
1238 RuntimeDefinition RD = Call.getRuntimeDefinition();
1239 Call.setForeign(RD.isForeign());
1240 const Decl *D = RD.getDecl();
1241 if (shouldInlineCall(Call, D, Pred, CallOpts)) {
1242 if (RD.mayHaveOtherDefinitions()) {
1244
1245 // Explore with and without inlining the call.
1246 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1247 BifurcateCall(RD.getDispatchRegion(), Call, D, Bldr, Pred);
1248 return;
1249 }
1250
1251 // Don't inline if we're not in any dynamic dispatch mode.
1252 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1253 conservativeEvalCall(Call, Bldr, Pred, State);
1254 return;
1255 }
1256 }
1257 ctuBifurcate(Call, D, Bldr, Pred, State);
1258 return;
1259 }
1260 }
1261
1262 // If we can't inline it, clean up the state traits used only if the function
1263 // is inlined.
1264 State = removeStateTraitsUsedForArrayEvaluation(
1265 State, dyn_cast_or_null<CXXConstructExpr>(E), Call.getLocationContext());
1266
1267 // Also handle the return value and invalidate the regions.
1268 conservativeEvalCall(Call, Bldr, Pred, State);
1269}
1270
1271void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1272 const CallEvent &Call, const Decl *D,
1273 NodeBuilder &Bldr, ExplodedNode *Pred) {
1274 assert(BifurReg);
1275 BifurReg = BifurReg->StripCasts();
1276
1277 // Check if we've performed the split already - note, we only want
1278 // to split the path once per memory region.
1279 ProgramStateRef State = Pred->getState();
1280 const unsigned *BState =
1281 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1282 if (BState) {
1283 // If we are on "inline path", keep inlining if possible.
1284 if (*BState == DynamicDispatchModeInlined)
1285 ctuBifurcate(Call, D, Bldr, Pred, State);
1286 // If inline failed, or we are on the path where we assume we
1287 // don't have enough info about the receiver to inline, conjure the
1288 // return value and invalidate the regions.
1289 conservativeEvalCall(Call, Bldr, Pred, State);
1290 return;
1291 }
1292
1293 // If we got here, this is the first time we process a message to this
1294 // region, so split the path.
1295 ProgramStateRef IState =
1296 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1297 DynamicDispatchModeInlined);
1298 ctuBifurcate(Call, D, Bldr, Pred, IState);
1299
1300 ProgramStateRef NoIState =
1301 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1302 DynamicDispatchModeConservative);
1303 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1304
1305 NumOfDynamicDispatchPathSplits++;
1306}
1307
1309 ExplodedNodeSet &Dst) {
1310 ExplodedNodeSet dstPreVisit;
1311 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1312
1313 NodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1314
1315 if (RS->getRetValue()) {
1316 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1317 ei = dstPreVisit.end(); it != ei; ++it) {
1318 B.generateNode(RS, *it, (*it)->getState());
1319 }
1320 }
1321}
#define V(N, I)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
#define STAT_COUNTER(VARNAME, DESC)
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
a trap message and trap category.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:226
SourceManager & getSourceManager()
Definition ASTContext.h:859
DeclarationNameTable DeclarationNames
Definition ASTContext.h:802
IdentifierTable & Idents
Definition ASTContext.h:798
const LangOptions & getLangOpts() const
Definition ASTContext.h:952
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *ParentLC, const BlockDecl *BD, const void *Data)
Obtain a context of the block invocation using its parent context.
static bool isInStdNamespace(const Decl *D)
const StackFrameContext * getStackFrame(LocationContext const *ParentLC, const Stmt *S, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
ASTContext & getASTContext() const
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition CFG.h:632
bool empty() const
Definition CFG.h:980
succ_iterator succ_begin()
Definition CFG.h:1017
unsigned succ_size() const
Definition CFG.h:1035
Represents C++ constructor call.
Definition CFG.h:158
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition CFG.h:110
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition CFG.h:1250
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition CFG.h:1448
bool isLinear() const
Returns true if the CFG has no branches.
Definition CFG.cpp:5451
CFGBlock & getExit()
Definition CFG.h:1366
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition CFG.h:1443
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
CXXBasePath & front()
bool isAmbiguous(CanQualType BaseType) const
Determine whether the path from the most-derived type to the given base type is ambiguous (i....
Represents a call to a C++ constructor.
Definition ExprCXX.h:1549
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition ExprCXX.h:1660
Represents a C++ destructor within a class.
Definition DeclCXX.h:2876
Represents a static or instance method of a struct/union/class.
Definition DeclCXX.h:2136
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition DeclCXX.h:2262
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition DeclCXX.cpp:2753
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition DeclCXX.cpp:2732
Represents a C++ struct/union/class.
Definition DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition DeclCXX.h:1372
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
const StackFrameContext * getCalleeContext() const
Represents a point when we finish the call exit sequence (for inlined call).
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2946
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition DeclBase.h:978
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
This represents one expression.
Definition Expr.h:112
QualType getType() const
Definition Expr.h:144
Represents a function declaration or definition.
Definition Decl.h:2015
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition Decl.h:2392
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
It wraps the AnalysisDeclContext to represent both the call stack with the help of StackFrameContext ...
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrameContext * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition Decl.h:340
Represents a parameter to a function.
Definition Decl.h:1805
const StackFrameContext * getStackFrame() const
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition TypeBase.h:1004
QualType getCanonicalType() const
Definition TypeBase.h:8483
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition TypeBase.h:8504
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition Stmt.h:3161
Expr * getRetValue()
Definition Stmt.h:3188
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
const Stmt * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition Stmt.h:86
bool isVoidType() const
Definition TypeBase.h:9034
bool isPointerType() const
Definition TypeBase.h:8668
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition TypeBase.h:8692
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition Type.cpp:1923
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:754
bool isObjCObjectPointerType() const
Definition TypeBase.h:8847
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
Represents a call to a C++ constructor.
Definition CallEvent.h:997
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition CallEvent.h:1028
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition CallEvent.h:1024
Represents a non-static C++ member function call, no matter how it is written.
Definition CallEvent.h:690
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Manages the lifetime of CallEvent objects.
Definition CallEvent.h:1374
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Represents an abstract call to a function or method along a particular path.
Definition CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition CallEvent.h:1491
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting function calls (including methods, constructors, destructors etc.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting function calls (including methods, constructors, destructors etc.
WorkList * getCTUWorkList() const
Definition CoreEngine.h:163
WorkList * getWorkList() const
Definition CoreEngine.h:162
ExplodedNodeSet is a set of ExplodedNode * elements with the invariant that its elements cannot be nu...
void insert(ExplodedNode *N)
const ProgramStateRef & getState() const
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const StackFrameContext * getStackFrame() const
const LocationContext * getLocationContext() const
std::optional< T > getLocationAs() const &
ExplodedNode * getFirstPred()
ProgramStateManager & getStateManager()
Definition ExprEngine.h:481
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition ExprEngine.h:820
void removeDeadOnEndOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
void processCallEnter(CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition ExprEngine.h:771
@ Inline_Minimal
Do minimal inlining of callees.
Definition ExprEngine.h:132
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
void setCurrLocationContextAndBlock(const LocationContext *LC, const CFGBlock *B)
Definition ExprEngine.h:246
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves which element is being constructed in a non-POD type array.
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition ExprEngine.h:216
StoreManager & getStoreManager()
Definition ExprEngine.h:484
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
ConstCFGElementRef getCFGElementRef() const
Definition ExprEngine.h:293
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retreives which element is being destructed in a non-POD type array.
void resetCurrLocationContextAndBlock()
Definition ExprEngine.h:261
CheckerManager & getCheckerManager() const
Definition ExprEngine.h:225
void processBeginOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
unsigned getNumVisitedCurrent() const
Definition ExprEngine.h:303
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition ExprEngine.h:218
const CFGBlock * getCurrBlock() const
Get the 'current' CFGBlock corresponding to the current work item (elementary analysis step handled b...
Definition ExprEngine.h:289
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
MemRegion - The root abstract class for all memory regions.
Definition MemRegion.h:98
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition CoreEngine.h:245
void takeNodes(const ExplodedNodeSet &S)
Definition CoreEngine.h:310
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred, bool MarkAsSink=false)
Generates a node in the ExplodedGraph.
Represents any expression that calls an Objective-C method.
Definition CallEvent.h:1261
CallEventManager & getCallEventManager()
Information about invalidation for a particular region/symbol.
Definition MemRegion.h:1657
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Defines the runtime definition of the called function.
Definition CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition CallEvent.h:137
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition SVals.cpp:180
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition SVals.h:83
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition Store.cpp:254
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlying region.
Definition SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXInheritedConstructor
Definition CallEvent.h:69
@ CE_CXXStaticOperator
Definition CallEvent.h:62
@ CE_CXXDestructor
Definition CallEvent.h:65
@ CE_CXXDeallocator
Definition CallEvent.h:73
@ CE_CXXAllocator
Definition CallEvent.h:72
@ CE_CXXConstructor
Definition CallEvent.h:68
@ CE_CXXMemberOperator
Definition CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
std::variant< struct RequiresDecl, struct HeaderDecl, struct UmbrellaDirDecl, struct ModuleDecl, struct ExcludeDecl, struct ExportDecl, struct ExportAsDecl, struct ExternModuleDecl, struct UseDecl, struct LinkDecl, struct ConfigMacrosDecl, struct ConflictDecl > Decl
All declarations that can appear in a module declaration.
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
Definition Address.h:330
CFGBlock::ConstCFGElementRef ConstCFGElementRef
Definition CFG.h:1227
@ ExpectedClass
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
U cast(CodeGen::Address addr)
Definition Address.h:327
unsigned long uint64_t
Hints for figuring out whether a call should be inlined during evalCall().
Definition ExprEngine.h:95
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition ExprEngine.h:110
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition ExprEngine.h:105
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition ExprEngine.h:102
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition ExprEngine.h:98
Traits for storing the call processing policy inside GDM.