clang 23.0.0git
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
23#include "llvm/Support/Casting.h"
24#include "llvm/Support/Compiler.h"
25#include "llvm/Support/SaveAndRestore.h"
26#include <optional>
27
28using namespace clang;
29using namespace ento;
30
31#define DEBUG_TYPE "ExprEngine"
32
34 NumOfDynamicDispatchPathSplits,
35 "The # of times we split the path due to imprecise dynamic dispatch info");
36
// Number of calls that were actually inlined during analysis.
STAT_COUNTER(NumInlinedCalls, "The # of times we inlined a call");

// Number of times the analyzer hit the inline-count maximum.
STAT_COUNTER(NumReachedInlineCountMax,
             "The # of times we reached inline count maximum");
41
// NOTE(review): the function signature preceding this body is elided in this
// extraction; from the names used (CE, Pred, G, Engine) it appears to be
// ExprEngine::processCallEnter — confirm against the full source.
  // Get the entry block in the CFG of the callee.
  const CFGBlock *Entry = CE.getEntry();

  // Validate the CFG: the entry block must be empty and have exactly one
  // successor, which is where analysis of the callee really starts.
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *Succ = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, Succ, CE.getCalleeContext());

  ProgramStateRef state = Pred->getState();

  // Construct a new node, notify checkers that analysis of the function has
  // begun, and add the resultant nodes to the worklist.
  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
  Node->addPredecessor(Pred, G);
  if (isNew) {
    // FIXME: In the `processBeginOfFunction` callback
    // `ExprEngine::getCurrLocationContext()` can be different from the
    // `LocationContext` queried from e.g. the `ExplodedNode`s. I'm not
    // touching this now because this commit is NFC; but in the future it would
    // be nice to avoid this inconsistency.
    ExplodedNodeSet DstBegin;
    processBeginOfFunction(Node, DstBegin, Loc);
    Engine.enqueue(DstBegin);
  }
}
74
75// Find the last statement on the path to the exploded node and the
76// corresponding Block.
77static std::pair<const Stmt*,
78 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
79 const Stmt *S = nullptr;
80 const CFGBlock *Blk = nullptr;
81 const StackFrameContext *SF = Node->getStackFrame();
82
83 // Back up through the ExplodedGraph until we reach a statement node in this
84 // stack frame.
85 while (Node) {
86 const ProgramPoint &PP = Node->getLocation();
87
88 if (PP.getStackFrame() == SF) {
89 if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
90 S = SP->getStmt();
91 break;
92 } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
93 S = CEE->getCalleeContext()->getCallSite();
94 if (S)
95 break;
96
97 // If there is no statement, this is an implicitly-generated call.
98 // We'll walk backwards over it and then continue the loop to find
99 // an actual statement.
100 std::optional<CallEnter> CE;
101 do {
102 Node = Node->getFirstPred();
103 CE = Node->getLocationAs<CallEnter>();
104 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
105
106 // Continue searching the graph.
107 } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
108 Blk = BE->getSrc();
109 }
110 } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
111 // If we reached the CallEnter for this function, it has no statements.
112 if (CE->getCalleeContext() == SF)
113 break;
114 }
115
116 if (Node->pred_empty())
117 return std::make_pair(nullptr, nullptr);
118
119 Node = *Node->pred_begin();
120 }
121
122 return std::make_pair(S, Blk);
123}
124
/// Adjusts a return value when the called function's return type does not
/// match the caller's expression type. This can happen when a dynamic call
/// is devirtualized, and the overriding method has a covariant (more specific)
/// return type than the parent's method. For C++ objects, this means we need
/// to add base casts.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
                              StoreManager &StoreMgr) {
  // For now, the only adjustments we handle apply only to locations.
  if (!isa<Loc>(V))
    return V;

  // If the types already match, don't do any unnecessary work.
  ExpectedTy = ExpectedTy.getCanonicalType();
  ActualTy = ActualTy.getCanonicalType();
  if (ExpectedTy == ActualTy)
    return V;

  // No adjustment is needed between Objective-C pointer types.
  if (ExpectedTy->isObjCObjectPointerType() &&
      ActualTy->isObjCObjectPointerType())
    return V;

  // C++ object pointers may need "derived-to-base" casts.
  // NOTE(review): the declaration of `ExpectedClass` appears to be elided in
  // this extraction (presumably ExpectedTy->getPointeeCXXRecordDecl());
  // confirm against the full source.
  const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
  if (ExpectedClass && ActualClass) {
    CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
                       /*DetectVirtual=*/false);
    if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
        !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
      // Single unambiguous derived-to-base path: let the store do the cast.
      return StoreMgr.evalDerivedToBase(V, Paths.front());
    }
  }

  // Unfortunately, Objective-C does not enforce that overridden methods have
  // covariant return types, so we can't assert that that never happens.
  // Be safe and return UnknownVal().
  return UnknownVal();
}
164
// NOTE(review): the function signature preceding this parameter list is
// elided in this extraction; presumably this is a remove-dead-bindings
// helper run at the end of a function — confirm against the full source.
                                     ExplodedNodeSet &Dst) {
  // Find the last statement in the function and the corresponding basic block.
  const Stmt *LastSt = nullptr;
  const CFGBlock *Blk = nullptr;
  std::tie(LastSt, Blk) = getLastStmt(Pred);
  if (!Blk || !LastSt) {
    // Nothing to clean up after; pass the node through unchanged.
    Dst.insert(Pred);
    return;
  }

  // Here, we destroy the current location context. We use the current
  // function's entire body as a diagnostic statement, with which the program
  // point will be associated. However, we only want to use LastStmt as a
  // reference for what to clean up if it's a ReturnStmt; otherwise, everything
  // is dead.
  const LocationContext *LCtx = Pred->getLocationContext();
  removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
             // (trailing arguments of removeDead elided in this extraction)
}
186
// Returns true when the declaration that was actually inlined (the runtime
// callee recorded in the stack frame) differs from the declaration the call
// statically resolves to. The name `wasDifferentDeclUsedForInlining` is
// visible at the call site in processCallExit below.
// NOTE(review): the first line of the signature is elided in this extraction.
                                            const StackFrameContext *calleeCtx) {
  const Decl *RuntimeCallee = calleeCtx->getDecl();
  const Decl *StaticDecl = Call->getDecl();
  assert(RuntimeCallee);
  // With no static declaration available, conservatively report a mismatch.
  if (!StaticDecl)
    return true;
  return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
}
196
// Returns the number of elements in the array currently being destructed.
// If the element count is not found 0 will be returned.
// NOTE(review): the line carrying the function name and the first line of the
// assert are elided in this extraction; the call site in mayInlineCallKind
// names it getElementCountOfArrayBeingDestructed.
    const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
         "The call event is not a destructor call!");

  const auto &DtorCall = cast<CXXDestructorCall>(Call);

  auto ThisVal = DtorCall.getCXXThisVal();

  // An ElementRegion for 'this' tells us an array member is being destroyed;
  // recover the enclosing array region and query its dynamic element count.
  if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
    auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
    auto ElementType = ThisElementRegion->getElementType();

    auto ElementCount =
        getDynamicElementCount(State, ArrayRegion, SVB, ElementType);

    // A symbolic (non-constant) count is of no use to the caller.
    if (!ElementCount.isConstant())
      return 0;

    return ElementCount.getAsInteger()->getLimitedValue();
  }

  return 0;
}
223
224ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
225 ProgramStateRef State, const CXXConstructExpr *E,
226 const LocationContext *LCtx) {
227
228 assert(LCtx && "Location context must be provided!");
229
230 if (E) {
231 if (getPendingInitLoop(State, E, LCtx))
232 State = removePendingInitLoop(State, E, LCtx);
233
234 if (getIndexOfElementToConstruct(State, E, LCtx))
235 State = removeIndexOfElementToConstruct(State, E, LCtx);
236 }
237
238 if (getPendingArrayDestruction(State, LCtx))
239 State = removePendingArrayDestruction(State, LCtx);
240
241 return State;
242}
243
/// The call exit is simulated with a sequence of nodes, which occur between
/// CallExitBegin and CallExitEnd. The following operations occur between the
/// two program points:
/// 1. CallExitBegin (triggers the start of call exit sequence)
/// 2. Bind the return value
/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
/// 4. CallExitEnd
/// 5. PostStmt<CallExpr>
/// Steps 1-3. happen in the callee context; but there is a context switch and
/// steps 4-5. happen in the caller context.
// NOTE(review): the function signature is elided in this extraction; judging
// from the doc comment and the CEBNode parameter this is the body of
// ExprEngine::processCallExit — confirm against the full source. Several
// interior lines are also elided; each spot is marked below.
  // Step 1 CEBNode was generated before the call.
  const StackFrameContext *CalleeCtx = CEBNode->getStackFrame();

  // The parent context might not be a stack frame, so make sure we
  // look up the first enclosing stack frame.
  const StackFrameContext *CallerCtx = CalleeCtx->getParent()->getStackFrame();

  const Expr *CE = CalleeCtx->getCallSite();
  ProgramStateRef State = CEBNode->getState();
  // Find the last statement in the function and the corresponding basic block.
  auto [LastSt, Blk] = getLastStmt(CEBNode);

  // Purge relative to the last return statement if there is one; otherwise
  // relative to the function's exit block.
  const CFGBlock *PrePurgeBlock =
      isa_and_nonnull<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
  // The first half of this process happens in the callee context:
  setCurrLocationContextAndBlock(CalleeCtx, PrePurgeBlock);

  // Generate a CallEvent /before/ cleaning the State, so that we can get the
  // correct value for 'this' (if necessary).
  // (line elided in extraction — presumably the CallEventManager declaration)
  CallEventRef<> Call = CEMgr.getCaller(CalleeCtx, State);

  // Step 2: generate node with bound return value: CEBNode -> BoundRetNode.

  // If this variable is set to 'true' the analyzer will evaluate the call
  // statement we are about to exit again, instead of continuing the execution
  // from the statement after the call. This is useful for non-POD type array
  // construction where the CXXConstructExpr is referenced only once in the CFG,
  // but we want to evaluate it as many times as many elements the array has.
  bool ShouldRepeatCall = false;

  if (const auto *DtorDecl =
          dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
    if (auto Idx = getPendingArrayDestruction(State, CallerCtx)) {
      // Keep re-entering the destructor until every array element is done.
      ShouldRepeatCall = *Idx > 0;

      auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), CalleeCtx);
      State = State->killBinding(ThisVal);
    }
  }

  // If the callee returns an expression, bind its value to CallExpr.
  if (CE) {
    if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
      const LocationContext *LCtx = CEBNode->getLocationContext();
      // FIXME: This tries to look up the return statement in the environment,
      // which is special cased to look up the subexpression RS->getRetValue()
      // in environment. Instead of relying on this hack, pass
      // RS->getRetValue() to getSVal() after checking it for nullness.
      SVal V = State->getSVal(RS, LCtx);

      // Ensure that the return type matches the type of the returned Expr.
      if (wasDifferentDeclUsedForInlining(Call, CalleeCtx)) {
        QualType ReturnedTy =
        // (initializer line elided in extraction)
        if (!ReturnedTy.isNull()) {
          V = adjustReturnValue(V, CE->getType(), ReturnedTy,
          // (final argument line elided in extraction)
        }
      }

      State = State->BindExpr(CE, CallerCtx, V);
    }

    // Bind the constructed object value to CXXConstructExpr.
    if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
      // (declaration line for `This` elided in extraction)
          svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), CalleeCtx);
      SVal ThisV = State->getSVal(This);
      // Load the value of the object bound at 'this'.
      ThisV = State->getSVal(ThisV.castAs<Loc>());
      State = State->BindExpr(CCE, CallerCtx, ThisV);

      ShouldRepeatCall = shouldRepeatCtorCall(State, CCE, CallerCtx);
    }

    if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
      // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
      // while to reach the actual CXXNewExpr element from here, so keep the
      // region for later use.
      // Additionally cast the return value of the inlined operator new
      // (which is of type 'void *') to the correct object type.
      SVal AllocV = State->getSVal(CNE, CallerCtx);
      AllocV = svalBuilder.evalCast(
          AllocV, CNE->getType(),
          getContext().getPointerType(getContext().VoidTy));

      State = addObjectUnderConstruction(State, CNE, CalleeCtx->getParent(),
                                         AllocV);
    }
  }

  // Unless the call is going to be repeated, the array-evaluation
  // bookkeeping for this call site is no longer needed.
  if (!ShouldRepeatCall) {
    State = removeStateTraitsUsedForArrayEvaluation(
        State, dyn_cast_or_null<CXXConstructExpr>(CE), CallerCtx);
  }

  // Step 3: BoundRetNode -> CleanedNodes
  // If we can find a statement and a block in the inlined function, run remove
  // dead bindings before returning from the call. This is important to ensure
  // that we report the issues such as leaks in the stack contexts in which
  // they occurred.
  ExplodedNodeSet CleanedNodes;
  if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
    static SimpleProgramPointTag RetValBind("ExprEngine", "Bind Return Value");
    auto Loc = isa<ReturnStmt>(LastSt)
                   ? ProgramPoint{PostStmt(LastSt, CalleeCtx, &RetValBind)}
                   : ProgramPoint{EpsilonPoint(CalleeCtx, /*Data1=*/nullptr,
                                               /*Data2=*/nullptr, &RetValBind)};

    ExplodedNode *BoundRetNode = Engine.makeNode(Loc, State, CEBNode);
    if (!BoundRetNode)
      return;

    // We call removeDead in the context of the callee.
    // (start of the removeDead(...) call elided in extraction)
        BoundRetNode, CleanedNodes, /*ReferenceStmt=*/nullptr, CalleeCtx,
        /*DiagnosticStmt=*/CalleeCtx->getAnalysisDeclContext()->getBody(),
    // (final argument of removeDead elided in extraction)
  } else {
    CleanedNodes.insert(CEBNode);
  }

  // The second half of this process happens in the caller context. This is an
  // exception to the general rule that the current LocationContext and Block
  // stay the same within a single call to dispatchWorkItem.
  // (line elided in extraction)
  setCurrLocationContextAndBlock(CallerCtx, CalleeCtx->getCallSiteBlock());
  SaveAndRestore CBISave(currStmtIdx, CalleeCtx->getIndex());

  for (ExplodedNode *N : CleanedNodes) {
    // Step 4: Generate the CallExitEnd node.
    // CleanedNodes -> CEENode
    CallExitEnd Loc(CalleeCtx, CallerCtx);
    ProgramStateRef CEEState = (N == CEBNode) ? State : N->getState();

    ExplodedNode *CEENode = Engine.makeNode(Loc, CEEState, N);
    if (!CEENode)
      return;

    // Step 5: Perform the post-condition check of the CallExpr and enqueue the
    // result onto the work list.
    // CEENode -> Dst -> WorkList

    // Re-attach the up-to-date state before invoking any checker callbacks.
    CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);

    ExplodedNodeSet DstPostPostCallCallback;
    getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, CEENode,
                                               *UpdatedCall, *this,
                                               /*wasInlined=*/true);
    ExplodedNodeSet DstPostCall;
    if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
      for (ExplodedNode *I : DstPostPostCallCallback) {
        // (start of the checker-manager call elided in extraction)
            cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
            /*wasInlined=*/true);
      }
    } else {
      DstPostCall.insert(DstPostPostCallCallback);
    }

    ExplodedNodeSet Dst;
    if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
      getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
                                                        *this,
                                                        /*wasInlined=*/true);
    } else if (CE &&
               !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
                 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
      getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
                                                 *this, /*wasInlined=*/true);
    } else {
      Dst.insert(DstPostCall);
    }

    // Enqueue the next element in the block.
    for (ExplodedNode *DstNode : Dst) {
      // When the call must be repeated, re-enqueue the call site itself
      // (same index); otherwise continue with the next CFG element.
      unsigned Idx = CalleeCtx->getIndex() + (ShouldRepeatCall ? 0 : 1);

      Engine.getWorkList()->enqueue(DstNode, CalleeCtx->getCallSiteBlock(),
                                    Idx);
    }
  }
}
438
439bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
440 // When there are no branches in the function, it means that there's no
441 // exponential complexity introduced by inlining such function.
442 // Such functions also don't trigger various fundamental problems
443 // with our inlining mechanism, such as the problem of
444 // inlined defensive checks. Hence isLinear().
445 const CFG *Cfg = ADC->getCFG();
446 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
447}
448
449bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
450 const CFG *Cfg = ADC->getCFG();
451 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
452}
453
454bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
455 const CFG *Cfg = ADC->getCFG();
456 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
457}
458
459void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
460 bool &IsRecursive, unsigned &StackDepth) {
461 IsRecursive = false;
462 StackDepth = 0;
463
464 while (LCtx) {
465 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
466 const Decl *DI = SFC->getDecl();
467
468 // Mark recursive (and mutually recursive) functions and always count
469 // them when measuring the stack depth.
470 if (DI == D) {
471 IsRecursive = true;
472 ++StackDepth;
473 LCtx = LCtx->getParent();
474 continue;
475 }
476
477 // Do not count the small functions when determining the stack depth.
478 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
479 if (!isSmall(CalleeADC))
480 ++StackDepth;
481 }
482 LCtx = LCtx->getParent();
483 }
484}
485
486// The GDM component containing the dynamic dispatch bifurcation info. When
487// the exact type of the receiver is not known, we want to explore both paths -
488// one on which we do inline it and the other one on which we don't. This is
489// done to ensure we do not drop coverage.
490// This is the map from the receiver region to a bool, specifying either we
491// consider this region's information precise or not along the given path.
namespace {
  // Dispatch decision recorded per receiver region on a given path; see the
  // comment above describing the dynamic dispatch bifurcation GDM component.
  enum DynamicDispatchMode {
    DynamicDispatchModeInlined = 1,
    DynamicDispatchModeConservative
  };
} // end anonymous namespace

// Map from a receiver region to the dispatch mode chosen along this path.
REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
                               const MemRegion *, unsigned)
// Set once a foreign (CTU) call has been bifurcated on this path; consulted
// in ctuBifurcate below.
REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
502
// Decide how to evaluate a call under cross-translation-unit (CTU) analysis:
// inline it directly, or bifurcate into a conservative first-phase
// evaluation plus an inlined second-phase (CTU worklist) evaluation so that
// coverage is not lost.
void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
                              NodeBuilder &Bldr, ExplodedNode *Pred,
                              ProgramStateRef State) {
  ProgramStateRef ConservativeEvalState = nullptr;
  if (Call.isForeign() && !isSecondPhaseCTU()) {
    const auto IK = AMgr.options.getCTUPhase1Inlining();
    const bool DoInline = IK == CTUPhase1InliningKind::All ||
    // (line elided in extraction — presumably the CTUPhase1InliningKind::Small
    //  case paired with the isSmall() test on the next line)
                          isSmall(AMgr.getAnalysisDeclContext(D)));
    if (DoInline) {
      inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
      return;
    }
    const bool BState = State->get<CTUDispatchBifurcation>();
    if (!BState) { // This is the first time we see this foreign function.
      // Enqueue it to be analyzed in the second (ctu) phase.
      inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
      // Conservatively evaluate in the first phase.
      ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
      conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
    } else {
      conservativeEvalCall(Call, Bldr, Pred, State);
    }
    return;
  }
  // Non-foreign calls, and all calls in the second CTU phase, are inlined on
  // the regular worklist.
  inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
}
530
// Construct a stack frame for callee `D`, map actual to formal arguments in
// the state, and enqueue the resulting CallEnter node on `WList` so the
// callee's body gets analyzed inline.
void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
                            const Decl *D, NodeBuilder &Bldr,
                            ExplodedNode *Pred, ProgramStateRef State) {
  assert(D);

  const LocationContext *CurLC = Pred->getLocationContext();
  const StackFrameContext *CallerSFC = CurLC->getStackFrame();
  const LocationContext *ParentOfCallee = CallerSFC;
  // For block calls, the callee's parent context is the block's invocation
  // context rather than the caller's frame.
  if (Call.getKind() == CE_Block &&
      !cast<BlockCall>(Call).isConversionFromLambda()) {
    const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
    assert(BR && "If we have the block definition we should have its region");
    AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
    ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
    // (argument line elided in this extraction)
                                                         BR);
  }

  // This may be NULL, but that's fine.
  const Expr *CallE = Call.getOriginExpr();

  // Construct a new stack frame for the callee.
  AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
  const StackFrameContext *CalleeSFC =
      CalleeADC->getStackFrame(ParentOfCallee, CallE, getCurrBlock(),
                               getNumVisitedCurrent(), currStmtIdx);

  CallEnter Loc(CallE, CalleeSFC, CurLC);

  // Construct a new state which contains the mapping from actual to
  // formal arguments.
  State = State->enterStackFrame(Call, CalleeSFC);

  bool isNew;
  if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
    N->addPredecessor(Pred, G);
    if (isNew)
      WList->enqueue(N);
  }

  // If we decided to inline the call, the successor has been manually
  // added onto the work list so remove it from the node builder.
  Bldr.takeNodes(Pred);

  NumInlinedCalls++;
  Engine.FunctionSummaries->bumpNumTimesInlined(D);

  // Do not mark as visited in the 2nd run (CTUWList), so the function will
  // be visited as top-level, this way we won't lose reports in non-ctu
  // mode. Considering the case when a function in a foreign TU calls back
  // into the main TU.
  // Note, during the 1st run, it doesn't matter if we mark the foreign
  // functions as visited (or not) because they can never appear as a top level
  // function in the main TU.
  if (!isSecondPhaseCTU())
    // Mark the decl as visited.
    if (VisitedCallees)
      VisitedCallees->insert(D);
}
590
// If the state carries a ReplayWithoutInlining marker (set when inlining of
// this call previously failed and is being replayed), strip the marker and
// return the updated state; otherwise return null.
// NOTE(review): the first line of the signature is elided in this extraction.
                                                  const Expr *CallE) {
  const void *ReplayState = State->get<ReplayWithoutInlining>();
  if (!ReplayState)
    return nullptr;

  assert(ReplayState == CallE && "Backtracked to the wrong call.");
  (void)CallE;

  return State->remove<ReplayWithoutInlining>();
}
602
// NOTE(review): the signature line is elided in this extraction; the body
// takes a CallExpr `CE`, a predecessor node `Pred`, and a destination set
// `dst` — confirm the exact signature against the full source.
                              ExplodedNodeSet &dst) {
  // Perform the previsit of the CallExpr.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);

  // Get the call in its initial state. We use this as a template to perform
  // all the checks.
  // (line elided in extraction — presumably the CallEventManager declaration)
  CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
      CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());

  // Evaluate the function call. We try each of the checkers
  // to see if the can evaluate the function call.
  ExplodedNodeSet dstCallEvaluated;
  for (ExplodedNode *N : dstPreVisit) {
    evalCall(dstCallEvaluated, N, *CallTemplate);
  }

  // Finally, perform the post-condition check of the CallExpr and store
  // the created nodes in 'Dst'.
  // Note that if the call was inlined, dstCallEvaluated will be empty.
  // The post-CallExpr check will occur in processCallExit.
  getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
                                             *this);
}
629
// Finalize construction of every temporary object that was built for this
// call's arguments, removing each object-under-construction record from the
// state. Returns the (possibly unchanged) state.
ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
                                                       const CallEvent &Call) {
  // WARNING: The state attached to 'Call' may be obsolete, do not call any
  // methods that rely on it!
  const Expr *E = Call.getOriginExpr();
  // FIXME: Constructors to placement arguments of operator new
  // are not supported yet.
  if (!E || isa<CXXNewExpr>(E))
    return State;

  const LocationContext *LC = Call.getLocationContext();
  for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
    // The AST argument index may differ from the CallEvent's argument index.
    unsigned I = Call.getASTArgumentIndex(CallI);
    if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
      SVal VV = *V;
      (void)VV;
      // (start of a sanity-check assert elided in this extraction)
          ->getStackFrame()->getParent()
          ->getStackFrame() == LC->getStackFrame());
      State = finishObjectConstruction(State, {E, I}, LC);
    }
  }

  return State;
}
655
656void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
657 ExplodedNode *Pred,
658 const CallEvent &Call) {
659 // WARNING: The state attached to 'Call' may be obsolete, do not call any
660 // methods that rely on it!
661 ProgramStateRef State = Pred->getState();
662 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
663 if (CleanedState == State) {
664 Dst.insert(Pred);
665 return;
666 }
667
668 const Expr *E = Call.getOriginExpr();
669 const LocationContext *LC = Call.getLocationContext();
670 NodeBuilder B(Pred, Dst, *currBldrCtx);
671 static SimpleProgramPointTag Tag("ExprEngine",
672 "Finish argument construction");
673 PreStmt PP(E, LC, &Tag);
674 B.generateNode(PP, CleanedState, Pred);
675}
676
// NOTE(review): the signature line is elided in this extraction; the body
// takes a destination set, a predecessor node `Pred`, and `CallTemplate` —
// confirm the exact signature against the full source.
                          const CallEvent &CallTemplate) {
  // NOTE: CallTemplate is called a "template" because its attached state may
  // be obsolete (compared to the state of Pred). The state-dependent methods
  // of CallEvent should be used only after a `cloneWithState` call that
  // attaches the up-to-date state to this template object.

  // Run any pre-call checks using the generic call interface.
  ExplodedNodeSet dstPreVisit;
  getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, CallTemplate,
                                            *this);

  // Actually evaluate the function call. We try each of the checkers
  // to see if the can evaluate the function call, and get a callback at
  // defaultEvalCall if all of them fail.
  ExplodedNodeSet dstCallEvaluated;
  // (start of the eval-call checker invocation elided in this extraction)
      dstCallEvaluated, dstPreVisit, CallTemplate, *this, EvalCallOptions());

  // If there were other constructors called for object-type arguments
  // of this call, clean them up.
  ExplodedNodeSet dstArgumentCleanup;
  for (ExplodedNode *I : dstCallEvaluated)
    finishArgumentConstruction(dstArgumentCleanup, I, CallTemplate);

  ExplodedNodeSet dstPostCall;
  getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
                                             CallTemplate, *this);

  // Escaping symbols conjured during invalidating the regions above.
  // Note that, for inlined calls the nodes were put back into the worklist,
  // so we can assume that every node belongs to a conservative call at this
  // point.

  // Run pointerEscape callback with the newly conjured symbols.
  // (declaration of `Escaped` elided in this extraction)
  for (ExplodedNode *I : dstPostCall) {
    ProgramStateRef State = I->getState();
    CallEventRef<> Call = CallTemplate.cloneWithState(State);
    NodeBuilder B(I, Dst, *currBldrCtx);
    Escaped.clear();
    {
      unsigned Arg = -1;
      for (const ParmVarDecl *PVD : Call->parameters()) {
        ++Arg;
        QualType ParamTy = PVD->getType();
        // Only non-const pointer/reference parameters with non-void pointees
        // are considered here.
        if (ParamTy.isNull() ||
            (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
          continue;
        QualType Pointee = ParamTy->getPointeeType();
        if (Pointee.isConstQualified() || Pointee->isVoidType())
          continue;
        if (const MemRegion *MR = Call->getArgSVal(Arg).getAsRegion())
          Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
      }
    }

    State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
    // (trailing arguments elided in this extraction)

    if (State == I->getState())
      Dst.insert(I);
    else
      B.generateNode(I->getLocation(), State, I);
  }
}
743
// Bind a value for the call's origin expression in `State`: known method
// families and constructors get their receiver/object value; otherwise a
// fresh symbol (or heap symbol for replaceable operator new) is conjured.
// NOTE(review): the first line of the signature is elided in this extraction.
                                     const LocationContext *LCtx,
                                     ProgramStateRef State) {
  const Expr *E = Call.getOriginExpr();
  const ConstCFGElementRef &Elem = Call.getCFGElementRef();
  if (!E)
    return State;

  // Some method families have known return values.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
    switch (Msg->getMethodFamily()) {
    default:
      break;
    case OMF_autorelease:
    case OMF_retain:
    case OMF_self: {
      // These methods return their receivers.
      return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
    }
    }
  } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
    // A constructor "returns" the value of the constructed object itself.
    SVal ThisV = C->getCXXThisVal();
    ThisV = State->getSVal(ThisV.castAs<Loc>());
    return State->BindExpr(E, LCtx, ThisV);
  }

  SVal R;
  QualType ResultTy = Call.getResultType();
  unsigned Count = getNumVisitedCurrent();
  if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
    // Conjure a temporary if the function returns an object by value.
    SVal Target;
    assert(RTC->getStmt() == Call.getOriginExpr());
    EvalCallOptions CallOpts; // FIXME: We won't really need those.
    std::tie(State, Target) = handleConstructionContext(
        Call.getOriginExpr(), State, currBldrCtx, LCtx,
        RTC->getConstructionContext(), CallOpts);
    const MemRegion *TargetR = Target.getAsRegion();
    assert(TargetR);
    // Invalidate the region so that it didn't look uninitialized. If this is
    // a field or element constructor, we do not want to invalidate
    // the whole structure. Pointer escape is meaningless because
    // the structure is a product of conservative evaluation
    // and therefore contains nothing interesting at this point.
    // (declaration of `ITraits` elided in this extraction)
    ITraits.setTrait(TargetR,
    // (trait-kind argument elided in this extraction)
    State = State->invalidateRegions(TargetR, Elem, Count, LCtx,
                                     /* CausesPointerEscape=*/false, nullptr,
                                     &Call, &ITraits);

    R = State->getSVal(Target.castAs<Loc>(), E->getType());
  } else {
    // Conjure a symbol if the return value is unknown.

    // See if we need to conjure a heap pointer instead of
    // a regular unknown pointer.
    const auto *CNE = dyn_cast<CXXNewExpr>(E);
    if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
      R = svalBuilder.getConjuredHeapSymbolVal(Elem, LCtx, E->getType(), Count);
      const MemRegion *MR = R.getAsRegion()->StripCasts();

      // Store the extent of the allocated object(s).
      SVal ElementCount;
      if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
        ElementCount = State->getSVal(SizeExpr, LCtx);
      } else {
        // Non-array new allocates exactly one element.
        ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
      }

      SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);

      SVal Size =
          svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
                                svalBuilder.getArrayIndexType());

      // FIXME: This line is to prevent a crash. For more details please check
      // issue #56264.
      if (Size.isUndef())
        Size = UnknownVal();

      State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
    } else {
      R = svalBuilder.conjureSymbolVal(Elem, LCtx, ResultTy, Count);
    }
  }
  return State->BindExpr(E, LCtx, R);
}
832
833// Conservatively evaluate call by invalidating regions and binding
834// a conjured return value.
835void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
836 ExplodedNode *Pred, ProgramStateRef State) {
837 State = Call.invalidateRegions(getNumVisitedCurrent(), State);
838 State = bindReturnValue(Call, Pred->getLocationContext(), State);
839
840 // And make the result node.
841 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
842 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
843}
844
845ExprEngine::CallInlinePolicy
846ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
847 AnalyzerOptions &Opts,
848 const EvalCallOptions &CallOpts) {
849 const LocationContext *CurLC = Pred->getLocationContext();
850 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
851 switch (Call.getKind()) {
852 case CE_Function:
854 case CE_Block:
855 break;
856 case CE_CXXMember:
859 return CIP_DisallowedAlways;
860 break;
861 case CE_CXXConstructor: {
863 return CIP_DisallowedAlways;
864
866
867 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
868
870 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
871 : nullptr;
872
873 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
874 !Opts.MayInlineCXXAllocator)
875 return CIP_DisallowedOnce;
876
877 if (CallOpts.IsArrayCtorOrDtor) {
878 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
879 return CIP_DisallowedOnce;
880 }
881
882 // Inlining constructors requires including initializers in the CFG.
883 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
884 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
885 (void)ADC;
886
887 // If the destructor is trivial, it's always safe to inline the constructor.
888 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
889 break;
890
891 // For other types, only inline constructors if destructor inlining is
892 // also enabled.
894 return CIP_DisallowedAlways;
895
897 // If we don't handle temporary destructors, we shouldn't inline
898 // their constructors.
899 if (CallOpts.IsTemporaryCtorOrDtor &&
900 !Opts.ShouldIncludeTemporaryDtorsInCFG)
901 return CIP_DisallowedOnce;
902
903 // If we did not find the correct this-region, it would be pointless
904 // to inline the constructor. Instead we will simply invalidate
905 // the fake temporary target.
907 return CIP_DisallowedOnce;
908
909 // If the temporary is lifetime-extended by binding it to a reference-type
910 // field within an aggregate, automatic destructors don't work properly.
912 return CIP_DisallowedOnce;
913 }
914
915 break;
916 }
918 // This doesn't really increase the cost of inlining ever, because
919 // the stack frame of the inherited constructor is trivial.
920 return CIP_Allowed;
921 }
922 case CE_CXXDestructor: {
924 return CIP_DisallowedAlways;
925
926 // Inlining destructors requires building the CFG correctly.
927 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
928 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
929 (void)ADC;
930
931 if (CallOpts.IsArrayCtorOrDtor) {
932 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
933 Call, Pred->getState(), svalBuilder))) {
934 return CIP_DisallowedOnce;
935 }
936 }
937
938 // Allow disabling temporary destructor inlining with a separate option.
939 if (CallOpts.IsTemporaryCtorOrDtor &&
940 !Opts.MayInlineCXXTemporaryDtors)
941 return CIP_DisallowedOnce;
942
943 // If we did not find the correct this-region, it would be pointless
944 // to inline the destructor. Instead we will simply invalidate
945 // the fake temporary target.
947 return CIP_DisallowedOnce;
948 break;
949 }
951 [[fallthrough]];
952 case CE_CXXAllocator:
953 if (Opts.MayInlineCXXAllocator)
954 break;
955 // Do not inline allocators until we model deallocators.
956 // This is unfortunate, but basically necessary for smart pointers and such.
957 return CIP_DisallowedAlways;
958 case CE_ObjCMessage:
959 if (!Opts.MayInlineObjCMethod)
960 return CIP_DisallowedAlways;
961 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
963 return CIP_DisallowedAlways;
964 break;
965 }
966
967 return CIP_Allowed;
968}
969
970/// Returns true if the given C++ class contains a member with the given name.
971static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
972 StringRef Name) {
973 const IdentifierInfo &II = Ctx.Idents.get(Name);
974 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
975}
976
977/// Returns true if the given C++ class is a container or iterator.
978///
979/// Our heuristic for this is whether it contains a method named 'begin()' or a
980/// nested type named 'iterator' or 'iterator_category'.
981static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
982 return hasMember(Ctx, RD, "begin") ||
983 hasMember(Ctx, RD, "iterator") ||
984 hasMember(Ctx, RD, "iterator_category");
985}
986
987/// Returns true if the given function refers to a method of a C++ container
988/// or iterator.
989///
990/// We generally do a poor job modeling most containers right now, and might
991/// prefer not to inline their methods.
992static bool isContainerMethod(const ASTContext &Ctx,
993 const FunctionDecl *FD) {
994 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
995 return isContainerClass(Ctx, MD->getParent());
996 return false;
997}
998
999/// Returns true if the given function is the destructor of a class named
1000/// "shared_ptr".
1001static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
1002 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
1003 if (!Dtor)
1004 return false;
1005
1006 const CXXRecordDecl *RD = Dtor->getParent();
1007 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
1008 if (II->isStr("shared_ptr"))
1009 return true;
1010
1011 return false;
1012}
1013
1014/// Returns true if the function in \p CalleeADC may be inlined in general.
1015///
1016/// This checks static properties of the function, such as its signature and
1017/// CFG, to determine whether the analyzer should ever consider inlining it,
1018/// in any context.
1019bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1020 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1021 // FIXME: Do not inline variadic calls.
1022 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1023 return false;
1024
1025 // Check certain C++-related inlining policies.
1026 ASTContext &Ctx = CalleeADC->getASTContext();
1027 if (Ctx.getLangOpts().CPlusPlus) {
1028 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1029 // Conditionally control the inlining of template functions.
1030 if (!Opts.MayInlineTemplateFunctions)
1031 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1032 return false;
1033
1034 // Conditionally control the inlining of C++ standard library functions.
1035 if (!Opts.MayInlineCXXStandardLibrary)
1036 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
1038 return false;
1039
1040 // Conditionally control the inlining of methods on objects that look
1041 // like C++ containers.
1042 if (!Opts.MayInlineCXXContainerMethods)
1043 if (!AMgr.isInCodeFile(FD->getLocation()))
1044 if (isContainerMethod(Ctx, FD))
1045 return false;
1046
1047 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1048 // We don't currently do a good job modeling shared_ptr because we can't
1049 // see the reference count, so treating as opaque is probably the best
1050 // idea.
1051 if (!Opts.MayInlineCXXSharedPtrDtor)
1052 if (isCXXSharedPtrDtor(FD))
1053 return false;
1054 }
1055 }
1056
1057 // It is possible that the CFG cannot be constructed.
1058 // Be safe, and check if the CalleeCFG is valid.
1059 const CFG *CalleeCFG = CalleeADC->getCFG();
1060 if (!CalleeCFG)
1061 return false;
1062
1063 // Do not inline large functions.
1064 if (isHuge(CalleeADC))
1065 return false;
1066
1067 // It is possible that the live variables analysis cannot be
1068 // run. If so, bail out.
1069 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1070 return false;
1071
1072 return true;
1073}
1074
1075bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1076 const ExplodedNode *Pred,
1077 const EvalCallOptions &CallOpts) {
1078 if (!D)
1079 return false;
1080
1081 AnalysisManager &AMgr = getAnalysisManager();
1082 AnalyzerOptions &Opts = AMgr.options;
1083 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
1084 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1085
1086 // The auto-synthesized bodies are essential to inline as they are
1087 // usually small and commonly used. Note: we should do this check early on to
1088 // ensure we always inline these calls.
1089 if (CalleeADC->isBodyAutosynthesized())
1090 return true;
1091
1092 if (!AMgr.shouldInlineCall())
1093 return false;
1094
1095 // Check if this function has been marked as non-inlinable.
1096 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1097 if (MayInline) {
1098 if (!*MayInline)
1099 return false;
1100
1101 } else {
1102 // We haven't actually checked the static properties of this function yet.
1103 // Do that now, and record our decision in the function summaries.
1104 if (mayInlineDecl(CalleeADC)) {
1105 Engine.FunctionSummaries->markMayInline(D);
1106 } else {
1107 Engine.FunctionSummaries->markShouldNotInline(D);
1108 return false;
1109 }
1110 }
1111
1112 // Check if we should inline a call based on its kind.
1113 // FIXME: this checks both static and dynamic properties of the call, which
1114 // means we're redoing a bit of work that could be cached in the function
1115 // summary.
1116 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1117 if (CIP != CIP_Allowed) {
1118 if (CIP == CIP_DisallowedAlways) {
1119 assert(!MayInline || *MayInline);
1120 Engine.FunctionSummaries->markShouldNotInline(D);
1121 }
1122 return false;
1123 }
1124
1125 // Do not inline if recursive or we've reached max stack frame count.
1126 bool IsRecursive = false;
1127 unsigned StackDepth = 0;
1128 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1129 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1130 (!isSmall(CalleeADC) || IsRecursive))
1131 return false;
1132
1133 // Do not inline large functions too many times.
1134 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1135 Opts.MaxTimesInlineLarge) &&
1136 isLarge(CalleeADC)) {
1137 NumReachedInlineCountMax++;
1138 return false;
1139 }
1140
1141 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1142 return false;
1143
1144 return true;
1145}
1146
1147bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1148 const CXXConstructExpr *CE,
1149 const LocationContext *LCtx) {
1150 if (!CE)
1151 return false;
1152
1153 // FIXME: Handle other arrays types.
1154 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1155 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1156
1157 // This might seem conter-intuitive at first glance, but the functions are
1158 // closely related. Reasoning about destructors depends only on the type
1159 // of the expression that initialized the memory region, which is the
1160 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1161 // to the function that reasons about destructor inlining. Also note that
1162 // if the constructors of the array elements are inlined, the destructors
1163 // can also be inlined and if the destructors can be inline, it's safe to
1164 // inline the constructors.
1165 return shouldInlineArrayDestruction(ArrSize);
1166 }
1167
1168 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1169 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1170 return shouldInlineArrayDestruction(*Size);
1171
1172 return false;
1173}
1174
1175bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1176
1177 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1178
1179 // Declaring a 0 element array is also possible.
1180 return Size <= maxAllowedSize && Size > 0;
1181}
1182
1183bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1184 const CXXConstructExpr *E,
1185 const LocationContext *LCtx) {
1186
1187 if (!E)
1188 return false;
1189
1190 auto Ty = E->getType();
1191
1192 // FIXME: Handle non constant array types
1193 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
1195 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1196 }
1197
1198 if (auto Size = getPendingInitLoop(State, E, LCtx))
1199 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1200
1201 return false;
1202}
1203
1205 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
1206 if (!ICall)
1207 return false;
1208
1209 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1210 if (!MD)
1211 return false;
1213 return false;
1214
1215 return MD->isTrivial();
1216}
1217
1219 const CallEvent &Call,
1220 const EvalCallOptions &CallOpts) {
1221 // Make sure we have the most recent state attached to the call.
1222 ProgramStateRef State = Pred->getState();
1223
1224 // Special-case trivial assignment operators.
1226 performTrivialCopy(Bldr, Pred, Call);
1227 return;
1228 }
1229
1230 const Expr *E = Call.getOriginExpr();
1231
1232 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1233 if (InlinedFailedState) {
1234 // If we already tried once and failed, make sure we don't retry later.
1235 State = InlinedFailedState;
1236 } else {
1237 RuntimeDefinition RD = Call.getRuntimeDefinition();
1238 Call.setForeign(RD.isForeign());
1239 const Decl *D = RD.getDecl();
1240 if (shouldInlineCall(Call, D, Pred, CallOpts)) {
1241 if (RD.mayHaveOtherDefinitions()) {
1243
1244 // Explore with and without inlining the call.
1245 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1246 BifurcateCall(RD.getDispatchRegion(), Call, D, Bldr, Pred);
1247 return;
1248 }
1249
1250 // Don't inline if we're not in any dynamic dispatch mode.
1251 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1252 conservativeEvalCall(Call, Bldr, Pred, State);
1253 return;
1254 }
1255 }
1256 ctuBifurcate(Call, D, Bldr, Pred, State);
1257 return;
1258 }
1259 }
1260
1261 // If we can't inline it, clean up the state traits used only if the function
1262 // is inlined.
1263 State = removeStateTraitsUsedForArrayEvaluation(
1264 State, dyn_cast_or_null<CXXConstructExpr>(E), Call.getLocationContext());
1265
1266 // Also handle the return value and invalidate the regions.
1267 conservativeEvalCall(Call, Bldr, Pred, State);
1268}
1269
1270void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1271 const CallEvent &Call, const Decl *D,
1272 NodeBuilder &Bldr, ExplodedNode *Pred) {
1273 assert(BifurReg);
1274 BifurReg = BifurReg->StripCasts();
1275
1276 // Check if we've performed the split already - note, we only want
1277 // to split the path once per memory region.
1278 ProgramStateRef State = Pred->getState();
1279 const unsigned *BState =
1280 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1281 if (BState) {
1282 // If we are on "inline path", keep inlining if possible.
1283 if (*BState == DynamicDispatchModeInlined)
1284 ctuBifurcate(Call, D, Bldr, Pred, State);
1285 // If inline failed, or we are on the path where we assume we
1286 // don't have enough info about the receiver to inline, conjure the
1287 // return value and invalidate the regions.
1288 conservativeEvalCall(Call, Bldr, Pred, State);
1289 return;
1290 }
1291
1292 // If we got here, this is the first time we process a message to this
1293 // region, so split the path.
1294 ProgramStateRef IState =
1295 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1296 DynamicDispatchModeInlined);
1297 ctuBifurcate(Call, D, Bldr, Pred, IState);
1298
1299 ProgramStateRef NoIState =
1300 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1301 DynamicDispatchModeConservative);
1302 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1303
1304 NumOfDynamicDispatchPathSplits++;
1305}
1306
1308 ExplodedNodeSet &Dst) {
1309 ExplodedNodeSet dstPreVisit;
1310 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1311
1312 NodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1313
1314 if (RS->getRetValue()) {
1315 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1316 ei = dstPreVisit.end(); it != ei; ++it) {
1317 B.generateNode(RS, *it, (*it)->getState());
1318 }
1319 }
1320}
#define V(N, I)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
#define STAT_COUNTER(VARNAME, DESC)
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Expr *CallE)
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
a trap message and trap category.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:226
SourceManager & getSourceManager()
Definition ASTContext.h:859
DeclarationNameTable DeclarationNames
Definition ASTContext.h:802
IdentifierTable & Idents
Definition ASTContext.h:798
const LangOptions & getLangOpts() const
Definition ASTContext.h:952
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *ParentLC, const BlockDecl *BD, const void *Data)
Obtain a context of the block invocation using its parent context.
static bool isInStdNamespace(const Decl *D)
const StackFrameContext * getStackFrame(LocationContext const *ParentLC, const Expr *E, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
ASTContext & getASTContext() const
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition CFG.h:632
bool empty() const
Definition CFG.h:980
succ_iterator succ_begin()
Definition CFG.h:1017
unsigned succ_size() const
Definition CFG.h:1035
Represents C++ constructor call.
Definition CFG.h:158
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition CFG.h:110
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition CFG.h:1250
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition CFG.h:1448
bool isLinear() const
Returns true if the CFG has no branches.
Definition CFG.cpp:5460
CFGBlock & getExit()
Definition CFG.h:1366
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition CFG.h:1443
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
CXXBasePath & front()
bool isAmbiguous(CanQualType BaseType) const
Determine whether the path from the most-derived type to the given base type is ambiguous (i....
Represents a call to a C++ constructor.
Definition ExprCXX.h:1552
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition ExprCXX.h:1663
Represents a C++ destructor within a class.
Definition DeclCXX.h:2889
Represents a static or instance method of a struct/union/class.
Definition DeclCXX.h:2136
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition DeclCXX.h:2275
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition DeclCXX.cpp:2753
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition DeclCXX.cpp:2732
Represents a C++ struct/union/class.
Definition DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition DeclCXX.h:1372
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
const StackFrameContext * getCalleeContext() const
Represents a point when we finish the call exit sequence (for inlined call).
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2946
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition DeclBase.h:991
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
This represents one expression.
Definition Expr.h:112
QualType getType() const
Definition Expr.h:144
Represents a function declaration or definition.
Definition Decl.h:2015
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition Decl.h:2392
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
It wraps the AnalysisDeclContext to represent both the call stack with the help of StackFrameContext ...
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrameContext * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition Decl.h:340
Represents a parameter to a function.
Definition Decl.h:1805
const StackFrameContext * getStackFrame() const
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition TypeBase.h:1004
QualType getCanonicalType() const
Definition TypeBase.h:8483
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition TypeBase.h:8504
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition Stmt.h:3166
Expr * getRetValue()
Definition Stmt.h:3193
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
const Expr * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition Stmt.h:86
bool isVoidType() const
Definition TypeBase.h:9034
bool isPointerType() const
Definition TypeBase.h:8668
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition TypeBase.h:8692
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition Type.cpp:1923
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:754
bool isObjCObjectPointerType() const
Definition TypeBase.h:8847
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
Represents a call to a C++ constructor.
Definition CallEvent.h:997
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition CallEvent.h:1028
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition CallEvent.h:1024
Represents a non-static C++ member function call, no matter how it is written.
Definition CallEvent.h:690
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Manages the lifetime of CallEvent objects.
Definition CallEvent.h:1374
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Represents an abstract call to a function or method along a particular path.
Definition CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition CallEvent.h:1491
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting function calls (including methods, constructors, destructors etc.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting function calls (including methods, constructors, destructors etc.
WorkList * getCTUWorkList() const
Definition CoreEngine.h:162
WorkList * getWorkList() const
Definition CoreEngine.h:161
ExplodedNodeSet is a set of ExplodedNode * elements with the invariant that its elements cannot be nu...
void insert(ExplodedNode *N)
const ProgramStateRef & getState() const
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const StackFrameContext * getStackFrame() const
const LocationContext * getLocationContext() const
std::optional< T > getLocationAs() const &
ExplodedNode * getFirstPred()
ProgramStateManager & getStateManager()
Definition ExprEngine.h:480
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition ExprEngine.h:819
void removeDeadOnEndOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
void processCallEnter(CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition ExprEngine.h:770
@ Inline_Minimal
Do minimal inlining of callees.
Definition ExprEngine.h:131
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
void setCurrLocationContextAndBlock(const LocationContext *LC, const CFGBlock *B)
Definition ExprEngine.h:245
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retreives which element is being constructed in a non-POD type array.
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition ExprEngine.h:215
StoreManager & getStoreManager()
Definition ExprEngine.h:483
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
ConstCFGElementRef getCFGElementRef() const
Definition ExprEngine.h:292
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retreives which element is being destructed in a non-POD type array.
void resetCurrLocationContextAndBlock()
Definition ExprEngine.h:260
CheckerManager & getCheckerManager() const
Definition ExprEngine.h:224
void processBeginOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
unsigned getNumVisitedCurrent() const
Definition ExprEngine.h:302
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition ExprEngine.h:217
const CFGBlock * getCurrBlock() const
Get the 'current' CFGBlock corresponding to the current work item (elementary analysis step handled b...
Definition ExprEngine.h:288
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
MemRegion - The root abstract class for all memory regions.
Definition MemRegion.h:98
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition CoreEngine.h:244
void takeNodes(const ExplodedNodeSet &S)
Definition CoreEngine.h:309
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred, bool MarkAsSink=false)
Generates a node in the ExplodedGraph.
Represents any expression that calls an Objective-C method.
Definition CallEvent.h:1261
CallEventManager & getCallEventManager()
Information about invalidation for a particular region/symbol.
Definition MemRegion.h:1662
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Defines the runtime definition of the called function.
Definition CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition CallEvent.h:137
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition SVals.cpp:180
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition SVals.h:83
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition Store.cpp:254
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlying region.
Definition SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXInheritedConstructor
Definition CallEvent.h:69
@ CE_CXXStaticOperator
Definition CallEvent.h:62
@ CE_CXXDestructor
Definition CallEvent.h:65
@ CE_CXXDeallocator
Definition CallEvent.h:73
@ CE_CXXAllocator
Definition CallEvent.h:72
@ CE_CXXConstructor
Definition CallEvent.h:68
@ CE_CXXMemberOperator
Definition CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
std::variant< struct RequiresDecl, struct HeaderDecl, struct UmbrellaDirDecl, struct ModuleDecl, struct ExcludeDecl, struct ExportDecl, struct ExportAsDecl, struct ExternModuleDecl, struct UseDecl, struct LinkDecl, struct ConfigMacrosDecl, struct ConflictDecl > Decl
All declarations that can appear in a module declaration.
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
Definition Address.h:330
CFGBlock::ConstCFGElementRef ConstCFGElementRef
Definition CFG.h:1227
@ ExpectedClass
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
U cast(CodeGen::Address addr)
Definition Address.h:327
unsigned long uint64_t
Hints for figuring out of a call should be inlined during evalCall().
Definition ExprEngine.h:94
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition ExprEngine.h:109
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition ExprEngine.h:104
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition ExprEngine.h:101
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition ExprEngine.h:97
Traits for storing the call processing policy inside GDM.