// ExprEngineCallAndReturn.cpp — clang 23.0.0git
// (Doxygen page header retained as a comment; the actual source file
// begins with the license banner below.)
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/ConstructionContext.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/EntryPointStats.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/SaveAndRestore.h"
#include <optional>
27
28using namespace clang;
29using namespace ento;
30
31#define DEBUG_TYPE "ExprEngine"
32
34 NumOfDynamicDispatchPathSplits,
35 "The # of times we split the path due to imprecise dynamic dispatch info");
36
37STAT_COUNTER(NumInlinedCalls, "The # of times we inlined a call");
38
39STAT_COUNTER(NumReachedInlineCountMax,
40 "The # of times we reached inline count maximum");
41
43 // Get the entry block in the CFG of the callee.
44 const CFGBlock *Entry = CE.getEntry();
45
46 // Validate the CFG.
47 assert(Entry->empty());
48 assert(Entry->succ_size() == 1);
49
50 // Get the solitary successor.
51 const CFGBlock *Succ = *(Entry->succ_begin());
52
53 // Construct an edge representing the starting location in the callee.
54 BlockEdge Loc(Entry, Succ, CE.getCalleeContext());
55
56 ProgramStateRef state = Pred->getState();
57
58 // Construct a new node, notify checkers that analysis of the function has
59 // begun, and add the resultant nodes to the worklist.
60 bool isNew;
61 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
62 Node->addPredecessor(Pred, G);
63 if (isNew) {
64 // FIXME: In the `processBeginOfFunction` callback
65 // `ExprEngine::getCurrLocationContext()` can be different from the
66 // `LocationContext` queried from e.g. the `ExplodedNode`s. I'm not
67 // touching this now because this commit is NFC; but in the future it would
68 // be nice to avoid this inconsistency.
69 ExplodedNodeSet DstBegin;
70 processBeginOfFunction(Node, DstBegin, Loc);
71 Engine.enqueue(DstBegin);
72 }
73}
74
// Find the last statement on the path to the exploded node and the
// corresponding Block. Walks the ExplodedGraph backwards from \p Node,
// staying within \p Node's stack frame, and returns:
//  - the last StmtPoint's statement (or a CallExitEnd's call site), and
//  - the source block of the most recent BlockEdge seen along the way.
// Returns {nullptr, nullptr} if the walk runs out of predecessors.
static std::pair<const Stmt*,
                 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
  const Stmt *S = nullptr;
  const CFGBlock *Blk = nullptr;
  const StackFrame *SF = Node->getStackFrame();

  // Back up through the ExplodedGraph until we reach a statement node in this
  // stack frame.
  while (Node) {
    const ProgramPoint &PP = Node->getLocation();

    if (PP.getStackFrame() == SF) {
      if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
        S = SP->getStmt();
        break;
      } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
        S = CEE->getCalleeContext()->getCallSite();
        if (S)
          break;

        // If there is no statement, this is an implicitly-generated call.
        // We'll walk backwards over it and then continue the loop to find
        // an actual statement.
        std::optional<CallEnter> CE;
        do {
          Node = Node->getFirstPred();
          CE = Node->getLocationAs<CallEnter>();
        } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());

        // Continue searching the graph.
      } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
        // Remember the block we came from; paired with S in the result.
        Blk = BE->getSrc();
      }
    } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
      // If we reached the CallEnter for this function, it has no statements.
      if (CE->getCalleeContext() == SF)
        break;
    }

    // Ran out of graph to walk: nothing found in this frame.
    if (Node->pred_empty())
      return std::make_pair(nullptr, nullptr);

    Node = *Node->pred_begin();
  }

  return std::make_pair(S, Blk);
}
124
125/// Adjusts a return value when the called function's return type does not
126/// match the caller's expression type. This can happen when a dynamic call
127/// is devirtualized, and the overriding method has a covariant (more specific)
128/// return type than the parent's method. For C++ objects, this means we need
129/// to add base casts.
130static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
131 StoreManager &StoreMgr) {
132 // For now, the only adjustments we handle apply only to locations.
133 if (!isa<Loc>(V))
134 return V;
135
136 // If the types already match, don't do any unnecessary work.
137 ExpectedTy = ExpectedTy.getCanonicalType();
138 ActualTy = ActualTy.getCanonicalType();
139 if (ExpectedTy == ActualTy)
140 return V;
141
142 // No adjustment is needed between Objective-C pointer types.
143 if (ExpectedTy->isObjCObjectPointerType() &&
144 ActualTy->isObjCObjectPointerType())
145 return V;
146
147 // C++ object pointers may need "derived-to-base" casts.
149 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
150 if (ExpectedClass && ActualClass) {
151 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
152 /*DetectVirtual=*/false);
153 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
154 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
155 return StoreMgr.evalDerivedToBase(V, Paths.front());
156 }
157 }
158
159 // Unfortunately, Objective-C does not enforce that overridden methods have
160 // covariant return types, so we can't assert that that never happens.
161 // Be safe and return UnknownVal().
162 return UnknownVal();
163}
164
166 ExplodedNodeSet &Dst) {
167 // Find the last statement in the function and the corresponding basic block.
168 const Stmt *LastSt = nullptr;
169 const CFGBlock *Blk = nullptr;
170 std::tie(LastSt, Blk) = getLastStmt(Pred);
171 if (!Blk || !LastSt) {
172 Dst.insert(Pred);
173 return;
174 }
175
176 // Here, we destroy the current location context. We use the current
177 // function's entire body as a diagnostic statement, with which the program
178 // point will be associated. However, we only want to use LastStmt as a
179 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
180 // is dead.
181 const LocationContext *LCtx = Pred->getLocationContext();
182 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
185}
186
188 const StackFrame *calleeCtx) {
189 const Decl *RuntimeCallee = calleeCtx->getDecl();
190 const Decl *StaticDecl = Call->getDecl();
191 assert(RuntimeCallee);
192 if (!StaticDecl)
193 return true;
194 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
195}
196
197// Returns the number of elements in the array currently being destructed.
198// If the element count is not found 0 will be returned.
200 const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
202 "The call event is not a destructor call!");
203
204 const auto &DtorCall = cast<CXXDestructorCall>(Call);
205
206 auto ThisVal = DtorCall.getCXXThisVal();
207
208 if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
209 auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
210 auto ElementType = ThisElementRegion->getElementType();
211
212 auto ElementCount =
213 getDynamicElementCount(State, ArrayRegion, SVB, ElementType);
214
215 if (!ElementCount.isConstant())
216 return 0;
217
218 return ElementCount.getAsInteger()->getLimitedValue();
219 }
220
221 return 0;
222}
223
// Drops the program-state bookkeeping used while evaluating array
// construction/destruction element-by-element: the pending init loop and
// element index for \p E (if any), and any pending array destruction
// recorded for \p LCtx. Returns the cleaned state.
ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
    ProgramStateRef State, const CXXConstructExpr *E,
    const LocationContext *LCtx) {

  assert(LCtx && "Location context must be provided!");

  if (E) {
    if (getPendingInitLoop(State, E, LCtx))
      State = removePendingInitLoop(State, E, LCtx);

    if (getIndexOfElementToConstruct(State, E, LCtx))
      State = removeIndexOfElementToConstruct(State, E, LCtx);
  }

  if (getPendingArrayDestruction(State, LCtx))
    State = removePendingArrayDestruction(State, LCtx);

  return State;
}
243
244/// The call exit is simulated with a sequence of nodes, which occur between
245/// CallExitBegin and CallExitEnd. The following operations occur between the
246/// two program points:
247/// 1. CallExitBegin (triggers the start of call exit sequence)
248/// 2. Bind the return value
249/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
250/// 4. CallExitEnd
251/// 5. PostStmt<CallExpr>
252/// Steps 1-3. happen in the callee context; but there is a context switch and
253/// steps 4-5. happen in the caller context.
255 // Step 1 CEBNode was generated before the call.
256 const StackFrame *CalleeSF = CEBNode->getStackFrame();
257
258 // The parent context might not be a stack frame, so make sure we
259 // look up the first enclosing stack frame.
260 const StackFrame *CallerSF = CalleeSF->getParent()->getStackFrame();
261
262 const Expr *CE = CalleeSF->getCallSite();
263 ProgramStateRef State = CEBNode->getState();
264 // Find the last statement in the function and the corresponding basic block.
265 auto [LastSt, Blk] = getLastStmt(CEBNode);
266
267 const CFGBlock *PrePurgeBlock =
268 isa_and_nonnull<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
269 // The first half of this process happens in the callee context:
270 setCurrLocationContextAndBlock(CalleeSF, PrePurgeBlock);
271
272 // Generate a CallEvent /before/ cleaning the State, so that we can get the
273 // correct value for 'this' (if necessary).
275 CallEventRef<> Call = CEMgr.getCaller(CalleeSF, State);
276
277 // Step 2: generate node with bound return value: CEBNode -> BoundRetNode.
278
279 // If this variable is set to 'true' the analyzer will evaluate the call
280 // statement we are about to exit again, instead of continuing the execution
281 // from the statement after the call. This is useful for non-POD type array
282 // construction where the CXXConstructExpr is referenced only once in the CFG,
283 // but we want to evaluate it as many times as many elements the array has.
284 bool ShouldRepeatCall = false;
285
286 if (const auto *DtorDecl =
287 dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
288 if (auto Idx = getPendingArrayDestruction(State, CallerSF)) {
289 ShouldRepeatCall = *Idx > 0;
290
291 auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), CalleeSF);
292 State = State->killBinding(ThisVal);
293 }
294 }
295
296 // If the callee returns an expression, bind its value to CallExpr.
297 if (CE) {
298 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
299 const LocationContext *LCtx = CEBNode->getLocationContext();
300
301 SVal V = UndefinedVal();
302 if (RS->getRetValue())
303 V = State->getSVal(RS->getRetValue(), LCtx);
304
305 // Ensure that the return type matches the type of the returned Expr.
306 if (wasDifferentDeclUsedForInlining(Call, CalleeSF)) {
307 QualType ReturnedTy =
309 if (!ReturnedTy.isNull()) {
310 V = adjustReturnValue(V, CE->getType(), ReturnedTy,
312 }
313 }
314
315 State = State->BindExpr(CE, CallerSF, V);
316 }
317
318 // Bind the constructed object value to CXXConstructExpr.
319 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
321 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), CalleeSF);
322 SVal ThisV = State->getSVal(This);
323 ThisV = State->getSVal(ThisV.castAs<Loc>());
324 State = State->BindExpr(CCE, CallerSF, ThisV);
325
326 ShouldRepeatCall = shouldRepeatCtorCall(State, CCE, CallerSF);
327 }
328
329 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
330 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
331 // while to reach the actual CXXNewExpr element from here, so keep the
332 // region for later use.
333 // Additionally cast the return value of the inlined operator new
334 // (which is of type 'void *') to the correct object type.
335 SVal AllocV = State->getSVal(CNE, CallerSF);
336 AllocV = svalBuilder.evalCast(
337 AllocV, CNE->getType(),
338 getContext().getPointerType(getContext().VoidTy));
339
340 State =
341 addObjectUnderConstruction(State, CNE, CalleeSF->getParent(), AllocV);
342 }
343 }
344
345 if (!ShouldRepeatCall) {
346 State = removeStateTraitsUsedForArrayEvaluation(
347 State, dyn_cast_or_null<CXXConstructExpr>(CE), CallerSF);
348 }
349
350 // Step 3: BoundRetNode -> CleanedNodes
351 // If we can find a statement and a block in the inlined function, run remove
352 // dead bindings before returning from the call. This is important to ensure
353 // that we report the issues such as leaks in the stack contexts in which
354 // they occurred.
355 ExplodedNodeSet CleanedNodes;
356 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
357 static SimpleProgramPointTag RetValBind("ExprEngine", "Bind Return Value");
358 auto Loc = isa<ReturnStmt>(LastSt)
359 ? ProgramPoint{PostStmt(LastSt, CalleeSF, &RetValBind)}
360 : ProgramPoint{EpsilonPoint(CalleeSF, /*Data1=*/nullptr,
361 /*Data2=*/nullptr, &RetValBind)};
362
363 ExplodedNode *BoundRetNode = Engine.makeNode(Loc, State, CEBNode);
364 if (!BoundRetNode)
365 return;
366
367 // We call removeDead in the context of the callee.
368 removeDead(BoundRetNode, CleanedNodes, /*ReferenceStmt=*/nullptr, CalleeSF,
369 /*DiagnosticStmt=*/CalleeSF->getAnalysisDeclContext()->getBody(),
371 } else {
372 CleanedNodes.insert(CEBNode);
373 }
374
375 // The second half of this process happens in the caller context. This is an
376 // exception to the general rule that the current LocationContext and Block
377 // stay the same within a single call to dispatchWorkItem.
380 SaveAndRestore CBISave(currStmtIdx, CalleeSF->getIndex());
381
382 for (ExplodedNode *N : CleanedNodes) {
383 // Step 4: Generate the CallExitEnd node.
384 // CleanedNodes -> CEENode
385 CallExitEnd Loc(CalleeSF, CallerSF);
386 ProgramStateRef CEEState = (N == CEBNode) ? State : N->getState();
387
388 ExplodedNode *CEENode = Engine.makeNode(Loc, CEEState, N);
389 if (!CEENode)
390 return;
391
392 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
393 // result onto the work list.
394 // CEENode -> Dst -> WorkList
395
396 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
397
398 ExplodedNodeSet DstPostPostCallCallback;
399 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, CEENode,
400 *UpdatedCall, *this,
401 /*wasInlined=*/true);
402 ExplodedNodeSet DstPostCall;
403 if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
404 for (ExplodedNode *I : DstPostPostCallCallback) {
406 cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
407 /*wasInlined=*/true);
408 }
409 } else {
410 DstPostCall.insert(DstPostPostCallCallback);
411 }
412
413 ExplodedNodeSet Dst;
414 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
415 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
416 *this,
417 /*wasInlined=*/true);
418 } else if (CE &&
419 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
420 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
421 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
422 *this, /*wasInlined=*/true);
423 } else {
424 Dst.insert(DstPostCall);
425 }
426
427 // Enqueue the next element in the block.
428 for (ExplodedNode *DstNode : Dst) {
429 unsigned Idx = CalleeSF->getIndex() + (ShouldRepeatCall ? 0 : 1);
430
431 Engine.getWorkList()->enqueue(DstNode, CalleeSF->getCallSiteBlock(), Idx);
432 }
433 }
434}
435
436bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
437 // When there are no branches in the function, it means that there's no
438 // exponential complexity introduced by inlining such function.
439 // Such functions also don't trigger various fundamental problems
440 // with our inlining mechanism, such as the problem of
441 // inlined defensive checks. Hence isLinear().
442 const CFG *Cfg = ADC->getCFG();
443 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
444}
445
446bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
447 const CFG *Cfg = ADC->getCFG();
448 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
449}
450
451bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
452 const CFG *Cfg = ADC->getCFG();
453 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
454}
455
456void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
457 bool &IsRecursive, unsigned &StackDepth) {
458 IsRecursive = false;
459 StackDepth = 0;
460
461 while (LCtx) {
462 if (const StackFrame *SF = dyn_cast<StackFrame>(LCtx)) {
463 const Decl *DI = SF->getDecl();
464
465 // Mark recursive (and mutually recursive) functions and always count
466 // them when measuring the stack depth.
467 if (DI == D) {
468 IsRecursive = true;
469 ++StackDepth;
470 LCtx = LCtx->getParent();
471 continue;
472 }
473
474 // Do not count the small functions when determining the stack depth.
475 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
476 if (!isSmall(CalleeADC))
477 ++StackDepth;
478 }
479 LCtx = LCtx->getParent();
480 }
481}
482
483// The GDM component containing the dynamic dispatch bifurcation info. When
484// the exact type of the receiver is not known, we want to explore both paths -
485// one on which we do inline it and the other one on which we don't. This is
486// done to ensure we do not drop coverage.
487// This is the map from the receiver region to a bool, specifying either we
488// consider this region's information precise or not along the given path.
489namespace {
490 enum DynamicDispatchMode {
491 DynamicDispatchModeInlined = 1,
492 DynamicDispatchModeConservative
493 };
494} // end anonymous namespace
495
496REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
497 const MemRegion *, unsigned)
498REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
499
500void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
501 NodeBuilder &Bldr, ExplodedNode *Pred,
502 ProgramStateRef State) {
503 ProgramStateRef ConservativeEvalState = nullptr;
504 if (Call.isForeign() && !isSecondPhaseCTU()) {
505 const auto IK = AMgr.options.getCTUPhase1Inlining();
506 const bool DoInline = IK == CTUPhase1InliningKind::All ||
508 isSmall(AMgr.getAnalysisDeclContext(D)));
509 if (DoInline) {
510 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
511 return;
512 }
513 const bool BState = State->get<CTUDispatchBifurcation>();
514 if (!BState) { // This is the first time we see this foreign function.
515 // Enqueue it to be analyzed in the second (ctu) phase.
516 inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
517 // Conservatively evaluate in the first phase.
518 ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
519 conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
520 } else {
521 conservativeEvalCall(Call, Bldr, Pred, State);
522 }
523 return;
524 }
525 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
526}
527
528void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
529 const Decl *D, NodeBuilder &Bldr,
530 ExplodedNode *Pred, ProgramStateRef State) {
531 assert(D);
532
533 const LocationContext *CurLC = Pred->getLocationContext();
534 const StackFrame *CallerSF = CurLC->getStackFrame();
535 const BlockDataRegion *BlockInvocationData = nullptr;
536 if (Call.getKind() == CE_Block &&
537 !cast<BlockCall>(Call).isConversionFromLambda()) {
538 BlockInvocationData = cast<BlockCall>(Call).getBlockRegion();
539 assert(BlockInvocationData &&
540 "If we have the block definition we should have its region");
541 }
542
543 // This may be NULL, but that's fine.
544 const Expr *CallE = Call.getOriginExpr();
545
546 // Construct a new stack frame for the callee.
547 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
548 const StackFrame *CalleeSF = CalleeADC->getStackFrame(
549 CallerSF, BlockInvocationData, CallE, getCurrBlock(),
550 getNumVisitedCurrent(), currStmtIdx);
551
552 CallEnter Loc(CallE, CalleeSF, CurLC);
553
554 // Construct a new state which contains the mapping from actual to
555 // formal arguments.
556 State = State->enterStackFrame(Call, CalleeSF);
557
558 bool isNew;
559 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
560 N->addPredecessor(Pred, G);
561 if (isNew)
562 WList->enqueue(N);
563 }
564
565 // If we decided to inline the call, the successor has been manually
566 // added onto the work list so remove it from the node builder.
567 Bldr.takeNodes(Pred);
568
569 NumInlinedCalls++;
570 Engine.FunctionSummaries->bumpNumTimesInlined(D);
571
572 // Do not mark as visited in the 2nd run (CTUWList), so the function will
573 // be visited as top-level, this way we won't loose reports in non-ctu
574 // mode. Considering the case when a function in a foreign TU calls back
575 // into the main TU.
576 // Note, during the 1st run, it doesn't matter if we mark the foreign
577 // functions as visited (or not) because they can never appear as a top level
578 // function in the main TU.
579 if (!isSecondPhaseCTU())
580 // Mark the decl as visited.
581 if (VisitedCallees)
582 VisitedCallees->insert(D);
583}
584
586 const Expr *CallE) {
587 const void *ReplayState = State->get<ReplayWithoutInlining>();
588 if (!ReplayState)
589 return nullptr;
590
591 assert(ReplayState == CallE && "Backtracked to the wrong call.");
592 (void)CallE;
593
594 return State->remove<ReplayWithoutInlining>();
595}
596
598 ExplodedNodeSet &dst) {
599 // Perform the previsit of the CallExpr.
600 ExplodedNodeSet dstPreVisit;
601 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
602
603 // Get the call in its initial state. We use this as a template to perform
604 // all the checks.
606 CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
607 CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
608
609 // Evaluate the function call. We try each of the checkers
610 // to see if the can evaluate the function call.
611 ExplodedNodeSet dstCallEvaluated;
612 for (ExplodedNode *N : dstPreVisit) {
613 evalCall(dstCallEvaluated, N, *CallTemplate);
614 }
615
616 // Finally, perform the post-condition check of the CallExpr and store
617 // the created nodes in 'Dst'.
618 // Note that if the call was inlined, dstCallEvaluated will be empty.
619 // The post-CallExpr check will occur in processCallExit.
620 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
621 *this);
622}
623
624ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
625 const CallEvent &Call) {
626 // WARNING: The state attached to 'Call' may be obsolete, do not call any
627 // methods that rely on it!
628 const Expr *E = Call.getOriginExpr();
629 // FIXME: Constructors to placement arguments of operator new
630 // are not supported yet.
631 if (!E || isa<CXXNewExpr>(E))
632 return State;
633
634 const LocationContext *LC = Call.getLocationContext();
635 for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
636 unsigned I = Call.getASTArgumentIndex(CallI);
637 if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
638 SVal VV = *V;
639 (void)VV;
641 ->getStackFrame()->getParent()
642 ->getStackFrame() == LC->getStackFrame());
643 State = finishObjectConstruction(State, {E, I}, LC);
644 }
645 }
646
647 return State;
648}
649
650void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
651 ExplodedNode *Pred,
652 const CallEvent &Call) {
653 // WARNING: The state attached to 'Call' may be obsolete, do not call any
654 // methods that rely on it!
655 ProgramStateRef State = Pred->getState();
656 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
657 if (CleanedState == State) {
658 Dst.insert(Pred);
659 return;
660 }
661
662 const Expr *E = Call.getOriginExpr();
663 const LocationContext *LC = Call.getLocationContext();
664 static SimpleProgramPointTag Tag("ExprEngine",
665 "Finish argument construction");
666 Dst.insert(Engine.makeNode(PreStmt(E, LC, &Tag), CleanedState, Pred));
667}
668
670 const CallEvent &CallTemplate) {
671 // NOTE: CallTemplate is called a "template" because its attached state may
672 // be obsolete (compared to the state of Pred). The state-dependent methods
673 // of CallEvent should be used only after a `cloneWithState` call that
674 // attaches the up-to-date state to this template object.
675
676 // Run any pre-call checks using the generic call interface.
677 ExplodedNodeSet dstPreVisit;
678 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, CallTemplate,
679 *this);
680
681 // Actually evaluate the function call. We try each of the checkers
682 // to see if the can evaluate the function call, and get a callback at
683 // defaultEvalCall if all of them fail.
684 ExplodedNodeSet dstCallEvaluated;
686 dstCallEvaluated, dstPreVisit, CallTemplate, *this, EvalCallOptions());
687
688 // If there were other constructors called for object-type arguments
689 // of this call, clean them up.
690 ExplodedNodeSet dstArgumentCleanup;
691 for (ExplodedNode *I : dstCallEvaluated)
692 finishArgumentConstruction(dstArgumentCleanup, I, CallTemplate);
693
694 ExplodedNodeSet dstPostCall;
695 getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
696 CallTemplate, *this);
697
698 // Escaping symbols conjured during invalidating the regions above.
699 // Note that, for inlined calls the nodes were put back into the worklist,
700 // so we can assume that every node belongs to a conservative call at this
701 // point.
702
703 // Run pointerEscape callback with the newly conjured symbols.
705 for (ExplodedNode *I : dstPostCall) {
706 ProgramStateRef State = I->getState();
707 CallEventRef<> Call = CallTemplate.cloneWithState(State);
708 Escaped.clear();
709 {
710 unsigned Arg = -1;
711 for (const ParmVarDecl *PVD : Call->parameters()) {
712 ++Arg;
713 QualType ParamTy = PVD->getType();
714 if (ParamTy.isNull() ||
715 (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
716 continue;
717 QualType Pointee = ParamTy->getPointeeType();
718 if (Pointee.isConstQualified() || Pointee->isVoidType())
719 continue;
720 if (const MemRegion *MR = Call->getArgSVal(Arg).getAsRegion())
721 Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
722 }
723 }
724
725 State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
727
728 if (State != I->getState())
729 I = Engine.makeNode(I->getLocation(), State, I);
730
731 Dst.insert(I);
732 }
733}
734
736 const LocationContext *LCtx,
737 ProgramStateRef State) {
738 const Expr *E = Call.getOriginExpr();
739 const ConstCFGElementRef &Elem = Call.getCFGElementRef();
740 if (!E)
741 return State;
742
743 // Some method families have known return values.
744 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
745 switch (Msg->getMethodFamily()) {
746 default:
747 break;
748 case OMF_autorelease:
749 case OMF_retain:
750 case OMF_self: {
751 // These methods return their receivers.
752 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
753 }
754 }
755 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
756 SVal ThisV = C->getCXXThisVal();
757 ThisV = State->getSVal(ThisV.castAs<Loc>());
758 return State->BindExpr(E, LCtx, ThisV);
759 }
760
761 SVal R;
762 QualType ResultTy = Call.getResultType();
763 unsigned Count = getNumVisitedCurrent();
764 if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
765 // Conjure a temporary if the function returns an object by value.
766 SVal Target;
767 assert(RTC->getStmt() == Call.getOriginExpr());
768 EvalCallOptions CallOpts; // FIXME: We won't really need those.
769 std::tie(State, Target) = handleConstructionContext(
770 Call.getOriginExpr(), State, currBldrCtx, LCtx,
771 RTC->getConstructionContext(), CallOpts);
772 const MemRegion *TargetR = Target.getAsRegion();
773 assert(TargetR);
774 // Invalidate the region so that it didn't look uninitialized. If this is
775 // a field or element constructor, we do not want to invalidate
776 // the whole structure. Pointer escape is meaningless because
777 // the structure is a product of conservative evaluation
778 // and therefore contains nothing interesting at this point.
780 ITraits.setTrait(TargetR,
782 State = State->invalidateRegions(TargetR, Elem, Count, LCtx,
783 /* CausesPointerEscape=*/false, nullptr,
784 &Call, &ITraits);
785
786 R = State->getSVal(Target.castAs<Loc>(), E->getType());
787 } else {
788 // Conjure a symbol if the return value is unknown.
789
790 // See if we need to conjure a heap pointer instead of
791 // a regular unknown pointer.
792 const auto *CNE = dyn_cast<CXXNewExpr>(E);
793 if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
794 R = svalBuilder.getConjuredHeapSymbolVal(Elem, LCtx, E->getType(), Count);
795 const MemRegion *MR = R.getAsRegion()->StripCasts();
796
797 // Store the extent of the allocated object(s).
798 SVal ElementCount;
799 if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
800 ElementCount = State->getSVal(SizeExpr, LCtx);
801 } else {
802 ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
803 }
804
805 SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);
806
807 SVal Size =
808 svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
809 svalBuilder.getArrayIndexType());
810
811 // FIXME: This line is to prevent a crash. For more details please check
812 // issue #56264.
813 if (Size.isUndef())
814 Size = UnknownVal();
815
816 State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
817 } else {
818 R = svalBuilder.conjureSymbolVal(Elem, LCtx, ResultTy, Count);
819 }
820 }
821 return State->BindExpr(E, LCtx, R);
822}
823
824// Conservatively evaluate call by invalidating regions and binding
825// a conjured return value.
826void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
827 ExplodedNode *Pred, ProgramStateRef State) {
828 State = Call.invalidateRegions(getNumVisitedCurrent(), State);
829 State = bindReturnValue(Call, Pred->getLocationContext(), State);
830
831 // And make the result node.
832 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
833 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
834}
835
836ExprEngine::CallInlinePolicy
837ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
838 AnalyzerOptions &Opts,
839 const EvalCallOptions &CallOpts) {
840 const LocationContext *CurLC = Pred->getLocationContext();
841 const StackFrame *CallerSF = CurLC->getStackFrame();
842 switch (Call.getKind()) {
843 case CE_Function:
845 case CE_Block:
846 break;
847 case CE_CXXMember:
850 return CIP_DisallowedAlways;
851 break;
852 case CE_CXXConstructor: {
854 return CIP_DisallowedAlways;
855
857
858 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
859
861 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
862 : nullptr;
863
864 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
865 !Opts.MayInlineCXXAllocator)
866 return CIP_DisallowedOnce;
867
868 if (CallOpts.IsArrayCtorOrDtor) {
869 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
870 return CIP_DisallowedOnce;
871 }
872
873 // Inlining constructors requires including initializers in the CFG.
874 const AnalysisDeclContext *ADC = CallerSF->getAnalysisDeclContext();
875 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
876 (void)ADC;
877
878 // If the destructor is trivial, it's always safe to inline the constructor.
879 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
880 break;
881
882 // For other types, only inline constructors if destructor inlining is
883 // also enabled.
885 return CIP_DisallowedAlways;
886
888 // If we don't handle temporary destructors, we shouldn't inline
889 // their constructors.
890 if (CallOpts.IsTemporaryCtorOrDtor &&
891 !Opts.ShouldIncludeTemporaryDtorsInCFG)
892 return CIP_DisallowedOnce;
893
894 // If we did not find the correct this-region, it would be pointless
895 // to inline the constructor. Instead we will simply invalidate
896 // the fake temporary target.
898 return CIP_DisallowedOnce;
899
900 // If the temporary is lifetime-extended by binding it to a reference-type
901 // field within an aggregate, automatic destructors don't work properly.
903 return CIP_DisallowedOnce;
904 }
905
906 break;
907 }
909 // This doesn't really increase the cost of inlining ever, because
910 // the stack frame of the inherited constructor is trivial.
911 return CIP_Allowed;
912 }
913 case CE_CXXDestructor: {
915 return CIP_DisallowedAlways;
916
917 // Inlining destructors requires building the CFG correctly.
918 const AnalysisDeclContext *ADC = CallerSF->getAnalysisDeclContext();
919 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
920 (void)ADC;
921
922 if (CallOpts.IsArrayCtorOrDtor) {
923 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
924 Call, Pred->getState(), svalBuilder))) {
925 return CIP_DisallowedOnce;
926 }
927 }
928
929 // Allow disabling temporary destructor inlining with a separate option.
930 if (CallOpts.IsTemporaryCtorOrDtor &&
931 !Opts.MayInlineCXXTemporaryDtors)
932 return CIP_DisallowedOnce;
933
934 // If we did not find the correct this-region, it would be pointless
935 // to inline the destructor. Instead we will simply invalidate
936 // the fake temporary target.
938 return CIP_DisallowedOnce;
939 break;
940 }
942 [[fallthrough]];
943 case CE_CXXAllocator:
944 if (Opts.MayInlineCXXAllocator)
945 break;
946 // Do not inline allocators until we model deallocators.
947 // This is unfortunate, but basically necessary for smart pointers and such.
948 return CIP_DisallowedAlways;
949 case CE_ObjCMessage:
950 if (!Opts.MayInlineObjCMethod)
951 return CIP_DisallowedAlways;
952 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
954 return CIP_DisallowedAlways;
955 break;
956 }
957
958 return CIP_Allowed;
959}
960
961/// Returns true if the given C++ class contains a member with the given name.
962static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
963 StringRef Name) {
964 const IdentifierInfo &II = Ctx.Idents.get(Name);
965 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
966}
967
968/// Returns true if the given C++ class is a container or iterator.
969///
970/// Our heuristic for this is whether it contains a method named 'begin()' or a
971/// nested type named 'iterator' or 'iterator_category'.
972static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
973 return hasMember(Ctx, RD, "begin") ||
974 hasMember(Ctx, RD, "iterator") ||
975 hasMember(Ctx, RD, "iterator_category");
976}
977
978/// Returns true if the given function refers to a method of a C++ container
979/// or iterator.
980///
981/// We generally do a poor job modeling most containers right now, and might
982/// prefer not to inline their methods.
983static bool isContainerMethod(const ASTContext &Ctx,
984 const FunctionDecl *FD) {
985 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
986 return isContainerClass(Ctx, MD->getParent());
987 return false;
988}
989
990/// Returns true if the given function is the destructor of a class named
991/// "shared_ptr".
992static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
993 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
994 if (!Dtor)
995 return false;
996
997 const CXXRecordDecl *RD = Dtor->getParent();
998 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
999 if (II->isStr("shared_ptr"))
1000 return true;
1001
1002 return false;
1003}
1004
1005/// Returns true if the function in \p CalleeADC may be inlined in general.
1006///
1007/// This checks static properties of the function, such as its signature and
1008/// CFG, to determine whether the analyzer should ever consider inlining it,
1009/// in any context.
1010bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1011 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1012 // FIXME: Do not inline variadic calls.
1013 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1014 return false;
1015
1016 // Check certain C++-related inlining policies.
1017 ASTContext &Ctx = CalleeADC->getASTContext();
1018 if (Ctx.getLangOpts().CPlusPlus) {
1019 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1020 // Conditionally control the inlining of template functions.
1021 if (!Opts.MayInlineTemplateFunctions)
1022 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1023 return false;
1024
1025 // Conditionally control the inlining of C++ standard library functions.
1026 if (!Opts.MayInlineCXXStandardLibrary)
1027 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
1029 return false;
1030
1031 // Conditionally control the inlining of methods on objects that look
1032 // like C++ containers.
1033 if (!Opts.MayInlineCXXContainerMethods)
1034 if (!AMgr.isInCodeFile(FD->getLocation()))
1035 if (isContainerMethod(Ctx, FD))
1036 return false;
1037
1038 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1039 // We don't currently do a good job modeling shared_ptr because we can't
1040 // see the reference count, so treating as opaque is probably the best
1041 // idea.
1042 if (!Opts.MayInlineCXXSharedPtrDtor)
1043 if (isCXXSharedPtrDtor(FD))
1044 return false;
1045 }
1046 }
1047
1048 // It is possible that the CFG cannot be constructed.
1049 // Be safe, and check if the CalleeCFG is valid.
1050 const CFG *CalleeCFG = CalleeADC->getCFG();
1051 if (!CalleeCFG)
1052 return false;
1053
1054 // Do not inline large functions.
1055 if (isHuge(CalleeADC))
1056 return false;
1057
1058 // It is possible that the live variables analysis cannot be
1059 // run. If so, bail out.
1060 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1061 return false;
1062
1063 return true;
1064}
1065
1066bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1067 const ExplodedNode *Pred,
1068 const EvalCallOptions &CallOpts) {
1069 if (!D)
1070 return false;
1071
1072 AnalysisManager &AMgr = getAnalysisManager();
1073 AnalyzerOptions &Opts = AMgr.options;
1074 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
1075 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1076
1077 // The auto-synthesized bodies are essential to inline as they are
1078 // usually small and commonly used. Note: we should do this check early on to
1079 // ensure we always inline these calls.
1080 if (CalleeADC->isBodyAutosynthesized())
1081 return true;
1082
1083 if (!AMgr.shouldInlineCall())
1084 return false;
1085
1086 // Check if this function has been marked as non-inlinable.
1087 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1088 if (MayInline) {
1089 if (!*MayInline)
1090 return false;
1091
1092 } else {
1093 // We haven't actually checked the static properties of this function yet.
1094 // Do that now, and record our decision in the function summaries.
1095 if (mayInlineDecl(CalleeADC)) {
1096 Engine.FunctionSummaries->markMayInline(D);
1097 } else {
1098 Engine.FunctionSummaries->markShouldNotInline(D);
1099 return false;
1100 }
1101 }
1102
1103 // Check if we should inline a call based on its kind.
1104 // FIXME: this checks both static and dynamic properties of the call, which
1105 // means we're redoing a bit of work that could be cached in the function
1106 // summary.
1107 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1108 if (CIP != CIP_Allowed) {
1109 if (CIP == CIP_DisallowedAlways) {
1110 assert(!MayInline || *MayInline);
1111 Engine.FunctionSummaries->markShouldNotInline(D);
1112 }
1113 return false;
1114 }
1115
1116 // Do not inline if recursive or we've reached max stack frame count.
1117 bool IsRecursive = false;
1118 unsigned StackDepth = 0;
1119 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1120 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1121 (!isSmall(CalleeADC) || IsRecursive))
1122 return false;
1123
1124 // Do not inline large functions too many times.
1125 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1126 Opts.MaxTimesInlineLarge) &&
1127 isLarge(CalleeADC)) {
1128 NumReachedInlineCountMax++;
1129 return false;
1130 }
1131
1132 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1133 return false;
1134
1135 return true;
1136}
1137
1138bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1139 const CXXConstructExpr *CE,
1140 const LocationContext *LCtx) {
1141 if (!CE)
1142 return false;
1143
1144 // FIXME: Handle other arrays types.
1145 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1146 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1147
1148 // This might seem conter-intuitive at first glance, but the functions are
1149 // closely related. Reasoning about destructors depends only on the type
1150 // of the expression that initialized the memory region, which is the
1151 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1152 // to the function that reasons about destructor inlining. Also note that
1153 // if the constructors of the array elements are inlined, the destructors
1154 // can also be inlined and if the destructors can be inline, it's safe to
1155 // inline the constructors.
1156 return shouldInlineArrayDestruction(ArrSize);
1157 }
1158
1159 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1160 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1161 return shouldInlineArrayDestruction(*Size);
1162
1163 return false;
1164}
1165
1166bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1167
1168 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1169
1170 // Declaring a 0 element array is also possible.
1171 return Size <= maxAllowedSize && Size > 0;
1172}
1173
1174bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1175 const CXXConstructExpr *E,
1176 const LocationContext *LCtx) {
1177
1178 if (!E)
1179 return false;
1180
1181 auto Ty = E->getType();
1182
1183 // FIXME: Handle non constant array types
1184 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
1186 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1187 }
1188
1189 if (auto Size = getPendingInitLoop(State, E, LCtx))
1190 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1191
1192 return false;
1193}
1194
1196 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
1197 if (!ICall)
1198 return false;
1199
1200 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1201 if (!MD)
1202 return false;
1204 return false;
1205
1206 return MD->isTrivial();
1207}
1208
1210 const CallEvent &Call,
1211 const EvalCallOptions &CallOpts) {
1212 // Make sure we have the most recent state attached to the call.
1213 ProgramStateRef State = Pred->getState();
1214
1215 // Special-case trivial assignment operators.
1217 performTrivialCopy(Bldr, Pred, Call);
1218 return;
1219 }
1220
1221 const Expr *E = Call.getOriginExpr();
1222
1223 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1224 if (InlinedFailedState) {
1225 // If we already tried once and failed, make sure we don't retry later.
1226 State = InlinedFailedState;
1227 } else {
1228 RuntimeDefinition RD = Call.getRuntimeDefinition();
1229 Call.setForeign(RD.isForeign());
1230 const Decl *D = RD.getDecl();
1231 if (shouldInlineCall(Call, D, Pred, CallOpts)) {
1232 if (RD.mayHaveOtherDefinitions()) {
1234
1235 // Explore with and without inlining the call.
1236 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1237 BifurcateCall(RD.getDispatchRegion(), Call, D, Bldr, Pred);
1238 return;
1239 }
1240
1241 // Don't inline if we're not in any dynamic dispatch mode.
1242 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1243 conservativeEvalCall(Call, Bldr, Pred, State);
1244 return;
1245 }
1246 }
1247 ctuBifurcate(Call, D, Bldr, Pred, State);
1248 return;
1249 }
1250 }
1251
1252 // If we can't inline it, clean up the state traits used only if the function
1253 // is inlined.
1254 State = removeStateTraitsUsedForArrayEvaluation(
1255 State, dyn_cast_or_null<CXXConstructExpr>(E), Call.getLocationContext());
1256
1257 // Also handle the return value and invalidate the regions.
1258 conservativeEvalCall(Call, Bldr, Pred, State);
1259}
1260
1261void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1262 const CallEvent &Call, const Decl *D,
1263 NodeBuilder &Bldr, ExplodedNode *Pred) {
1264 assert(BifurReg);
1265 BifurReg = BifurReg->StripCasts();
1266
1267 // Check if we've performed the split already - note, we only want
1268 // to split the path once per memory region.
1269 ProgramStateRef State = Pred->getState();
1270 const unsigned *BState =
1271 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1272 if (BState) {
1273 // If we are on "inline path", keep inlining if possible.
1274 if (*BState == DynamicDispatchModeInlined)
1275 ctuBifurcate(Call, D, Bldr, Pred, State);
1276 // If inline failed, or we are on the path where we assume we
1277 // don't have enough info about the receiver to inline, conjure the
1278 // return value and invalidate the regions.
1279 conservativeEvalCall(Call, Bldr, Pred, State);
1280 return;
1281 }
1282
1283 // If we got here, this is the first time we process a message to this
1284 // region, so split the path.
1285 ProgramStateRef IState =
1286 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1287 DynamicDispatchModeInlined);
1288 ctuBifurcate(Call, D, Bldr, Pred, IState);
1289
1290 ProgramStateRef NoIState =
1291 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1292 DynamicDispatchModeConservative);
1293 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1294
1295 NumOfDynamicDispatchPathSplits++;
1296}
1297
1299 ExplodedNodeSet &Dst) {
1300 ExplodedNodeSet dstPreVisit;
1301 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1302
1303 NodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1304
1305 if (RS->getRetValue()) {
1306 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1307 ei = dstPreVisit.end(); it != ei; ++it) {
1308 B.generateNode(RS, *it, (*it)->getState());
1309 }
1310 }
1311}
#define V(N, I)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
#define STAT_COUNTER(VARNAME, DESC)
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrame *calleeCtx)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Expr *CallE)
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
a trap message and trap category.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:227
SourceManager & getSourceManager()
Definition ASTContext.h:866
DeclarationNameTable DeclarationNames
Definition ASTContext.h:809
IdentifierTable & Idents
Definition ASTContext.h:805
const LangOptions & getLangOpts() const
Definition ASTContext.h:959
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
static bool isInStdNamespace(const Decl *D)
ASTContext & getASTContext() const
const StackFrame * getStackFrame(LocationContext const *ParentLC, const void *Data, const Expr *E, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition CFG.h:632
bool empty() const
Definition CFG.h:980
succ_iterator succ_begin()
Definition CFG.h:1017
unsigned succ_size() const
Definition CFG.h:1035
Represents C++ constructor call.
Definition CFG.h:158
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition CFG.h:110
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition CFG.h:1250
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition CFG.h:1448
bool isLinear() const
Returns true if the CFG has no branches.
Definition CFG.cpp:5460
CFGBlock & getExit()
Definition CFG.h:1366
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition CFG.h:1443
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
CXXBasePath & front()
bool isAmbiguous(CanQualType BaseType) const
Determine whether the path from the most-derived type to the given base type is ambiguous (i....
Represents a call to a C++ constructor.
Definition ExprCXX.h:1552
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition ExprCXX.h:1663
Represents a C++ destructor within a class.
Definition DeclCXX.h:2882
Represents a static or instance method of a struct/union/class.
Definition DeclCXX.h:2132
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition DeclCXX.h:2271
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition DeclCXX.cpp:2753
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition DeclCXX.cpp:2732
Represents a C++ struct/union/class.
Definition DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition DeclCXX.h:1372
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
const StackFrame * getCalleeContext() const
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
Represents a point when we finish the call exit sequence (for inlined call).
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2946
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition DeclBase.h:991
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
This represents one expression.
Definition Expr.h:112
QualType getType() const
Definition Expr.h:144
Represents a function declaration or definition.
Definition Decl.h:2018
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition Decl.h:2395
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrame * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition Decl.h:340
Represents a parameter to a function.
Definition Decl.h:1808
const StackFrame * getStackFrame() const
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition TypeBase.h:1004
QualType getCanonicalType() const
Definition TypeBase.h:8488
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition TypeBase.h:8509
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition Stmt.h:3166
Expr * getRetValue()
Definition Stmt.h:3193
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
unsigned getIndex() const
const Expr * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition Stmt.h:86
bool isVoidType() const
Definition TypeBase.h:9039
bool isPointerType() const
Definition TypeBase.h:8673
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition TypeBase.h:8697
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition Type.cpp:1958
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:789
bool isObjCObjectPointerType() const
Definition TypeBase.h:8852
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
Represents a call to a C++ constructor.
Definition CallEvent.h:997
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition CallEvent.h:1028
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition CallEvent.h:1024
Represents a non-static C++ member function call, no matter how it is written.
Definition CallEvent.h:690
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Manages the lifetime of CallEvent objects.
Definition CallEvent.h:1374
CallEventRef getCaller(const StackFrame *CalleeSF, ProgramStateRef State)
Gets an outside caller given a callee context.
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
Represents an abstract call to a function or method along a particular path.
Definition CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition CallEvent.h:1490
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting function calls (including methods, constructors, destructors etc.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting function calls (including methods, constructors, destructors etc.
WorkList * getCTUWorkList() const
Definition CoreEngine.h:162
WorkList * getWorkList() const
Definition CoreEngine.h:161
ExplodedNodeSet is a set of ExplodedNode * elements with the invariant that its elements cannot be nu...
void insert(ExplodedNode *N)
const ProgramStateRef & getState() const
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const LocationContext * getLocationContext() const
std::optional< T > getLocationAs() const &
ExplodedNode * getFirstPred()
const StackFrame * getStackFrame() const
ProgramStateManager & getStateManager()
Definition ExprEngine.h:480
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition ExprEngine.h:819
void removeDeadOnEndOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
void processCallEnter(CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition ExprEngine.h:770
@ Inline_Minimal
Do minimal inlining of callees.
Definition ExprEngine.h:131
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
void setCurrLocationContextAndBlock(const LocationContext *LC, const CFGBlock *B)
Definition ExprEngine.h:245
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves which element is being constructed in a non-POD type array.
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition ExprEngine.h:215
StoreManager & getStoreManager()
Definition ExprEngine.h:483
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
ConstCFGElementRef getCFGElementRef() const
Definition ExprEngine.h:292
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retrieves which element is being destructed in a non-POD type array.
void resetCurrLocationContextAndBlock()
Definition ExprEngine.h:260
CheckerManager & getCheckerManager() const
Definition ExprEngine.h:224
void processBeginOfFunction(ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
unsigned getNumVisitedCurrent() const
Definition ExprEngine.h:302
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition ExprEngine.h:217
const CFGBlock * getCurrBlock() const
Get the 'current' CFGBlock corresponding to the current work item (elementary analysis step handled b...
Definition ExprEngine.h:288
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
MemRegion - The root abstract class for all memory regions.
Definition MemRegion.h:98
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition CoreEngine.h:267
void takeNodes(const ExplodedNodeSet &S)
Definition CoreEngine.h:332
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred, bool MarkAsSink=false)
Generates a node in the ExplodedGraph.
Represents any expression that calls an Objective-C method.
Definition CallEvent.h:1261
CallEventManager & getCallEventManager()
Information about invalidation for a particular region/symbol.
Definition MemRegion.h:1661
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Defines the runtime definition of the called function.
Definition CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition CallEvent.h:137
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition SVals.cpp:180
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition SVals.h:83
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition Store.cpp:254
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlining region.
Definition SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXInheritedConstructor
Definition CallEvent.h:69
@ CE_CXXStaticOperator
Definition CallEvent.h:62
@ CE_CXXDestructor
Definition CallEvent.h:65
@ CE_CXXDeallocator
Definition CallEvent.h:73
@ CE_CXXAllocator
Definition CallEvent.h:72
@ CE_CXXConstructor
Definition CallEvent.h:68
@ CE_CXXMemberOperator
Definition CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
std::variant< struct RequiresDecl, struct HeaderDecl, struct UmbrellaDirDecl, struct ModuleDecl, struct ExcludeDecl, struct ExportDecl, struct ExportAsDecl, struct ExternModuleDecl, struct UseDecl, struct LinkDecl, struct ConfigMacrosDecl, struct ConflictDecl > Decl
All declarations that can appear in a module declaration.
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
Definition Address.h:330
CFGBlock::ConstCFGElementRef ConstCFGElementRef
Definition CFG.h:1227
@ ExpectedClass
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
U cast(CodeGen::Address addr)
Definition Address.h:327
unsigned long uint64_t
Hints for figuring out if a call should be inlined during evalCall().
Definition ExprEngine.h:94
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition ExprEngine.h:109
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition ExprEngine.h:104
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition ExprEngine.h:101
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition ExprEngine.h:97
Traits for storing the call processing policy inside GDM.