clang 22.0.0git
ExprEngineCallAndReturn.cpp
Go to the documentation of this file.
1//=-- ExprEngineCallAndReturn.cpp - Support for call/return -----*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines ExprEngine's support for calls and returns.
10//
11//===----------------------------------------------------------------------===//
12
14#include "clang/AST/Decl.h"
15#include "clang/AST/DeclCXX.h"
23#include "llvm/Support/Casting.h"
24#include "llvm/Support/Compiler.h"
25#include "llvm/Support/SaveAndRestore.h"
26#include <optional>
27
28using namespace clang;
29using namespace ento;
30
31#define DEBUG_TYPE "ExprEngine"
32
34 NumOfDynamicDispatchPathSplits,
35 "The # of times we split the path due to imprecise dynamic dispatch info");
36
37STAT_COUNTER(NumInlinedCalls, "The # of times we inlined a call");
38
39STAT_COUNTER(NumReachedInlineCountMax,
40 "The # of times we reached inline count maximum");
41
// Transitions the analysis into an inlined callee: builds the BlockEdge from
// the callee CFG's (empty) entry block to its single successor, creates the
// corresponding exploded node, and -- only if that node is new -- runs the
// begin-of-function callbacks and enqueues the resulting nodes.
// NOTE(review): the declarator line for this definition (doxygen line 42) is
// missing from this excerpt; only the trailing parameter is visible here.
// Presumably ExprEngine::processCallEnter(...) -- confirm against upstream.
43 ExplodedNode *Pred) {
44 // Get the entry block in the CFG of the callee.
45 const CFGBlock *Entry = CE.getEntry();
46
47 // Validate the CFG.
48 assert(Entry->empty());
49 assert(Entry->succ_size() == 1);
50
51 // Get the solitary successor.
52 const CFGBlock *Succ = *(Entry->succ_begin());
53
54 // Construct an edge representing the starting location in the callee.
55 BlockEdge Loc(Entry, Succ, CE.getCalleeContext());
56
57 ProgramStateRef state = Pred->getState();
58
59 // Construct a new node, notify checkers that analysis of the function has
60 // begun, and add the resultant nodes to the worklist.
61 bool isNew;
62 ExplodedNode *Node = G.getNode(Loc, state, false, &isNew);
63 Node->addPredecessor(Pred, G);
// Re-visiting an existing node means this entry state was already explored;
// the callbacks run only for a newly created node.
64 if (isNew) {
65 ExplodedNodeSet DstBegin;
66 processBeginOfFunction(BC, Node, DstBegin, Loc);
67 Engine.enqueue(DstBegin);
68 }
69}
70
71// Find the last statement on the path to the exploded node and the
72// corresponding Block.
73static std::pair<const Stmt*,
74 const CFGBlock*> getLastStmt(const ExplodedNode *Node) {
75 const Stmt *S = nullptr;
76 const CFGBlock *Blk = nullptr;
77 const StackFrameContext *SF = Node->getStackFrame();
78
79 // Back up through the ExplodedGraph until we reach a statement node in this
80 // stack frame.
81 while (Node) {
82 const ProgramPoint &PP = Node->getLocation();
83
84 if (PP.getStackFrame() == SF) {
85 if (std::optional<StmtPoint> SP = PP.getAs<StmtPoint>()) {
86 S = SP->getStmt();
87 break;
88 } else if (std::optional<CallExitEnd> CEE = PP.getAs<CallExitEnd>()) {
89 S = CEE->getCalleeContext()->getCallSite();
90 if (S)
91 break;
92
93 // If there is no statement, this is an implicitly-generated call.
94 // We'll walk backwards over it and then continue the loop to find
95 // an actual statement.
96 std::optional<CallEnter> CE;
97 do {
98 Node = Node->getFirstPred();
99 CE = Node->getLocationAs<CallEnter>();
100 } while (!CE || CE->getCalleeContext() != CEE->getCalleeContext());
101
102 // Continue searching the graph.
103 } else if (std::optional<BlockEdge> BE = PP.getAs<BlockEdge>()) {
104 Blk = BE->getSrc();
105 }
106 } else if (std::optional<CallEnter> CE = PP.getAs<CallEnter>()) {
107 // If we reached the CallEnter for this function, it has no statements.
108 if (CE->getCalleeContext() == SF)
109 break;
110 }
111
112 if (Node->pred_empty())
113 return std::make_pair(nullptr, nullptr);
114
115 Node = *Node->pred_begin();
116 }
117
118 return std::make_pair(S, Blk);
119}
120
121/// Adjusts a return value when the called function's return type does not
122/// match the caller's expression type. This can happen when a dynamic call
123/// is devirtualized, and the overriding method has a covariant (more specific)
124/// return type than the parent's method. For C++ objects, this means we need
125/// to add base casts.
126static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy,
127 StoreManager &StoreMgr) {
128 // For now, the only adjustments we handle apply only to locations.
129 if (!isa<Loc>(V))
130 return V;
131
132 // If the types already match, don't do any unnecessary work.
133 ExpectedTy = ExpectedTy.getCanonicalType();
134 ActualTy = ActualTy.getCanonicalType();
135 if (ExpectedTy == ActualTy)
136 return V;
137
138 // No adjustment is needed between Objective-C pointer types.
139 if (ExpectedTy->isObjCObjectPointerType() &&
140 ActualTy->isObjCObjectPointerType())
141 return V;
142
143 // C++ object pointers may need "derived-to-base" casts.
// NOTE(review): the declaration of 'ExpectedClass' (doxygen line 144) is
// missing from this excerpt -- presumably
// ExpectedTy->getPointeeCXXRecordDecl(); confirm against upstream.
145 const CXXRecordDecl *ActualClass = ActualTy->getPointeeCXXRecordDecl();
146 if (ExpectedClass && ActualClass) {
147 CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
148 /*DetectVirtual=*/false);
149 if (ActualClass->isDerivedFrom(ExpectedClass, Paths) &&
150 !Paths.isAmbiguous(ActualTy->getCanonicalTypeUnqualified())) {
151 return StoreMgr.evalDerivedToBase(V, Paths.front());
152 }
153 }
154
155 // Unfortunately, Objective-C does not enforce that overridden methods have
156 // covariant return types, so we can't assert that that never happens.
157 // Be safe and return UnknownVal().
158 return UnknownVal();
159}
160
// Runs dead-binding removal when a function's analysis reaches its end.
// NOTE(review): the declarator line (doxygen line 161) is missing from this
// excerpt; only the trailing parameters are visible here.
162 ExplodedNode *Pred,
163 ExplodedNodeSet &Dst) {
164 // Find the last statement in the function and the corresponding basic block.
165 const Stmt *LastSt = nullptr;
166 const CFGBlock *Blk = nullptr;
167 std::tie(LastSt, Blk) = getLastStmt(Pred);
// If no statement/block could be located, there is nothing to purge; pass
// the predecessor through unchanged.
168 if (!Blk || !LastSt) {
169 Dst.Add(Pred);
170 return;
171 }
172
173 // Here, we destroy the current location context. We use the current
174 // function's entire body as a diagnostic statement, with which the program
175 // point will be associated. However, we only want to use LastStmt as a
176 // reference for what to clean up if it's a ReturnStmt; otherwise, everything
177 // is dead.
178 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
179 const LocationContext *LCtx = Pred->getLocationContext();
// NOTE(review): the trailing arguments of this removeDead() call (doxygen
// lines 181-182) are missing from this excerpt -- confirm against upstream.
180 removeDead(Pred, Dst, dyn_cast<ReturnStmt>(LastSt), LCtx,
183}
184
// Returns true when the declaration actually inlined (the runtime callee of
// the given stack frame) differs from the declaration named at the call site
// (or the static decl is unknown) -- i.e. the call was devirtualized.
// NOTE(review): the declarator line (doxygen line 185) is missing from this
// excerpt; only the trailing parameter is visible here.
186 const StackFrameContext *calleeCtx) {
187 const Decl *RuntimeCallee = calleeCtx->getDecl();
188 const Decl *StaticDecl = Call->getDecl();
189 assert(RuntimeCallee);
// With no statically-known decl we conservatively report a difference.
190 if (!StaticDecl)
191 return true;
192 return RuntimeCallee->getCanonicalDecl() != StaticDecl->getCanonicalDecl();
193}
194
195// Returns the number of elements in the array currently being destructed.
196// If the element count is not found 0 will be returned.
// NOTE(review): the function head (doxygen line 197) and the start of the
// assert (doxygen line 199) are missing from this excerpt -- confirm the
// exact signature against upstream.
198 const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB) {
200 "The call event is not a destructor call!");
201
202 const auto &DtorCall = cast<CXXDestructorCall>(Call);
203
204 auto ThisVal = DtorCall.getCXXThisVal();
205
// Only an ElementRegion 'this' identifies an element of a known array; any
// other region shape yields the "not found" result below.
206 if (auto ThisElementRegion = dyn_cast<ElementRegion>(ThisVal.getAsRegion())) {
207 auto ArrayRegion = ThisElementRegion->getAsArrayOffset().getRegion();
208 auto ElementType = ThisElementRegion->getElementType();
209
210 auto ElementCount =
211 getDynamicElementCount(State, ArrayRegion, SVB, ElementType);
212
// A symbolic (non-constant) count cannot be reported as a number.
213 if (!ElementCount.isConstant())
214 return 0;
215
216 return ElementCount.getAsInteger()->getLimitedValue();
217 }
218
219 return 0;
220}
221
222ProgramStateRef ExprEngine::removeStateTraitsUsedForArrayEvaluation(
223 ProgramStateRef State, const CXXConstructExpr *E,
224 const LocationContext *LCtx) {
225
226 assert(LCtx && "Location context must be provided!");
227
228 if (E) {
229 if (getPendingInitLoop(State, E, LCtx))
230 State = removePendingInitLoop(State, E, LCtx);
231
232 if (getIndexOfElementToConstruct(State, E, LCtx))
233 State = removeIndexOfElementToConstruct(State, E, LCtx);
234 }
235
236 if (getPendingArrayDestruction(State, LCtx))
237 State = removePendingArrayDestruction(State, LCtx);
238
239 return State;
240}
241
242/// The call exit is simulated with a sequence of nodes, which occur between
243/// CallExitBegin and CallExitEnd. The following operations occur between the
244/// two program points:
245/// 1. CallExitBegin (triggers the start of call exit sequence)
246/// 2. Bind the return value
247/// 3. Run Remove dead bindings to clean up the dead symbols from the callee.
248/// 4. CallExitEnd (switch to the caller context)
249/// 5. PostStmt<CallExpr>
// NOTE(review): the declarator line (doxygen line 250) is missing from this
// excerpt -- presumably ExprEngine::processCallExit(ExplodedNode *CEBNode);
// confirm against upstream.
251 // Step 1 CEBNode was generated before the call.
252 const StackFrameContext *calleeCtx = CEBNode->getStackFrame();
253
254 // The parent context might not be a stack frame, so make sure we
255 // look up the first enclosing stack frame.
256 const StackFrameContext *callerCtx =
257 calleeCtx->getParent()->getStackFrame();
258
259 const Stmt *CE = calleeCtx->getCallSite();
260 ProgramStateRef state = CEBNode->getState();
261 // Find the last statement in the function and the corresponding basic block.
262 const Stmt *LastSt = nullptr;
263 const CFGBlock *Blk = nullptr;
264 std::tie(LastSt, Blk) = getLastStmt(CEBNode);
265
266 // Generate a CallEvent /before/ cleaning the state, so that we can get the
267 // correct value for 'this' (if necessary).
// NOTE(review): the declaration of 'CEMgr' (doxygen line 268) is missing
// from this excerpt -- confirm against upstream.
269 CallEventRef<> Call = CEMgr.getCaller(calleeCtx, state);
270
271 // Step 2: generate node with bound return value: CEBNode -> BindedRetNode.
272
273 // If this variable is set to 'true' the analyzer will evaluate the call
274 // statement we are about to exit again, instead of continuing the execution
275 // from the statement after the call. This is useful for non-POD type array
276 // construction where the CXXConstructExpr is referenced only once in the CFG,
277 // but we want to evaluate it as many times as many elements the array has.
278 bool ShouldRepeatCall = false;
279
280 if (const auto *DtorDecl =
281 dyn_cast_or_null<CXXDestructorDecl>(Call->getDecl())) {
282 if (auto Idx = getPendingArrayDestruction(state, callerCtx)) {
283 ShouldRepeatCall = *Idx > 0;
284
285 auto ThisVal = svalBuilder.getCXXThis(DtorDecl->getParent(), calleeCtx);
286 state = state->killBinding(ThisVal);
287 }
288 }
289
290 // If the callee returns an expression, bind its value to CallExpr.
291 if (CE) {
292 if (const ReturnStmt *RS = dyn_cast_or_null<ReturnStmt>(LastSt)) {
293 const LocationContext *LCtx = CEBNode->getLocationContext();
294 SVal V = state->getSVal(RS, LCtx);
295
296 // Ensure that the return type matches the type of the returned Expr.
297 if (wasDifferentDeclUsedForInlining(Call, calleeCtx)) {
// NOTE(review): the initializer of 'ReturnedTy' (doxygen line 299) is
// missing from this excerpt -- confirm against upstream.
298 QualType ReturnedTy =
300 if (!ReturnedTy.isNull()) {
301 if (const Expr *Ex = dyn_cast<Expr>(CE)) {
// NOTE(review): the trailing argument of adjustReturnValue (doxygen line
// 303) is missing from this excerpt.
302 V = adjustReturnValue(V, Ex->getType(), ReturnedTy,
304 }
305 }
306 }
307
308 state = state->BindExpr(CE, callerCtx, V);
309 }
310
311 // Bind the constructed object value to CXXConstructExpr.
312 if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(CE)) {
// NOTE(review): the declaration of 'This' (doxygen line 313) is missing
// from this excerpt.
314 svalBuilder.getCXXThis(CCE->getConstructor()->getParent(), calleeCtx);
315 SVal ThisV = state->getSVal(This);
316 ThisV = state->getSVal(ThisV.castAs<Loc>());
317 state = state->BindExpr(CCE, callerCtx, ThisV);
318
319 ShouldRepeatCall = shouldRepeatCtorCall(state, CCE, callerCtx);
320 }
321
322 if (const auto *CNE = dyn_cast<CXXNewExpr>(CE)) {
323 // We are currently evaluating a CXXNewAllocator CFGElement. It takes a
324 // while to reach the actual CXXNewExpr element from here, so keep the
325 // region for later use.
326 // Additionally cast the return value of the inlined operator new
327 // (which is of type 'void *') to the correct object type.
328 SVal AllocV = state->getSVal(CNE, callerCtx);
329 AllocV = svalBuilder.evalCast(
330 AllocV, CNE->getType(),
331 getContext().getPointerType(getContext().VoidTy));
332
333 state = addObjectUnderConstruction(state, CNE, calleeCtx->getParent(),
334 AllocV);
335 }
336 }
337
// Array-evaluation traits survive only while the same call is re-entered.
338 if (!ShouldRepeatCall) {
339 state = removeStateTraitsUsedForArrayEvaluation(
340 state, dyn_cast_or_null<CXXConstructExpr>(CE), callerCtx);
341 }
342
343 // Step 3: BindedRetNode -> CleanedNodes
344 // If we can find a statement and a block in the inlined function, run remove
345 // dead bindings before returning from the call. This is important to ensure
346 // that we report the issues such as leaks in the stack contexts in which
347 // they occurred.
348 ExplodedNodeSet CleanedNodes;
349 if (LastSt && Blk && AMgr.options.AnalysisPurgeOpt != PurgeNone) {
350 static SimpleProgramPointTag retValBind("ExprEngine", "Bind Return Value");
351 auto Loc = isa<ReturnStmt>(LastSt)
352 ? ProgramPoint{PostStmt(LastSt, calleeCtx, &retValBind)}
353 : ProgramPoint{EpsilonPoint(calleeCtx, /*Data1=*/nullptr,
354 /*Data2=*/nullptr, &retValBind)};
355 const CFGBlock *PrePurgeBlock =
356 isa<ReturnStmt>(LastSt) ? Blk : &CEBNode->getCFG().getExit();
357 bool isNew;
358 ExplodedNode *BindedRetNode = G.getNode(Loc, state, false, &isNew);
359 BindedRetNode->addPredecessor(CEBNode, G);
// An already-existing node means this exit path was explored before.
360 if (!isNew)
361 return;
362
363 NodeBuilderContext Ctx(getCoreEngine(), PrePurgeBlock, BindedRetNode);
364 currBldrCtx = &Ctx;
365 // Here, we call the Symbol Reaper with 0 statement and callee location
366 // context, telling it to clean up everything in the callee's context
367 // (and its children). We use the callee's function body as a diagnostic
368 // statement, with which the program point will be associated.
// NOTE(review): the trailing argument of this removeDead() call (doxygen
// line 371) is missing from this excerpt.
369 removeDead(BindedRetNode, CleanedNodes, nullptr, calleeCtx,
370 calleeCtx->getAnalysisDeclContext()->getBody(),
372 currBldrCtx = nullptr;
373 } else {
374 CleanedNodes.Add(CEBNode);
375 }
376
377 for (ExplodedNode *N : CleanedNodes) {
378 // Step 4: Generate the CallExit and leave the callee's context.
379 // CleanedNodes -> CEENode
380 CallExitEnd Loc(calleeCtx, callerCtx);
381 bool isNew;
382 ProgramStateRef CEEState = (N == CEBNode) ? state : N->getState();
383
384 ExplodedNode *CEENode = G.getNode(Loc, CEEState, false, &isNew);
385 CEENode->addPredecessor(N, G);
386 if (!isNew)
387 return;
388
389 // Step 5: Perform the post-condition check of the CallExpr and enqueue the
390 // result onto the work list.
391 // CEENode -> Dst -> WorkList
392 NodeBuilderContext Ctx(Engine, calleeCtx->getCallSiteBlock(), CEENode);
393 SaveAndRestore<const NodeBuilderContext *> NBCSave(currBldrCtx, &Ctx);
394 SaveAndRestore CBISave(currStmtIdx, calleeCtx->getIndex());
395
396 CallEventRef<> UpdatedCall = Call.cloneWithState(CEEState);
397
398 ExplodedNodeSet DstPostCall;
399 if (llvm::isa_and_nonnull<CXXNewExpr>(CE)) {
400 ExplodedNodeSet DstPostPostCallCallback;
401 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback,
402 CEENode, *UpdatedCall, *this,
403 /*wasInlined=*/true);
404 for (ExplodedNode *I : DstPostPostCallCallback) {
// NOTE(review): the start of this call (doxygen line 405) is missing from
// this excerpt.
406 cast<CXXAllocatorCall>(*UpdatedCall), DstPostCall, I, *this,
407 /*wasInlined=*/true);
408 }
409 } else {
410 getCheckerManager().runCheckersForPostCall(DstPostCall, CEENode,
411 *UpdatedCall, *this,
412 /*wasInlined=*/true);
413 }
414 ExplodedNodeSet Dst;
415 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
416 getCheckerManager().runCheckersForPostObjCMessage(Dst, DstPostCall, *Msg,
417 *this,
418 /*wasInlined=*/true);
419 } else if (CE &&
420 !(isa<CXXNewExpr>(CE) && // Called when visiting CXXNewExpr.
421 AMgr.getAnalyzerOptions().MayInlineCXXAllocator)) {
422 getCheckerManager().runCheckersForPostStmt(Dst, DstPostCall, CE,
423 *this, /*wasInlined=*/true);
424 } else {
425 Dst.insert(DstPostCall);
426 }
427
428 // Enqueue the next element in the block.
429 for (ExplodedNodeSet::iterator PSI = Dst.begin(), PSE = Dst.end();
430 PSI != PSE; ++PSI) {
// A repeated call re-runs the same CFG element; otherwise move past it.
431 unsigned Idx = calleeCtx->getIndex() + (ShouldRepeatCall ? 0 : 1);
432
433 Engine.getWorkList()->enqueue(*PSI, calleeCtx->getCallSiteBlock(), Idx);
434 }
435 }
436}
437
438bool ExprEngine::isSmall(AnalysisDeclContext *ADC) const {
439 // When there are no branches in the function, it means that there's no
440 // exponential complexity introduced by inlining such function.
441 // Such functions also don't trigger various fundamental problems
442 // with our inlining mechanism, such as the problem of
443 // inlined defensive checks. Hence isLinear().
444 const CFG *Cfg = ADC->getCFG();
445 return Cfg->isLinear() || Cfg->size() <= AMgr.options.AlwaysInlineSize;
446}
447
448bool ExprEngine::isLarge(AnalysisDeclContext *ADC) const {
449 const CFG *Cfg = ADC->getCFG();
450 return Cfg->size() >= AMgr.options.MinCFGSizeTreatFunctionsAsLarge;
451}
452
453bool ExprEngine::isHuge(AnalysisDeclContext *ADC) const {
454 const CFG *Cfg = ADC->getCFG();
455 return Cfg->getNumBlockIDs() > AMgr.options.MaxInlinableSize;
456}
457
458void ExprEngine::examineStackFrames(const Decl *D, const LocationContext *LCtx,
459 bool &IsRecursive, unsigned &StackDepth) {
460 IsRecursive = false;
461 StackDepth = 0;
462
463 while (LCtx) {
464 if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LCtx)) {
465 const Decl *DI = SFC->getDecl();
466
467 // Mark recursive (and mutually recursive) functions and always count
468 // them when measuring the stack depth.
469 if (DI == D) {
470 IsRecursive = true;
471 ++StackDepth;
472 LCtx = LCtx->getParent();
473 continue;
474 }
475
476 // Do not count the small functions when determining the stack depth.
477 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(DI);
478 if (!isSmall(CalleeADC))
479 ++StackDepth;
480 }
481 LCtx = LCtx->getParent();
482 }
483}
484
485// The GDM component containing the dynamic dispatch bifurcation info. When
486// the exact type of the receiver is not known, we want to explore both paths -
487// one on which we do inline it and the other one on which we don't. This is
488// done to ensure we do not drop coverage.
489// This is the map from the receiver region to a bool, specifying either we
490// consider this region's information precise or not along the given path.
491namespace {
492 enum DynamicDispatchMode {
// Starts at 1 so that the map's default-constructed value (0) is
// distinguishable from either explicit mode.
493 DynamicDispatchModeInlined = 1,
494 DynamicDispatchModeConservative
495 };
496} // end anonymous namespace
497
// Per-path GDM storage: the bifurcation map described above, plus a flag
// recording whether a CTU-phase bifurcation already happened on this path.
498REGISTER_MAP_WITH_PROGRAMSTATE(DynamicDispatchBifurcationMap,
499 const MemRegion *, unsigned)
500REGISTER_TRAIT_WITH_PROGRAMSTATE(CTUDispatchBifurcation, bool)
501
// Decides how to evaluate a call during cross-translation-unit analysis. A
// foreign call in the first phase is either inlined directly, or -- on the
// first sighting -- enqueued for the second (CTU) phase while being
// conservatively evaluated here; subsequent sightings are only evaluated
// conservatively. All other calls are inlined on the regular worklist.
502void ExprEngine::ctuBifurcate(const CallEvent &Call, const Decl *D,
503 NodeBuilder &Bldr, ExplodedNode *Pred,
504 ProgramStateRef State) {
505 ProgramStateRef ConservativeEvalState = nullptr;
506 if (Call.isForeign() && !isSecondPhaseCTU()) {
507 const auto IK = AMgr.options.getCTUPhase1Inlining();
// NOTE(review): the middle of this boolean expression (doxygen line 509) is
// missing from this excerpt -- presumably the CTUPhase1InliningKind::Small
// case; confirm against upstream.
508 const bool DoInline = IK == CTUPhase1InliningKind::All ||
510 isSmall(AMgr.getAnalysisDeclContext(D)));
511 if (DoInline) {
512 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
513 return;
514 }
515 const bool BState = State->get<CTUDispatchBifurcation>();
516 if (!BState) { // This is the first time we see this foreign function.
517 // Enqueue it to be analyzed in the second (ctu) phase.
518 inlineCall(Engine.getCTUWorkList(), Call, D, Bldr, Pred, State);
519 // Conservatively evaluate in the first phase.
520 ConservativeEvalState = State->set<CTUDispatchBifurcation>(true);
521 conservativeEvalCall(Call, Bldr, Pred, ConservativeEvalState);
522 } else {
523 conservativeEvalCall(Call, Bldr, Pred, State);
524 }
525 return;
526 }
527 inlineCall(Engine.getWorkList(), Call, D, Bldr, Pred, State);
528}
529
// Inlines the given call: builds the callee's stack frame, maps actual to
// formal arguments in the state, adds the CallEnter node to \p WList, and
// removes the predecessor from the node builder so execution continues
// inside the callee.
530void ExprEngine::inlineCall(WorkList *WList, const CallEvent &Call,
531 const Decl *D, NodeBuilder &Bldr,
532 ExplodedNode *Pred, ProgramStateRef State) {
533 assert(D);
534
535 const LocationContext *CurLC = Pred->getLocationContext();
536 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
537 const LocationContext *ParentOfCallee = CallerSFC;
// Block invocations (other than lambda conversions) get a dedicated
// block-invocation context as the callee's parent.
538 if (Call.getKind() == CE_Block &&
539 !cast<BlockCall>(Call).isConversionFromLambda()) {
540 const BlockDataRegion *BR = cast<BlockCall>(Call).getBlockRegion();
541 assert(BR && "If we have the block definition we should have its region");
542 AnalysisDeclContext *BlockCtx = AMgr.getAnalysisDeclContext(D);
// NOTE(review): a middle argument of getBlockInvocationContext (doxygen
// line 544) is missing from this excerpt -- confirm against upstream.
543 ParentOfCallee = BlockCtx->getBlockInvocationContext(CallerSFC,
545 BR);
546 }
547
548 // This may be NULL, but that's fine.
549 const Expr *CallE = Call.getOriginExpr();
550
551 // Construct a new stack frame for the callee.
552 AnalysisDeclContext *CalleeADC = AMgr.getAnalysisDeclContext(D);
553 const StackFrameContext *CalleeSFC =
554 CalleeADC->getStackFrame(ParentOfCallee, CallE, currBldrCtx->getBlock(),
555 currBldrCtx->blockCount(), currStmtIdx);
556
557 CallEnter Loc(CallE, CalleeSFC, CurLC);
558
559 // Construct a new state which contains the mapping from actual to
560 // formal arguments.
561 State = State->enterStackFrame(Call, CalleeSFC);
562
563 bool isNew;
564 if (ExplodedNode *N = G.getNode(Loc, State, false, &isNew)) {
565 N->addPredecessor(Pred, G);
566 if (isNew)
567 WList->enqueue(N);
568 }
569
570 // If we decided to inline the call, the successor has been manually
571 // added onto the work list so remove it from the node builder.
572 Bldr.takeNodes(Pred);
573
574 NumInlinedCalls++;
575 Engine.FunctionSummaries->bumpNumTimesInlined(D);
576
577 // Do not mark as visited in the 2nd run (CTUWList), so the function will
578 // be visited as top-level, this way we won't loose reports in non-ctu
579 // mode. Considering the case when a function in a foreign TU calls back
580 // into the main TU.
581 // Note, during the 1st run, it doesn't matter if we mark the foreign
582 // functions as visited (or not) because they can never appear as a top level
583 // function in the main TU.
584 if (!isSecondPhaseCTU())
585 // Mark the decl as visited.
586 if (VisitedCallees)
587 VisitedCallees->insert(D);
588}
589
// When the path is replaying a call without inlining, verifies that the
// backtrack landed on the expected call site and returns the state with the
// replay marker removed; returns null when no replay is in progress.
// NOTE(review): the declarator line (doxygen line 590) is missing from this
// excerpt; only the trailing parameter is visible here.
591 const Stmt *CallE) {
592 const void *ReplayState = State->get<ReplayWithoutInlining>();
593 if (!ReplayState)
594 return nullptr;
595
596 assert(ReplayState == CallE && "Backtracked to the wrong call.");
597 (void)CallE;
598
599 return State->remove<ReplayWithoutInlining>();
600}
601
// Handles a CallExpr CFG element: runs the pre-statement checkers, builds a
// template CallEvent, evaluates the call for every surviving node, and runs
// the post-statement checkers on the results.
// NOTE(review): the declarator line (doxygen line 602) is missing from this
// excerpt; only the trailing parameter is visible here.
603 ExplodedNodeSet &dst) {
604 // Perform the previsit of the CallExpr.
605 ExplodedNodeSet dstPreVisit;
606 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, CE, *this);
607
608 // Get the call in its initial state. We use this as a template to perform
609 // all the checks.
// NOTE(review): the declaration of 'CEMgr' (doxygen line 610) is missing
// from this excerpt -- confirm against upstream.
611 CallEventRef<> CallTemplate = CEMgr.getSimpleCall(
612 CE, Pred->getState(), Pred->getLocationContext(), getCFGElementRef());
613
614 // Evaluate the function call. We try each of the checkers
615 // to see if the can evaluate the function call.
616 ExplodedNodeSet dstCallEvaluated;
617 for (ExplodedNode *N : dstPreVisit) {
618 evalCall(dstCallEvaluated, N, *CallTemplate);
619 }
620
621 // Finally, perform the post-condition check of the CallExpr and store
622 // the created nodes in 'Dst'.
623 // Note that if the call was inlined, dstCallEvaluated will be empty.
624 // The post-CallExpr check will occur in processCallExit.
625 getCheckerManager().runCheckersForPostStmt(dst, dstCallEvaluated, CE,
626 *this);
627}
628
// Finalizes any objects-under-construction recorded for this call's
// arguments, returning the cleaned state (unchanged if there were none).
629ProgramStateRef ExprEngine::finishArgumentConstruction(ProgramStateRef State,
630 const CallEvent &Call) {
631 // WARNING: The state attached to 'Call' may be obsolete, do not call any
632 // methods that rely on it!
633 const Expr *E = Call.getOriginExpr();
634 // FIXME: Constructors to placement arguments of operator new
635 // are not supported yet.
636 if (!E || isa<CXXNewExpr>(E))
637 return State;
638
639 const LocationContext *LC = Call.getLocationContext();
// Walk the call's arguments by their AST index and finish each one that has
// an object-under-construction entry.
640 for (unsigned CallI = 0, CallN = Call.getNumArgs(); CallI != CallN; ++CallI) {
641 unsigned I = Call.getASTArgumentIndex(CallI);
642 if (std::optional<SVal> V = getObjectUnderConstruction(State, {E, I}, LC)) {
643 SVal VV = *V;
644 (void)VV;
// NOTE(review): the start of an assert (doxygen line 645) is missing from
// this excerpt; the dangling expression below is its continuation --
// confirm against upstream.
646 ->getStackFrame()->getParent()
647 ->getStackFrame() == LC->getStackFrame());
648 State = finishObjectConstruction(State, {E, I}, LC);
649 }
650 }
651
652 return State;
653}
654
655void ExprEngine::finishArgumentConstruction(ExplodedNodeSet &Dst,
656 ExplodedNode *Pred,
657 const CallEvent &Call) {
658 // WARNING: The state attached to 'Call' may be obsolete, do not call any
659 // methods that rely on it!
660 ProgramStateRef State = Pred->getState();
661 ProgramStateRef CleanedState = finishArgumentConstruction(State, Call);
662 if (CleanedState == State) {
663 Dst.insert(Pred);
664 return;
665 }
666
667 const Expr *E = Call.getOriginExpr();
668 const LocationContext *LC = Call.getLocationContext();
669 NodeBuilder B(Pred, Dst, *currBldrCtx);
670 static SimpleProgramPointTag Tag("ExprEngine",
671 "Finish argument construction");
672 PreStmt PP(E, LC, &Tag);
673 B.generateNode(PP, CleanedState, Pred);
674}
675
// Evaluates a call template: pre-call checkers, checker/default evaluation,
// argument-construction cleanup, post-call checkers, and finally the
// pointer-escape callback for out-parameters of conservatively evaluated
// calls.
// NOTE(review): the declarator line (doxygen line 676) is missing from this
// excerpt; only the trailing parameter is visible here.
677 const CallEvent &CallTemplate) {
678 // NOTE: CallTemplate is called a "template" because its attached state may
679 // be obsolete (compared to the state of Pred). The state-dependent methods
680 // of CallEvent should be used only after a `cloneWithState` call that
681 // attaches the up-to-date state to this template object.
682
683 // Run any pre-call checks using the generic call interface.
684 ExplodedNodeSet dstPreVisit;
685 getCheckerManager().runCheckersForPreCall(dstPreVisit, Pred, CallTemplate,
686 *this);
687
688 // Actually evaluate the function call. We try each of the checkers
689 // to see if the can evaluate the function call, and get a callback at
690 // defaultEvalCall if all of them fail.
691 ExplodedNodeSet dstCallEvaluated;
// NOTE(review): the start of this call (doxygen line 692, presumably
// getCheckerManager().runCheckersForEvalCall(...) is missing from this
// excerpt -- confirm against upstream.
693 dstCallEvaluated, dstPreVisit, CallTemplate, *this, EvalCallOptions());
694
695 // If there were other constructors called for object-type arguments
696 // of this call, clean them up.
697 ExplodedNodeSet dstArgumentCleanup;
698 for (ExplodedNode *I : dstCallEvaluated)
699 finishArgumentConstruction(dstArgumentCleanup, I, CallTemplate);
700
701 ExplodedNodeSet dstPostCall;
702 getCheckerManager().runCheckersForPostCall(dstPostCall, dstArgumentCleanup,
703 CallTemplate, *this);
704
705 // Escaping symbols conjured during invalidating the regions above.
706 // Note that, for inlined calls the nodes were put back into the worklist,
707 // so we can assume that every node belongs to a conservative call at this
708 // point.
709
710 // Run pointerEscape callback with the newly conjured symbols.
// NOTE(review): the declaration of 'Escaped' (doxygen line 711) is missing
// from this excerpt -- confirm against upstream.
712 for (ExplodedNode *I : dstPostCall) {
713 ProgramStateRef State = I->getState();
714 CallEventRef<> Call = CallTemplate.cloneWithState(State);
715 NodeBuilder B(I, Dst, *currBldrCtx);
716 Escaped.clear();
717 {
718 unsigned Arg = -1;
// Collect every non-const, non-void out-parameter region together with its
// (possibly freshly conjured) value.
719 for (const ParmVarDecl *PVD : Call->parameters()) {
720 ++Arg;
721 QualType ParamTy = PVD->getType();
722 if (ParamTy.isNull() ||
723 (!ParamTy->isPointerType() && !ParamTy->isReferenceType()))
724 continue;
725 QualType Pointee = ParamTy->getPointeeType();
726 if (Pointee.isConstQualified() || Pointee->isVoidType())
727 continue;
728 if (const MemRegion *MR = Call->getArgSVal(Arg).getAsRegion())
729 Escaped.emplace_back(loc::MemRegionVal(MR), State->getSVal(MR, Pointee));
730 }
731 }
732
// NOTE(review): the trailing argument of this call (doxygen line 734) is
// missing from this excerpt -- confirm against upstream.
733 State = processPointerEscapedOnBind(State, Escaped, I->getLocationContext(),
735
736 if (State == I->getState())
737 Dst.insert(I);
738 else
739 B.generateNode(I->getLocation(), State, I);
740 }
741}
742
// Binds a value to the call's origin expression for a call that was NOT
// inlined: known Objective-C method families return their receiver,
// constructors return the constructed object, record-typed returns get a
// conjured temporary region, replaceable operator new gets a heap symbol
// (with its dynamic extent recorded), and everything else gets a conjured
// symbol.
// NOTE(review): the declarator line (doxygen line 743) is missing from this
// excerpt; only the trailing parameters are visible here.
744 const LocationContext *LCtx,
745 ProgramStateRef State) {
746 const Expr *E = Call.getOriginExpr();
747 const ConstCFGElementRef &Elem = Call.getCFGElementRef();
748 if (!E)
749 return State;
750
751 // Some method families have known return values.
752 if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(&Call)) {
753 switch (Msg->getMethodFamily()) {
754 default:
755 break;
756 case OMF_autorelease:
757 case OMF_retain:
758 case OMF_self: {
759 // These methods return their receivers.
760 return State->BindExpr(E, LCtx, Msg->getReceiverSVal());
761 }
762 }
763 } else if (const CXXConstructorCall *C = dyn_cast<CXXConstructorCall>(&Call)){
764 SVal ThisV = C->getCXXThisVal();
765 ThisV = State->getSVal(ThisV.castAs<Loc>());
766 return State->BindExpr(E, LCtx, ThisV);
767 }
768
769 SVal R;
770 QualType ResultTy = Call.getResultType();
771 unsigned Count = currBldrCtx->blockCount();
772 if (auto RTC = getCurrentCFGElement().getAs<CFGCXXRecordTypedCall>()) {
773 // Conjure a temporary if the function returns an object by value.
774 SVal Target;
775 assert(RTC->getStmt() == Call.getOriginExpr());
776 EvalCallOptions CallOpts; // FIXME: We won't really need those.
777 std::tie(State, Target) = handleConstructionContext(
778 Call.getOriginExpr(), State, currBldrCtx, LCtx,
779 RTC->getConstructionContext(), CallOpts);
780 const MemRegion *TargetR = Target.getAsRegion();
781 assert(TargetR);
782 // Invalidate the region so that it didn't look uninitialized. If this is
783 // a field or element constructor, we do not want to invalidate
784 // the whole structure. Pointer escape is meaningless because
785 // the structure is a product of conservative evaluation
786 // and therefore contains nothing interesting at this point.
// NOTE(review): the declaration of 'ITraits' (doxygen line 787) and the
// trait-constant argument of setTrait (doxygen line 789) are missing from
// this excerpt -- confirm against upstream.
788 ITraits.setTrait(TargetR,
790 State = State->invalidateRegions(TargetR, Elem, Count, LCtx,
791 /* CausesPointerEscape=*/false, nullptr,
792 &Call, &ITraits);
793
794 R = State->getSVal(Target.castAs<Loc>(), E->getType());
795 } else {
796 // Conjure a symbol if the return value is unknown.
797
798 // See if we need to conjure a heap pointer instead of
799 // a regular unknown pointer.
800 const auto *CNE = dyn_cast<CXXNewExpr>(E);
801 if (CNE && CNE->getOperatorNew()->isReplaceableGlobalAllocationFunction()) {
802 R = svalBuilder.getConjuredHeapSymbolVal(Elem, LCtx, E->getType(), Count);
803 const MemRegion *MR = R.getAsRegion()->StripCasts();
804
805 // Store the extent of the allocated object(s).
806 SVal ElementCount;
807 if (const Expr *SizeExpr = CNE->getArraySize().value_or(nullptr)) {
808 ElementCount = State->getSVal(SizeExpr, LCtx);
809 } else {
// A non-array new-expression allocates exactly one element.
810 ElementCount = svalBuilder.makeIntVal(1, /*IsUnsigned=*/true);
811 }
812
813 SVal ElementSize = getElementExtent(CNE->getAllocatedType(), svalBuilder);
814
815 SVal Size =
816 svalBuilder.evalBinOp(State, BO_Mul, ElementCount, ElementSize,
817 svalBuilder.getArrayIndexType());
818
819 // FIXME: This line is to prevent a crash. For more details please check
820 // issue #56264.
821 if (Size.isUndef())
822 Size = UnknownVal();
823
824 State = setDynamicExtent(State, MR, Size.castAs<DefinedOrUnknownSVal>());
825 } else {
826 R = svalBuilder.conjureSymbolVal(Elem, LCtx, ResultTy, Count);
827 }
828 }
829 return State->BindExpr(E, LCtx, R);
830}
831
832// Conservatively evaluate call by invalidating regions and binding
833// a conjured return value.
834void ExprEngine::conservativeEvalCall(const CallEvent &Call, NodeBuilder &Bldr,
835 ExplodedNode *Pred, ProgramStateRef State) {
836 State = Call.invalidateRegions(currBldrCtx->blockCount(), State);
837 State = bindReturnValue(Call, Pred->getLocationContext(), State);
838
839 // And make the result node.
840 static SimpleProgramPointTag PT("ExprEngine", "Conservative eval call");
841 Bldr.generateNode(Call.getProgramPoint(false, &PT), State, Pred);
842}
843
/// Decide, per call *kind*, whether this call may be inlined at this site,
/// given the analyzer options and per-call evaluation options. Returns
/// CIP_Allowed, CIP_DisallowedOnce, or CIP_DisallowedAlways.
///
/// NOTE(review): this extraction has dropped several source lines — the
/// embedded original line numbers below jump (e.g. 851 -> 853). The inline
/// notes mark where guard conditions appear to be missing; verify against
/// upstream before relying on this listing.
844ExprEngine::CallInlinePolicy
845ExprEngine::mayInlineCallKind(const CallEvent &Call, const ExplodedNode *Pred,
846 AnalyzerOptions &Opts,
847 const EvalCallOptions &CallOpts) {
  // Caller-side context, consulted for constructor construction contexts.
848 const LocationContext *CurLC = Pred->getLocationContext();
849 const StackFrameContext *CallerSFC = CurLC->getStackFrame();
850 switch (Call.getKind()) {
  // Plain functions and blocks carry no kind-specific restriction.
851 case CE_Function:
  // NOTE(review): original line 852 is missing here (presumably another
  // fall-through case label).
853 case CE_Block:
854 break;
855 case CE_CXXMember:
  // NOTE(review): lines 856-857 (the guard condition for this return,
  // presumably an Opts.mayInlineCXXMemberFunction(...) check) are missing.
858 return CIP_DisallowedAlways;
859 break;
860 case CE_CXXConstructor: {
  // NOTE(review): line 861 (the guard condition for this return) is missing.
862 return CIP_DisallowedAlways;
863
  // NOTE(review): line 864 is missing — 'Ctor', used below, is presumably
  // defined there (a cast of Call to a constructor-call type).
865
866 const CXXConstructExpr *CtorExpr = Ctor.getOriginExpr();
867
  // NOTE(review): line 868 is missing — 'CCE', used below, is presumably
  // defined there (the current CFG constructor element).
869 const ConstructionContext *CC = CCE ? CCE->getConstructionContext()
870 : nullptr;
871
  // Constructors of new-allocated objects are only inlined when allocator
  // modeling is enabled.
872 if (llvm::isa_and_nonnull<NewAllocatedObjectConstructionContext>(CC) &&
873 !Opts.MayInlineCXXAllocator)
874 return CIP_DisallowedOnce;
875
  // Array element constructors: only for sufficiently small arrays.
876 if (CallOpts.IsArrayCtorOrDtor) {
877 if (!shouldInlineArrayConstruction(Pred->getState(), CtorExpr, CurLC))
878 return CIP_DisallowedOnce;
879 }
880
881 // Inlining constructors requires including initializers in the CFG.
882 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
883 assert(ADC->getCFGBuildOptions().AddInitializers && "No CFG initializers");
884 (void)ADC;
885
886 // If the destructor is trivial, it's always safe to inline the constructor.
887 if (Ctor.getDecl()->getParent()->hasTrivialDestructor())
888 break;
889
890 // For other types, only inline constructors if destructor inlining is
891 // also enabled.
  // NOTE(review): line 892 (the guard condition for this return) is missing.
893 return CIP_DisallowedAlways;
894
  // NOTE(review): line 895 is missing — presumably a condition opening the
  // scope closed at line 912 below.
896 // If we don't handle temporary destructors, we shouldn't inline
897 // their constructors.
898 if (CallOpts.IsTemporaryCtorOrDtor &&
899 !Opts.ShouldIncludeTemporaryDtorsInCFG)
900 return CIP_DisallowedOnce;
901
902 // If we did not find the correct this-region, it would be pointless
903 // to inline the constructor. Instead we will simply invalidate
904 // the fake temporary target.
  // NOTE(review): line 905 (the guard condition for this return) is missing.
906 return CIP_DisallowedOnce;
907
908 // If the temporary is lifetime-extended by binding it to a reference-type
909 // field within an aggregate, automatic destructors don't work properly.
  // NOTE(review): line 910 (the guard condition for this return) is missing.
911 return CIP_DisallowedOnce;
912 }
913
914 break;
915 }
  // NOTE(review): line 916 is missing — presumably a case label (the comment
  // below mentions inherited constructors).
917 // This doesn't really increase the cost of inlining ever, because
918 // the stack frame of the inherited constructor is trivial.
919 return CIP_Allowed;
920 }
921 case CE_CXXDestructor: {
  // NOTE(review): line 922 (the guard condition for this return) is missing.
923 return CIP_DisallowedAlways;
924
925 // Inlining destructors requires building the CFG correctly.
926 const AnalysisDeclContext *ADC = CallerSFC->getAnalysisDeclContext();
927 assert(ADC->getCFGBuildOptions().AddImplicitDtors && "No CFG destructors");
928 (void)ADC;
929
  // Array element destructors: only for sufficiently small arrays.
930 if (CallOpts.IsArrayCtorOrDtor) {
931 if (!shouldInlineArrayDestruction(getElementCountOfArrayBeingDestructed(
932 Call, Pred->getState(), svalBuilder))) {
933 return CIP_DisallowedOnce;
934 }
935 }
936
937 // Allow disabling temporary destructor inlining with a separate option.
938 if (CallOpts.IsTemporaryCtorOrDtor &&
939 !Opts.MayInlineCXXTemporaryDtors)
940 return CIP_DisallowedOnce;
941
942 // If we did not find the correct this-region, it would be pointless
943 // to inline the destructor. Instead we will simply invalidate
944 // the fake temporary target.
  // NOTE(review): line 945 (the guard condition for this return) is missing.
946 return CIP_DisallowedOnce;
947 break;
948 }
  // NOTE(review): line 949 is missing — presumably the case label that
  // falls through to CE_CXXAllocator below.
950 [[fallthrough]];
951 case CE_CXXAllocator:
952 if (Opts.MayInlineCXXAllocator)
953 break;
954 // Do not inline allocators until we model deallocators.
955 // This is unfortunate, but basically necessary for smart pointers and such.
956 return CIP_DisallowedAlways;
957 case CE_ObjCMessage:
  // Objective-C methods are inlined only when explicitly enabled, and only
  // under a dynamic-dispatch IPA mode.
958 if (!Opts.MayInlineObjCMethod)
959 return CIP_DisallowedAlways;
960 if (!(Opts.getIPAMode() == IPAK_DynamicDispatch ||
  // NOTE(review): line 961 (the second operand of this disjunction,
  // presumably the bifurcate IPA mode) is missing.
962 return CIP_DisallowedAlways;
963 break;
964 }
965
  // No kind-specific objection was raised: the call may be inlined.
966 return CIP_Allowed;
967}
968
969/// Returns true if the given C++ class contains a member with the given name.
970static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD,
971 StringRef Name) {
972 const IdentifierInfo &II = Ctx.Idents.get(Name);
973 return RD->hasMemberName(Ctx.DeclarationNames.getIdentifier(&II));
974}
975
976/// Returns true if the given C++ class is a container or iterator.
977///
978/// Our heuristic for this is whether it contains a method named 'begin()' or a
979/// nested type named 'iterator' or 'iterator_category'.
980static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD) {
981 return hasMember(Ctx, RD, "begin") ||
982 hasMember(Ctx, RD, "iterator") ||
983 hasMember(Ctx, RD, "iterator_category");
984}
985
986/// Returns true if the given function refers to a method of a C++ container
987/// or iterator.
988///
989/// We generally do a poor job modeling most containers right now, and might
990/// prefer not to inline their methods.
991static bool isContainerMethod(const ASTContext &Ctx,
992 const FunctionDecl *FD) {
993 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(FD))
994 return isContainerClass(Ctx, MD->getParent());
995 return false;
996}
997
998/// Returns true if the given function is the destructor of a class named
999/// "shared_ptr".
1000static bool isCXXSharedPtrDtor(const FunctionDecl *FD) {
1001 const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(FD);
1002 if (!Dtor)
1003 return false;
1004
1005 const CXXRecordDecl *RD = Dtor->getParent();
1006 if (const IdentifierInfo *II = RD->getDeclName().getAsIdentifierInfo())
1007 if (II->isStr("shared_ptr"))
1008 return true;
1009
1010 return false;
1011}
1012
1013/// Returns true if the function in \p CalleeADC may be inlined in general.
1014///
1015/// This checks static properties of the function, such as its signature and
1016/// CFG, to determine whether the analyzer should ever consider inlining it,
1017/// in any context.
1018bool ExprEngine::mayInlineDecl(AnalysisDeclContext *CalleeADC) const {
1019 AnalyzerOptions &Opts = AMgr.getAnalyzerOptions();
1020 // FIXME: Do not inline variadic calls.
1021 if (CallEvent::isVariadic(CalleeADC->getDecl()))
1022 return false;
1023
1024 // Check certain C++-related inlining policies.
1025 ASTContext &Ctx = CalleeADC->getASTContext();
1026 if (Ctx.getLangOpts().CPlusPlus) {
1027 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(CalleeADC->getDecl())) {
1028 // Conditionally control the inlining of template functions.
1029 if (!Opts.MayInlineTemplateFunctions)
1030 if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate)
1031 return false;
1032
1033 // Conditionally control the inlining of C++ standard library functions.
1034 if (!Opts.MayInlineCXXStandardLibrary)
1035 if (Ctx.getSourceManager().isInSystemHeader(FD->getLocation()))
  // NOTE(review): original line 1036 is missing from this extraction — an
  // additional nested condition for this return; verify against upstream.
1037 return false;
1038
1039 // Conditionally control the inlining of methods on objects that look
1040 // like C++ containers.
1041 if (!Opts.MayInlineCXXContainerMethods)
1042 if (!AMgr.isInCodeFile(FD->getLocation()))
1043 if (isContainerMethod(Ctx, FD))
1044 return false;
1045
1046 // Conditionally control the inlining of the destructor of C++ shared_ptr.
1047 // We don't currently do a good job modeling shared_ptr because we can't
1048 // see the reference count, so treating as opaque is probably the best
1049 // idea.
1050 if (!Opts.MayInlineCXXSharedPtrDtor)
1051 if (isCXXSharedPtrDtor(FD))
1052 return false;
1053 }
1054 }
1055
1056 // It is possible that the CFG cannot be constructed.
1057 // Be safe, and check if the CalleeCFG is valid.
1058 const CFG *CalleeCFG = CalleeADC->getCFG();
1059 if (!CalleeCFG)
1060 return false;
1061
1062 // Do not inline large functions.
1063 if (isHuge(CalleeADC))
1064 return false;
1065
1066 // It is possible that the live variables analysis cannot be
1067 // run. If so, bail out.
1068 if (!CalleeADC->getAnalysis<RelaxedLiveVariables>())
1069 return false;
1070
  // All static checks passed: inlining this declaration is permissible.
1071 return true;
1072}
1073
1074bool ExprEngine::shouldInlineCall(const CallEvent &Call, const Decl *D,
1075 const ExplodedNode *Pred,
1076 const EvalCallOptions &CallOpts) {
1077 if (!D)
1078 return false;
1079
1080 AnalysisManager &AMgr = getAnalysisManager();
1081 AnalyzerOptions &Opts = AMgr.options;
1082 AnalysisDeclContextManager &ADCMgr = AMgr.getAnalysisDeclContextManager();
1083 AnalysisDeclContext *CalleeADC = ADCMgr.getContext(D);
1084
1085 // The auto-synthesized bodies are essential to inline as they are
1086 // usually small and commonly used. Note: we should do this check early on to
1087 // ensure we always inline these calls.
1088 if (CalleeADC->isBodyAutosynthesized())
1089 return true;
1090
1091 if (!AMgr.shouldInlineCall())
1092 return false;
1093
1094 // Check if this function has been marked as non-inlinable.
1095 std::optional<bool> MayInline = Engine.FunctionSummaries->mayInline(D);
1096 if (MayInline) {
1097 if (!*MayInline)
1098 return false;
1099
1100 } else {
1101 // We haven't actually checked the static properties of this function yet.
1102 // Do that now, and record our decision in the function summaries.
1103 if (mayInlineDecl(CalleeADC)) {
1104 Engine.FunctionSummaries->markMayInline(D);
1105 } else {
1106 Engine.FunctionSummaries->markShouldNotInline(D);
1107 return false;
1108 }
1109 }
1110
1111 // Check if we should inline a call based on its kind.
1112 // FIXME: this checks both static and dynamic properties of the call, which
1113 // means we're redoing a bit of work that could be cached in the function
1114 // summary.
1115 CallInlinePolicy CIP = mayInlineCallKind(Call, Pred, Opts, CallOpts);
1116 if (CIP != CIP_Allowed) {
1117 if (CIP == CIP_DisallowedAlways) {
1118 assert(!MayInline || *MayInline);
1119 Engine.FunctionSummaries->markShouldNotInline(D);
1120 }
1121 return false;
1122 }
1123
1124 // Do not inline if recursive or we've reached max stack frame count.
1125 bool IsRecursive = false;
1126 unsigned StackDepth = 0;
1127 examineStackFrames(D, Pred->getLocationContext(), IsRecursive, StackDepth);
1128 if ((StackDepth >= Opts.InlineMaxStackDepth) &&
1129 (!isSmall(CalleeADC) || IsRecursive))
1130 return false;
1131
1132 // Do not inline large functions too many times.
1133 if ((Engine.FunctionSummaries->getNumTimesInlined(D) >
1134 Opts.MaxTimesInlineLarge) &&
1135 isLarge(CalleeADC)) {
1136 NumReachedInlineCountMax++;
1137 return false;
1138 }
1139
1140 if (HowToInline == Inline_Minimal && (!isSmall(CalleeADC) || IsRecursive))
1141 return false;
1142
1143 return true;
1144}
1145
1146bool ExprEngine::shouldInlineArrayConstruction(const ProgramStateRef State,
1147 const CXXConstructExpr *CE,
1148 const LocationContext *LCtx) {
1149 if (!CE)
1150 return false;
1151
1152 // FIXME: Handle other arrays types.
1153 if (const auto *CAT = dyn_cast<ConstantArrayType>(CE->getType())) {
1154 unsigned ArrSize = getContext().getConstantArrayElementCount(CAT);
1155
1156 // This might seem conter-intuitive at first glance, but the functions are
1157 // closely related. Reasoning about destructors depends only on the type
1158 // of the expression that initialized the memory region, which is the
1159 // CXXConstructExpr. So to avoid code repetition, the work is delegated
1160 // to the function that reasons about destructor inlining. Also note that
1161 // if the constructors of the array elements are inlined, the destructors
1162 // can also be inlined and if the destructors can be inline, it's safe to
1163 // inline the constructors.
1164 return shouldInlineArrayDestruction(ArrSize);
1165 }
1166
1167 // Check if we're inside an ArrayInitLoopExpr, and it's sufficiently small.
1168 if (auto Size = getPendingInitLoop(State, CE, LCtx))
1169 return shouldInlineArrayDestruction(*Size);
1170
1171 return false;
1172}
1173
1174bool ExprEngine::shouldInlineArrayDestruction(uint64_t Size) {
1175
1176 uint64_t maxAllowedSize = AMgr.options.maxBlockVisitOnPath;
1177
1178 // Declaring a 0 element array is also possible.
1179 return Size <= maxAllowedSize && Size > 0;
1180}
1181
/// Whether the per-element constructor call for an array construction needs
/// to be issued again, i.e. whether elements remain to be constructed.
///
/// NOTE(review): this extraction is missing original line 1193 (the
/// definition of 'Size' used inside the ConstantArrayType branch); verify
/// against upstream.
1182bool ExprEngine::shouldRepeatCtorCall(ProgramStateRef State,
1183 const CXXConstructExpr *E,
1184 const LocationContext *LCtx) {
1185
1186 if (!E)
1187 return false;
1188
1189 auto Ty = E->getType();
1190
1191 // FIXME: Handle non constant array types
1192 if (const auto *CAT = dyn_cast<ConstantArrayType>(Ty)) {
  // NOTE(review): line 1193 missing here — presumably computes the array's
  // element count into 'Size'.
1194 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1195 }
1196
  // Inside an ArrayInitLoopExpr: compare the pending loop size against the
  // index of the element currently being constructed.
1197 if (auto Size = getPendingInitLoop(State, E, LCtx))
1198 return Size > getIndexOfElementToConstruct(State, E, LCtx);
1199
1200 return false;
1201}
1202
// NOTE(review): the opening of this function (original line 1203, the
// signature — declared elsewhere as
// "static bool isTrivialObjectAssignment(const CallEvent &Call)") is
// missing from this extraction. It reports whether \p Call is a trivial
// assignment-operator call on an object.
1204 const CXXInstanceCall *ICall = dyn_cast<CXXInstanceCall>(&Call);
  // Only instance (member) calls can be object assignments.
1205 if (!ICall)
1206 return false;
1207
1208 const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(ICall->getDecl());
1209 if (!MD)
1210 return false;
  // NOTE(review): line 1211 missing here — presumably the guard condition
  // for the return below (likely a copy/move-assignment-operator check);
  // verify against upstream.
1212 return false;
1213
  // The assignment is "trivial" per the C++ sense of FunctionDecl::isTrivial.
1214 return MD->isTrivial();
1215}
1216
// NOTE(review): the opening of this definition (original lines 1216-1217,
// the start of the signature — declared elsewhere as
// "void ExprEngine::defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred,
//  const CallEvent &Call, const EvalCallOptions &CallOpts)") is missing
// from this extraction. Default call evaluation: try to inline; otherwise
// fall back to conservative evaluation.
1218 const CallEvent &Call,
1219 const EvalCallOptions &CallOpts) {
1220 // Make sure we have the most recent state attached to the call.
1221 ProgramStateRef State = Pred->getState();
1222
1223 // Special-case trivial assignment operators.
  // NOTE(review): line 1224 missing here — presumably the guard condition
  // opening this scope (likely an isTrivialObjectAssignment(Call) check).
1225 performTrivialCopy(Bldr, Pred, Call);
1226 return;
1227 }
1228
1229 // Try to inline the call.
1230 // The origin expression here is just used as a kind of checksum;
1231 // this should still be safe even for CallEvents that don't come from exprs.
1232 const Expr *E = Call.getOriginExpr();
1233
1234 ProgramStateRef InlinedFailedState = getInlineFailedState(State, E);
1235 if (InlinedFailedState) {
1236 // If we already tried once and failed, make sure we don't retry later.
1237 State = InlinedFailedState;
1238 } else {
  // Resolve the runtime definition of the callee (may be foreign / imprecise
  // under dynamic dispatch).
1239 RuntimeDefinition RD = Call.getRuntimeDefinition();
1240 Call.setForeign(RD.isForeign());
1241 const Decl *D = RD.getDecl();
1242 if (shouldInlineCall(Call, D, Pred, CallOpts)) {
1243 if (RD.mayHaveOtherDefinitions()) {
  // NOTE(review): line 1244 missing here — presumably the definition of
  // 'Options' used below (the analyzer options reference).
1245
1246 // Explore with and without inlining the call.
1247 if (Options.getIPAMode() == IPAK_DynamicDispatchBifurcate) {
1248 BifurcateCall(RD.getDispatchRegion(), Call, D, Bldr, Pred);
1249 return;
1250 }
1251
1252 // Don't inline if we're not in any dynamic dispatch mode.
1253 if (Options.getIPAMode() != IPAK_DynamicDispatch) {
1254 conservativeEvalCall(Call, Bldr, Pred, State);
1255 return;
1256 }
1257 }
1258 ctuBifurcate(Call, D, Bldr, Pred, State);
1259 return;
1260 }
1261 }
1262
1263 // If we can't inline it, clean up the state traits used only if the function
1264 // is inlined.
1265 State = removeStateTraitsUsedForArrayEvaluation(
1266 State, dyn_cast_or_null<CXXConstructExpr>(E), Call.getLocationContext());
1267
1268 // Also handle the return value and invalidate the regions.
1269 conservativeEvalCall(Call, Bldr, Pred, State);
1270}
1271
1272void ExprEngine::BifurcateCall(const MemRegion *BifurReg,
1273 const CallEvent &Call, const Decl *D,
1274 NodeBuilder &Bldr, ExplodedNode *Pred) {
1275 assert(BifurReg);
1276 BifurReg = BifurReg->StripCasts();
1277
1278 // Check if we've performed the split already - note, we only want
1279 // to split the path once per memory region.
1280 ProgramStateRef State = Pred->getState();
1281 const unsigned *BState =
1282 State->get<DynamicDispatchBifurcationMap>(BifurReg);
1283 if (BState) {
1284 // If we are on "inline path", keep inlining if possible.
1285 if (*BState == DynamicDispatchModeInlined)
1286 ctuBifurcate(Call, D, Bldr, Pred, State);
1287 // If inline failed, or we are on the path where we assume we
1288 // don't have enough info about the receiver to inline, conjure the
1289 // return value and invalidate the regions.
1290 conservativeEvalCall(Call, Bldr, Pred, State);
1291 return;
1292 }
1293
1294 // If we got here, this is the first time we process a message to this
1295 // region, so split the path.
1296 ProgramStateRef IState =
1297 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1298 DynamicDispatchModeInlined);
1299 ctuBifurcate(Call, D, Bldr, Pred, IState);
1300
1301 ProgramStateRef NoIState =
1302 State->set<DynamicDispatchBifurcationMap>(BifurReg,
1303 DynamicDispatchModeConservative);
1304 conservativeEvalCall(Call, Bldr, Pred, NoIState);
1305
1306 NumOfDynamicDispatchPathSplits++;
1307}
1308
// NOTE(review): the opening of this definition (original line 1309, the
// start of the signature — declared elsewhere as
// "void ExprEngine::VisitReturnStmt(const ReturnStmt *RS, ExplodedNode
//  *Pred, ExplodedNodeSet &Dst)") is missing from this extraction.
// Transfer function for return statements: run pre-statement checkers and,
// when a value is returned, emit a post-statement node for each result.
1310 ExplodedNodeSet &Dst) {
1311 ExplodedNodeSet dstPreVisit;
  // Let checkers observe the return statement before it is processed.
1312 getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, RS, *this);
1313
1314 StmtNodeBuilder B(dstPreVisit, Dst, *currBldrCtx);
1315
  // Only returns with a value need an explicit post-statement node; the
  // state is propagated unchanged for each checker-produced predecessor.
1316 if (RS->getRetValue()) {
1317 for (ExplodedNodeSet::iterator it = dstPreVisit.begin(),
1318 ei = dstPreVisit.end(); it != ei; ++it) {
1319 B.generateNode(RS, *it, (*it)->getState());
1320 }
1321 }
1322}
#define V(N, I)
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
#define STAT_COUNTER(VARNAME, DESC)
static bool isContainerClass(const ASTContext &Ctx, const CXXRecordDecl *RD)
Returns true if the given C++ class is a container or iterator.
static ProgramStateRef getInlineFailedState(ProgramStateRef State, const Stmt *CallE)
static std::pair< const Stmt *, const CFGBlock * > getLastStmt(const ExplodedNode *Node)
static bool isTrivialObjectAssignment(const CallEvent &Call)
static bool isCXXSharedPtrDtor(const FunctionDecl *FD)
Returns true if the given function is the destructor of a class named "shared_ptr".
static bool hasMember(const ASTContext &Ctx, const CXXRecordDecl *RD, StringRef Name)
Returns true if the given C++ class contains a member with the given name.
static bool wasDifferentDeclUsedForInlining(CallEventRef<> Call, const StackFrameContext *calleeCtx)
static SVal adjustReturnValue(SVal V, QualType ExpectedTy, QualType ActualTy, StoreManager &StoreMgr)
Adjusts a return value when the called function's return type does not match the caller's expression ...
static bool isContainerMethod(const ASTContext &Ctx, const FunctionDecl *FD)
Returns true if the given function refers to a method of a C++ container or iterator.
static unsigned getElementCountOfArrayBeingDestructed(const CallEvent &Call, const ProgramStateRef State, SValBuilder &SVB)
#define REGISTER_MAP_WITH_PROGRAMSTATE(Name, Key, Value)
Declares an immutable map of type NameTy, suitable for placement into the ProgramState.
#define REGISTER_TRAIT_WITH_PROGRAMSTATE(Name, Type)
Declares a program state trait for type Type called Name, and introduce a type named NameTy.
a trap message and trap category.
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:220
SourceManager & getSourceManager()
Definition ASTContext.h:833
DeclarationNameTable DeclarationNames
Definition ASTContext.h:776
IdentifierTable & Idents
Definition ASTContext.h:772
const LangOptions & getLangOpts() const
Definition ASTContext.h:926
uint64_t getConstantArrayElementCount(const ConstantArrayType *CA) const
Return number of constant array elements.
AnalysisDeclContext * getContext(const Decl *D)
AnalysisDeclContext contains the context data for the function, method or block under analysis.
const BlockInvocationContext * getBlockInvocationContext(const LocationContext *ParentLC, const BlockDecl *BD, const void *Data)
Obtain a context of the block invocation using its parent context.
static bool isInStdNamespace(const Decl *D)
const StackFrameContext * getStackFrame(LocationContext const *ParentLC, const Stmt *S, const CFGBlock *Blk, unsigned BlockCount, unsigned Index)
Obtain a context of the call stack using its parent context.
ASTContext & getASTContext() const
CFG::BuildOptions & getCFGBuildOptions()
Stores options for the analyzer from the command line.
bool mayInlineCXXMemberFunction(CXXInlineableMemberKind K) const
Returns the option controlling which C++ member functions will be considered for inlining.
IPAKind getIPAMode() const
Returns the inter-procedural analysis mode.
CTUPhase1InliningKind getCTUPhase1Inlining() const
unsigned InlineMaxStackDepth
The inlining stack depth limit.
Represents a single basic block in a source-level CFG.
Definition CFG.h:605
bool empty() const
Definition CFG.h:953
succ_iterator succ_begin()
Definition CFG.h:990
unsigned succ_size() const
Definition CFG.h:1008
Represents C++ constructor call.
Definition CFG.h:157
std::optional< T > getAs() const
Convert to the specified CFGElement type, returning std::nullopt if this CFGElement is not of the des...
Definition CFG.h:109
Represents a source-level, intra-procedural CFG that represents the control-flow of a Stmt.
Definition CFG.h:1222
unsigned size() const
Return the total number of CFGBlocks within the CFG This is simply a renaming of the getNumBlockIDs()...
Definition CFG.h:1415
bool isLinear() const
Returns true if the CFG has no branches.
Definition CFG.cpp:5355
CFGBlock & getExit()
Definition CFG.h:1333
unsigned getNumBlockIDs() const
Returns the total number of BlockIDs allocated (which start at 0).
Definition CFG.h:1410
BasePaths - Represents the set of paths from a derived class to one of its (direct or indirect) bases...
CXXBasePath & front()
bool isAmbiguous(CanQualType BaseType)
Determine whether the path from the most-derived type to the given base type is ambiguous (i....
Represents a call to a C++ constructor.
Definition ExprCXX.h:1549
CXXConstructionKind getConstructionKind() const
Determine whether this constructor is actually constructing a base class (rather than a complete obje...
Definition ExprCXX.h:1660
Represents a C++ destructor within a class.
Definition DeclCXX.h:2869
Represents a static or instance method of a struct/union/class.
Definition DeclCXX.h:2129
const CXXRecordDecl * getParent() const
Return the parent of this method declaration, which is the class in which this method is defined.
Definition DeclCXX.h:2255
bool isMoveAssignmentOperator() const
Determine whether this is a move assignment operator.
Definition DeclCXX.cpp:2735
bool isCopyAssignmentOperator() const
Determine whether this is a copy-assignment operator, regardless of whether it was declared implicitl...
Definition DeclCXX.cpp:2714
Represents a C++ struct/union/class.
Definition DeclCXX.h:258
bool hasTrivialDestructor() const
Determine whether this class has a trivial destructor (C++ [class.dtor]p3)
Definition DeclCXX.h:1366
bool hasMemberName(DeclarationName N) const
Determine whether this class has a member with the given name, possibly in a non-dependent base class...
bool isDerivedFrom(const CXXRecordDecl *Base) const
Determine whether this class is derived from the class Base.
Represents a point when we begin processing an inlined call.
const CFGBlock * getEntry() const
Returns the entry block in the CFG for the entered function.
const StackFrameContext * getCalleeContext() const
Represents a point when we finish the call exit sequence (for inlined call).
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2877
ConstructionContext's subclasses describe different ways of constructing an object in C++.
Decl - This represents one declaration (or definition), e.g.
Definition DeclBase.h:86
virtual Decl * getCanonicalDecl()
Retrieves the "canonical" declaration of the given declaration.
Definition DeclBase.h:978
DeclarationName getIdentifier(const IdentifierInfo *ID)
Create a declaration name that is a simple identifier.
IdentifierInfo * getAsIdentifierInfo() const
Retrieve the IdentifierInfo * stored in this declaration name, or null if this declaration name isn't...
This is a meta program point, which should be skipped by all the diagnostic reasoning etc.
This represents one expression.
Definition Expr.h:112
QualType getType() const
Definition Expr.h:144
Represents a function declaration or definition.
Definition Decl.h:2000
bool isTrivial() const
Whether this function is "trivial" in some specialized C++ senses.
Definition Decl.h:2377
One of these records is kept for each identifier that is lexed.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
It wraps the AnalysisDeclContext to represent both the call stack with the help of StackFrameContext ...
const Decl * getDecl() const
LLVM_ATTRIBUTE_RETURNS_NONNULL AnalysisDeclContext * getAnalysisDeclContext() const
const LocationContext * getParent() const
It might return null.
const StackFrameContext * getStackFrame() const
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Definition Decl.h:340
Represents a parameter to a function.
Definition Decl.h:1790
const StackFrameContext * getStackFrame() const
std::optional< T > getAs() const
Convert to the specified ProgramPoint type, returning std::nullopt if this ProgramPoint is not of the...
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isNull() const
Return true if this QualType doesn't point to a type yet.
Definition TypeBase.h:1004
QualType getCanonicalType() const
Definition TypeBase.h:8330
bool isConstQualified() const
Determine whether this type is const-qualified.
Definition TypeBase.h:8351
ReturnStmt - This represents a return, optionally of an expression: return; return 4;.
Definition Stmt.h:3160
Expr * getRetValue()
Definition Stmt.h:3187
bool isInSystemHeader(SourceLocation Loc) const
Returns if a SourceLocation is in a system header.
It represents a stack frame of the call stack (based on CallEvent).
const Stmt * getCallSite() const
const CFGBlock * getCallSiteBlock() const
Stmt - This represents one statement.
Definition Stmt.h:85
bool isVoidType() const
Definition TypeBase.h:8871
bool isPointerType() const
Definition TypeBase.h:8515
CanQualType getCanonicalTypeUnqualified() const
bool isReferenceType() const
Definition TypeBase.h:8539
const CXXRecordDecl * getPointeeCXXRecordDecl() const
If this is a pointer or reference to a RecordType, return the CXXRecordDecl that the type refers to.
Definition Type.cpp:1909
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:752
bool isObjCObjectPointerType() const
Definition TypeBase.h:8684
AnalysisDeclContext * getAnalysisDeclContext(const Decl *D)
Represents a call to a C++ constructor.
Definition CallEvent.h:999
const CXXConstructorDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Definition CallEvent.h:1030
const CXXConstructExpr * getOriginExpr() const override
Returns the expression whose value will be the result of this call.
Definition CallEvent.h:1026
Represents a non-static C++ member function call, no matter how it is written.
Definition CallEvent.h:692
const FunctionDecl * getDecl() const override
Returns the declaration of the function or method that will be called.
Manages the lifetime of CallEvent objects.
Definition CallEvent.h:1376
CallEventRef getSimpleCall(const CallExpr *E, ProgramStateRef State, const LocationContext *LCtx, CFGBlock::ConstCFGElementRef ElemRef)
CallEventRef getCaller(const StackFrameContext *CalleeCtx, ProgramStateRef State)
Gets an outside caller given a callee context.
Represents an abstract call to a function or method along a particular path.
Definition CallEvent.h:153
CallEventRef< T > cloneWithState(ProgramStateRef NewState) const
Returns a copy of this CallEvent, but using the given state.
Definition CallEvent.h:1493
static QualType getDeclaredResultType(const Decl *D)
Returns the result type of a function or method declaration.
static bool isVariadic(const Decl *D)
Returns true if the given decl is known to be variadic.
void runCheckersForPreCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng)
Run checkers for pre-visiting obj-c messages.
void runCheckersForEvalCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &CE, ExprEngine &Eng, const EvalCallOptions &CallOpts)
Run checkers for evaluating a call.
void runCheckersForPostObjCMessage(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const ObjCMethodCall &msg, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
void runCheckersForPostStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting Stmts.
void runCheckersForNewAllocator(const CXXAllocatorCall &Call, ExplodedNodeSet &Dst, ExplodedNode *Pred, ExprEngine &Eng, bool wasInlined=false)
Run checkers between C++ operator new and constructor calls.
void runCheckersForPreStmt(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const Stmt *S, ExprEngine &Eng)
Run checkers for pre-visiting Stmts.
void runCheckersForPostCall(ExplodedNodeSet &Dst, const ExplodedNodeSet &Src, const CallEvent &Call, ExprEngine &Eng, bool wasInlined=false)
Run checkers for post-visiting obj-c messages.
WorkList * getCTUWorkList() const
Definition CoreEngine.h:173
WorkList * getWorkList() const
Definition CoreEngine.h:172
void insert(const ExplodedNodeSet &S)
void Add(ExplodedNode *N)
const ProgramStateRef & getState() const
ProgramPoint getLocation() const
getLocation - Returns the edge associated with the given node.
void addPredecessor(ExplodedNode *V, ExplodedGraph &G)
addPredeccessor - Adds a predecessor to the current node, and in tandem add this node as a successor ...
const StackFrameContext * getStackFrame() const
const LocationContext * getLocationContext() const
std::optional< T > getLocationAs() const &
ExplodedNode * getFirstPred()
ProgramStateManager & getStateManager()
Definition ExprEngine.h:421
void processCallEnter(NodeBuilderContext &BC, CallEnter CE, ExplodedNode *Pred)
Generate the entry node of the callee.
void processBeginOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst, const BlockEdge &L)
Called by CoreEngine.
void removeDead(ExplodedNode *Node, ExplodedNodeSet &Out, const Stmt *ReferenceStmt, const LocationContext *LC, const Stmt *DiagnosticStmt=nullptr, ProgramPoint::Kind K=ProgramPoint::PreStmtPurgeDeadSymbolsKind)
Run the analyzer's garbage collection - remove dead symbols and bindings from the state.
std::pair< ProgramStateRef, SVal > handleConstructionContext(const Expr *E, ProgramStateRef State, const NodeBuilderContext *BldrCtx, const LocationContext *LCtx, const ConstructionContext *CC, EvalCallOptions &CallOpts, unsigned Idx=0)
A convenient wrapper around computeObjectUnderConstruction and updateObjectsUnderConstruction.
Definition ExprEngine.h:763
void VisitReturnStmt(const ReturnStmt *R, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitReturnStmt - Transfer function logic for return statements.
const CoreEngine & getCoreEngine() const
Definition ExprEngine.h:452
void processCallExit(ExplodedNode *Pred)
Generate the sequence of nodes that simulate the call exit and the post visit for CallExpr.
static std::optional< SVal > getObjectUnderConstruction(ProgramStateRef State, const ConstructionContextItem &Item, const LocationContext *LC)
By looking at a certain item that may be potentially part of an object's ConstructionContext,...
CFGElement getCurrentCFGElement()
Return the CFG element corresponding to the worklist element that is currently being processed by Exp...
Definition ExprEngine.h:712
@ Inline_Minimal
Do minimal inlining of callees.
Definition ExprEngine.h:134
ProgramStateRef processPointerEscapedOnBind(ProgramStateRef State, ArrayRef< std::pair< SVal, SVal > > LocAndVals, const LocationContext *LCtx, PointerEscapeKind Kind, const CallEvent *Call)
Call PointerEscape callback when a value escapes as a result of bind.
static std::optional< unsigned > getIndexOfElementToConstruct(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves which element is being constructed in a non-POD type array.
void VisitCallExpr(const CallExpr *CE, ExplodedNode *Pred, ExplodedNodeSet &Dst)
VisitCall - Transfer function for function calls.
ASTContext & getContext() const
getContext - Return the ASTContext associated with this analysis.
Definition ExprEngine.h:196
StoreManager & getStoreManager()
Definition ExprEngine.h:424
void evalCall(ExplodedNodeSet &Dst, ExplodedNode *Pred, const CallEvent &Call)
Evaluate a call, running pre- and post-call checkers and allowing checkers to be responsible for hand...
ConstCFGElementRef getCFGElementRef() const
Definition ExprEngine.h:232
static std::optional< unsigned > getPendingArrayDestruction(ProgramStateRef State, const LocationContext *LCtx)
Retrieves which element is being destructed in a non-POD type array.
CheckerManager & getCheckerManager() const
Definition ExprEngine.h:205
ProgramStateRef bindReturnValue(const CallEvent &Call, const LocationContext *LCtx, ProgramStateRef State)
Create a new state in which the call return value is binded to the call origin expression.
void removeDeadOnEndOfFunction(NodeBuilderContext &BC, ExplodedNode *Pred, ExplodedNodeSet &Dst)
Remove dead bindings/symbols before exiting a function.
void defaultEvalCall(NodeBuilder &B, ExplodedNode *Pred, const CallEvent &Call, const EvalCallOptions &CallOpts={})
Default implementation of call evaluation.
AnalysisManager & getAnalysisManager()
Definition ExprEngine.h:198
static std::optional< unsigned > getPendingInitLoop(ProgramStateRef State, const CXXConstructExpr *E, const LocationContext *LCtx)
Retrieves the size of the array in the pending ArrayInitLoopExpr.
MemRegion - The root abstract class for all memory regions.
Definition MemRegion.h:98
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * StripCasts(bool StripBaseAndDerivedCasts=true) const
unsigned blockCount() const
Returns the number of times the current basic block has been visited on the exploded graph path.
Definition CoreEngine.h:224
This is the simplest builder which generates nodes in the ExplodedGraph.
Definition CoreEngine.h:240
ExplodedNode * generateNode(const ProgramPoint &PP, ProgramStateRef State, ExplodedNode *Pred)
Generates a node in the ExplodedGraph.
Definition CoreEngine.h:293
void takeNodes(const ExplodedNodeSet &S)
Definition CoreEngine.h:335
Represents any expression that calls an Objective-C method.
Definition CallEvent.h:1263
CallEventManager & getCallEventManager()
Information about invalidation for a particular region/symbol.
Definition MemRegion.h:1657
void setTrait(SymbolRef Sym, InvalidationKinds IK)
Defines the runtime definition of the called function.
Definition CallEvent.h:110
const MemRegion * getDispatchRegion()
When other definitions are possible, returns the region whose runtime type determines the method defi...
Definition CallEvent.h:141
bool mayHaveOtherDefinitions()
Check if the definition we have is precise.
Definition CallEvent.h:137
SVal - This represents a symbolic expression, which can be either an L-value or an R-value.
Definition SVals.h:56
QualType getType(const ASTContext &) const
Try to get a reasonable type for the given value.
Definition SVals.cpp:180
const MemRegion * getAsRegion() const
Definition SVals.cpp:119
T castAs() const
Convert to the specified SVal type, asserting that this SVal is of the desired type.
Definition SVals.h:83
This builder class is useful for generating nodes that resulted from visiting a statement.
Definition CoreEngine.h:384
ExplodedNode * generateNode(const Stmt *S, ExplodedNode *Pred, ProgramStateRef St, const ProgramPointTag *tag=nullptr, ProgramPoint::Kind K=ProgramPoint::PostStmtKind)
Definition CoreEngine.h:413
SVal evalDerivedToBase(SVal Derived, const CastExpr *Cast)
Evaluates a chain of derived-to-base casts through the path specified in Cast.
Definition Store.cpp:254
virtual void enqueue(const WorkListUnit &U)=0
LLVM_ATTRIBUTE_RETURNS_NONNULL const MemRegion * getRegion() const
Get the underlying region.
Definition SVals.h:493
@ PSK_EscapeOutParameters
Escape for a new symbol that was generated into a region that the analyzer cannot follow during a con...
DefinedOrUnknownSVal getDynamicElementCount(ProgramStateRef State, const MemRegion *MR, SValBuilder &SVB, QualType Ty)
IntrusiveRefCntPtr< const ProgramState > ProgramStateRef
ProgramStateRef setDynamicExtent(ProgramStateRef State, const MemRegion *MR, DefinedOrUnknownSVal Extent)
Set the dynamic extent Extent of the region MR.
@ CE_CXXInheritedConstructor
Definition CallEvent.h:69
@ CE_CXXStaticOperator
Definition CallEvent.h:62
@ CE_CXXDestructor
Definition CallEvent.h:65
@ CE_CXXDeallocator
Definition CallEvent.h:73
@ CE_CXXAllocator
Definition CallEvent.h:72
@ CE_CXXConstructor
Definition CallEvent.h:68
@ CE_CXXMemberOperator
Definition CallEvent.h:64
DefinedOrUnknownSVal getElementExtent(QualType Ty, SValBuilder &SVB)
std::variant< struct RequiresDecl, struct HeaderDecl, struct UmbrellaDirDecl, struct ModuleDecl, struct ExcludeDecl, struct ExportDecl, struct ExportAsDecl, struct ExternModuleDecl, struct UseDecl, struct LinkDecl, struct ConfigMacrosDecl, struct ConflictDecl > Decl
All declarations that can appear in a module declaration.
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
Definition Address.h:330
CFGBlock::ConstCFGElementRef ConstCFGElementRef
Definition CFG.h:1199
@ ExpectedClass
@ IPAK_DynamicDispatch
Enable inlining of dynamically dispatched methods.
@ IPAK_DynamicDispatchBifurcate
Enable inlining of dynamically dispatched methods, bifurcate paths when exact type info is unavailabl...
@ CIMK_Destructors
Refers to destructors (implicit or explicit).
@ CIMK_MemberFunctions
Refers to regular member function and operator calls.
@ CIMK_Constructors
Refers to constructors (implicit or explicit).
U cast(CodeGen::Address addr)
Definition Address.h:327
unsigned long uint64_t
Hints for figuring out whether a call should be inlined during evalCall().
Definition ExprEngine.h:97
bool IsTemporaryLifetimeExtendedViaAggregate
This call is a constructor for a temporary that is lifetime-extended by binding it to a reference-typ...
Definition ExprEngine.h:112
bool IsTemporaryCtorOrDtor
This call is a constructor or a destructor of a temporary value.
Definition ExprEngine.h:107
bool IsArrayCtorOrDtor
This call is a constructor or a destructor for a single element within an array, a part of array cons...
Definition ExprEngine.h:104
bool IsCtorOrDtorWithImproperlyModeledTargetRegion
This call is a constructor or a destructor for which we do not currently compute the this-region corr...
Definition ExprEngine.h:100
Traits for storing the call processing policy inside GDM.