56#include "llvm/ADT/APFixedPoint.h"
57#include "llvm/ADT/SmallBitVector.h"
58#include "llvm/ADT/StringExtras.h"
59#include "llvm/Support/Debug.h"
60#include "llvm/Support/SaveAndRestore.h"
61#include "llvm/Support/SipHash.h"
62#include "llvm/Support/TimeProfiler.h"
63#include "llvm/Support/raw_ostream.h"
68#define DEBUG_TYPE "exprconstant"
71using llvm::APFixedPoint;
75using llvm::FixedPointSemantics;
using SourceLocExprScopeGuard =
    CurrentSourceLocExprScope::SourceLocExprScopeGuard;

  return dyn_cast_or_null<FieldDecl>(E.getAsBaseOrMember().getPointer());

  return dyn_cast_or_null<CXXRecordDecl>(E.getAsBaseOrMember().getPointer());

  return E.getAsBaseOrMember().getInt();
static const AllocSizeAttr *getAllocSizeAttr(const CallExpr *CE) {
    return DirectCallee->getAttr<AllocSizeAttr>();
    return IndirectCallee->getAttr<AllocSizeAttr>();

static const CallExpr *tryUnwrapAllocSizeCall(const Expr *E) {
  if (const auto *FE = dyn_cast<FullExpr>(E))

  if (const auto *Cast = dyn_cast<CastExpr>(E))
    E = Cast->getSubExpr()->IgnoreParens();

  if (const auto *CE = dyn_cast<CallExpr>(E))
    return getAllocSizeAttr(CE) ? CE : nullptr;
  const auto *E = Base.dyn_cast<const Expr *>();

  case ConstantExprKind::Normal:
  case ConstantExprKind::ClassTemplateArgument:
  case ConstantExprKind::ImmediateInvocation:

  case ConstantExprKind::NonClassTemplateArgument:

  llvm_unreachable("unknown ConstantExprKind");

  case ConstantExprKind::Normal:
  case ConstantExprKind::ImmediateInvocation:

  case ConstantExprKind::ClassTemplateArgument:
  case ConstantExprKind::NonClassTemplateArgument:

  llvm_unreachable("unknown ConstantExprKind");

static const uint64_t AssumedSizeForUnsizedArray =
    std::numeric_limits<uint64_t>::max() / 2;
197 bool &FirstEntryIsUnsizedArray) {
200 assert(!isBaseAnAllocSizeCall(
Base) &&
201 "Unsized arrays shouldn't appear here");
202 unsigned MostDerivedLength = 0;
205 for (
unsigned I = 0, N =
Path.size(); I != N; ++I) {
209 MostDerivedLength = I + 1;
212 if (
auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
213 ArraySize = CAT->getZExtSize();
215 assert(I == 0 &&
"unexpected unsized array designator");
216 FirstEntryIsUnsizedArray =
true;
217 ArraySize = AssumedSizeForUnsizedArray;
223 MostDerivedLength = I + 1;
226 Type = VT->getElementType();
227 ArraySize = VT->getNumElements();
228 MostDerivedLength = I + 1;
231 Type = FD->getType();
233 MostDerivedLength = I + 1;
241 return MostDerivedLength;
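
/// A path from a glvalue to a subobject of that glvalue.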
struct SubobjectDesignator {
  LLVM_PREFERRED_TYPE(bool)
  unsigned Invalid : 1;

  LLVM_PREFERRED_TYPE(bool)
  unsigned IsOnePastTheEnd : 1;

  LLVM_PREFERRED_TYPE(bool)
  unsigned FirstEntryIsAnUnsizedArray : 1;

  LLVM_PREFERRED_TYPE(bool)
  unsigned MostDerivedIsArrayElement : 1;

  unsigned MostDerivedPathLength : 28;

  uint64_t MostDerivedArraySize;

  SubobjectDesignator() : Invalid(true) {}

      : Invalid(false), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0),
        MostDerivedType(T) {}
      : Invalid(!V.isLValue() || !V.hasLValuePath()), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0) {
    assert(V.isLValue() && "Non-LValue used to make an LValue designator?");

    IsOnePastTheEnd = V.isLValueOnePastTheEnd();

    Entries.insert(Entries.end(), VEntries.begin(), VEntries.end());
    if (V.getLValueBase()) {
      bool IsArray = false;
      bool FirstIsUnsizedArray = false;
      MostDerivedPathLength = findMostDerivedSubobject(
          Ctx, V.getLValueBase(), V.getLValuePath(), MostDerivedArraySize,
          MostDerivedType, IsArray, FirstIsUnsizedArray);
      MostDerivedIsArrayElement = IsArray;
      FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
                unsigned NewLength) {
    assert(Base && "cannot truncate path for null pointer");
    assert(NewLength <= Entries.size() && "not a truncation");

    if (NewLength == Entries.size())

    Entries.resize(NewLength);

    bool IsArray = false;
    bool FirstIsUnsizedArray = false;
    MostDerivedPathLength = findMostDerivedSubobject(
        Ctx, Base, Entries, MostDerivedArraySize, MostDerivedType, IsArray,
        FirstIsUnsizedArray);
    MostDerivedIsArrayElement = IsArray;
    FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
  bool isMostDerivedAnUnsizedArray() const {
    assert(!Invalid && "Calling this makes no sense on invalid designators");
    return Entries.size() == 1 && FirstEntryIsAnUnsizedArray;

  uint64_t getMostDerivedArraySize() const {
    assert(!isMostDerivedAnUnsizedArray() && "Unsized array has no size");
    return MostDerivedArraySize;

  bool isOnePastTheEnd() const {
    if (!isMostDerivedAnUnsizedArray() && MostDerivedIsArrayElement &&
        Entries[MostDerivedPathLength - 1].getAsArrayIndex() ==
            MostDerivedArraySize)
368 std::pair<uint64_t, uint64_t> validIndexAdjustments() {
369 if (Invalid || isMostDerivedAnUnsizedArray())
375 bool IsArray = MostDerivedPathLength == Entries.size() &&
376 MostDerivedIsArrayElement;
377 uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
378 : (uint64_t)IsOnePastTheEnd;
380 IsArray ? getMostDerivedArraySize() : (uint64_t)1;
381 return {ArrayIndex, ArraySize - ArrayIndex};
385 bool isValidSubobject()
const {
388 return !isOnePastTheEnd();
396 assert(!Invalid &&
"invalid designator has no subobject type");
397 return MostDerivedPathLength == Entries.size()
408 MostDerivedIsArrayElement =
true;
410 MostDerivedPathLength = Entries.size();
  void addUnsizedArrayUnchecked(QualType ElemTy) {
    MostDerivedType = ElemTy;
    MostDerivedIsArrayElement = true;

    MostDerivedArraySize = AssumedSizeForUnsizedArray;
    MostDerivedPathLength = Entries.size();

  void addDeclUnchecked(const Decl *D, bool Virtual = false) {
    if (const FieldDecl *FD = dyn_cast<FieldDecl>(D)) {
      MostDerivedType = FD->getType();
      MostDerivedIsArrayElement = false;
      MostDerivedArraySize = 0;
      MostDerivedPathLength = Entries.size();

  void addComplexUnchecked(QualType EltTy, bool Imag) {
    MostDerivedType = EltTy;
    MostDerivedIsArrayElement = true;
    MostDerivedArraySize = 2;
    MostDerivedPathLength = Entries.size();

  void addVectorElementUnchecked(QualType EltTy, uint64_t Size,
    MostDerivedType = EltTy;
    MostDerivedPathLength = Entries.size();
    MostDerivedArraySize = 0;
    MostDerivedIsArrayElement = false;
  void diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info, const Expr *E);
  void diagnosePointerArithmetic(EvalInfo &Info, const Expr *E,

  void adjustIndex(EvalInfo &Info, const Expr *E, APSInt N) {
    if (Invalid || !N)
      return;

    uint64_t TruncatedN = N.extOrTrunc(64).getZExtValue();
    if (isMostDerivedAnUnsizedArray()) {
      diagnoseUnsizedArrayPointerArithmetic(Info, E);
          Entries.back().getAsArrayIndex() + TruncatedN);

    bool IsArray = MostDerivedPathLength == Entries.size() &&
                   MostDerivedIsArrayElement;
    uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
                                  : (uint64_t)IsOnePastTheEnd;
        IsArray ? getMostDerivedArraySize() : (uint64_t)1;

    if (N < -(int64_t)ArrayIndex || N > ArraySize - ArrayIndex) {
      N = N.extend(std::max<unsigned>(N.getBitWidth() + 1, 65));
      (llvm::APInt &)N += ArrayIndex;
      assert(N.ugt(ArraySize) && "bounds check failed for in-bounds index");
      diagnosePointerArithmetic(Info, E, N);

    ArrayIndex += TruncatedN;
    assert(ArrayIndex <= ArraySize &&
           "bounds check succeeded for out-of-bounds index");

    IsOnePastTheEnd = (ArrayIndex != 0);
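
/// The kind of scope at whose end a cleanup can run: a block, a
/// full-expression, or a function call (see the ScopeRAII typedefs below).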
enum class ScopeKind {

  CallRef() : OrigCallee(), CallIndex(0), Version() {}
  CallRef(const FunctionDecl *Callee, unsigned CallIndex, unsigned Version)
      : OrigCallee(Callee), CallIndex(CallIndex), Version(Version) {}

  explicit operator bool() const { return OrigCallee; }
  CallStackFrame *Caller;

  typedef std::pair<const void *, unsigned> MapKeyTy;
  typedef std::map<MapKeyTy, APValue> MapTy;

  unsigned CurTempVersion = TempVersionStack.back();

  unsigned getTempVersion() const { return TempVersionStack.back(); }

  void pushTempVersion() {
    TempVersionStack.push_back(++CurTempVersion);

  void popTempVersion() {
    TempVersionStack.pop_back();

    return {Callee, Index, ++CurTempVersion};

  llvm::DenseMap<const ValueDecl *, FieldDecl *> LambdaCaptureFields;
  FieldDecl *LambdaThisCaptureField = nullptr;
  CallStackFrame(EvalInfo &Info, SourceRange CallRange,

  APValue *getTemporary(const void *Key, unsigned Version) {
    MapKeyTy KV(Key, Version);
    auto LB = Temporaries.lower_bound(KV);
    if (LB != Temporaries.end() && LB->first == KV)

  APValue *getCurrentTemporary(const void *Key) {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return &std::prev(UB)->second;

  unsigned getCurrentTemporaryVersion(const void *Key) const {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return std::prev(UB)->first.second;
643 template<
typename KeyT>
645 ScopeKind
Scope, LValue &LV);
650 void describe(llvm::raw_ostream &OS)
const override;
652 Frame *getCaller()
const override {
return Caller; }
653 SourceRange getCallRange()
const override {
return CallRange; }
654 const FunctionDecl *getCallee()
const override {
return Callee; }
656 bool isStdFunction()
const {
657 for (
const DeclContext *DC = Callee; DC; DC = DC->getParent())
658 if (DC->isStdNamespace())
665 bool CanEvalMSConstexpr =
false;
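
/// RAII object used to optionally override the value of 'this' within a call
/// stack frame, restoring the previous value on destruction.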
class ThisOverrideRAII {
  ThisOverrideRAII(CallStackFrame &Frame, const LValue *NewThis, bool Enable)
      : Frame(Frame), OldThis(Frame.This) {
      Frame.This = NewThis;

  ~ThisOverrideRAII() {
    Frame.This = OldThis;

  CallStackFrame &Frame;
  const LValue *OldThis;
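
/// RAII helper that opens an llvm::TimeTraceScope covering the evaluation of
/// a single expression.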
class ExprTimeTraceScope {
  ExprTimeTraceScope(const Expr *E, const ASTContext &Ctx, StringRef Name)
      : TimeScope(Name, [E, &Ctx] {

  llvm::TimeTraceScope TimeScope;
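
/// RAII object that saves and restores a frame's CanEvalMSConstexpr flag.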
struct MSConstexprContextRAII {
  CallStackFrame &Frame;

  explicit MSConstexprContextRAII(CallStackFrame &Frame, bool Value)
      : Frame(Frame), OldValue(Frame.CanEvalMSConstexpr) {
    Frame.CanEvalMSConstexpr = Value;

  ~MSConstexprContextRAII() { Frame.CanEvalMSConstexpr = OldValue; }

                                  const LValue &This, QualType ThisType);

  llvm::PointerIntPair<APValue *, 2, ScopeKind> Value;
  bool isDestroyedAtEndOf(ScopeKind K) const {
    return (int)Value.getInt() >= (int)K;

  bool endLifetime(EvalInfo &Info, bool RunDestructors) {
    if (RunDestructors) {
        Loc = VD->getLocation();

  bool hasSideEffect() {
    return T.isDestructedType();
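
/// An object whose construction or destruction is currently being evaluated,
/// identified by its base and the path to the subobject in question.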
struct ObjectUnderConstruction {
  friend bool operator==(const ObjectUnderConstruction &LHS,
                         const ObjectUnderConstruction &RHS) {
    return LHS.Base == RHS.Base && LHS.Path == RHS.Path;

  friend llvm::hash_code hash_value(const ObjectUnderConstruction &Obj) {
    return llvm::hash_combine(Obj.Base, Obj.Path);

enum class ConstructionPhase {

template <> struct DenseMapInfo<ObjectUnderConstruction> {
  using Base = DenseMapInfo<APValue::LValueBase>;
    return {Base::getEmptyKey(), {}};
    return {Base::getTombstoneKey(), {}};

  static bool isEqual(const ObjectUnderConstruction &LHS,
                      const ObjectUnderConstruction &RHS) {
  const Expr *AllocExpr = nullptr;

    if (auto *NE = dyn_cast<CXXNewExpr>(AllocExpr))
      return NE->isArray() ? ArrayNew : New;
    assert(isa<CallExpr>(AllocExpr));

struct DynAllocOrder {

  CallStackFrame *CurrentCall;

  unsigned CallStackDepth;

  unsigned NextCallIndex;

  bool EnableNewConstInterp;

  CallStackFrame BottomFrame;

  enum class EvaluatingDeclKind {

  EvaluatingDeclKind IsEvaluatingDecl = EvaluatingDeclKind::None;

  llvm::DenseMap<ObjectUnderConstruction, ConstructionPhase>
      ObjectsUnderConstruction;

  std::map<DynamicAllocLValue, DynAlloc, DynAllocOrder> HeapAllocs;

  unsigned NumHeapAllocs = 0;
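
  /// RAII object used to note that a constructor for the given object is
  /// being evaluated, and to track which construction phase (bases, then
  /// fields) has completed.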
  struct EvaluatingConstructorRAII {
    ObjectUnderConstruction Object;

    EvaluatingConstructorRAII(EvalInfo &EI, ObjectUnderConstruction Object,
          EI.ObjectsUnderConstruction
              .insert({Object, HasBases ? ConstructionPhase::Bases
                                        : ConstructionPhase::AfterBases})

    void finishedConstructingBases() {
      EI.ObjectsUnderConstruction[Object] = ConstructionPhase::AfterBases;

    void finishedConstructingFields() {
      EI.ObjectsUnderConstruction[Object] = ConstructionPhase::AfterFields;

    ~EvaluatingConstructorRAII() {
      if (DidInsert) EI.ObjectsUnderConstruction.erase(Object);
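
  /// RAII object used to note that a destructor for the given object is
  /// being evaluated.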
  struct EvaluatingDestructorRAII {
    ObjectUnderConstruction Object;

    EvaluatingDestructorRAII(EvalInfo &EI, ObjectUnderConstruction Object)
      DidInsert = EI.ObjectsUnderConstruction
                      .insert({Object, ConstructionPhase::Destroying})

    void startedDestroyingBases() {
      EI.ObjectsUnderConstruction[Object] =
          ConstructionPhase::DestroyingBases;

    ~EvaluatingDestructorRAII() {
        EI.ObjectsUnderConstruction.erase(Object);

    return ObjectsUnderConstruction.lookup({Base, Path});

  unsigned SpeculativeEvaluationDepth = 0;
  bool HasActiveDiagnostic;

  bool HasFoldFailureDiagnostic;

  bool CheckingPotentialConstantExpression = false;

  bool CheckingForUndefinedBehavior = false;

  enum EvaluationMode {
    EM_ConstantExpression,

    EM_ConstantExpressionUnevaluated,

    EM_IgnoreSideEffects,

  bool checkingPotentialConstantExpression() const override {
    return CheckingPotentialConstantExpression;

  bool checkingForUndefinedBehavior() const override {
    return CheckingForUndefinedBehavior;
1018 : Ctx(const_cast<
ASTContext &>(
C)), EvalStatus(S), CurrentCall(nullptr),
1019 CallStackDepth(0), NextCallIndex(1),
1020 StepsLeft(
C.getLangOpts().ConstexprStepLimit),
1021 EnableNewConstInterp(
C.getLangOpts().EnableNewConstInterp),
1024 nullptr, CallRef()),
1025 EvaluatingDecl((const
ValueDecl *)nullptr),
1026 EvaluatingDeclValue(nullptr), HasActiveDiagnostic(
false),
1027 HasFoldFailureDiagnostic(
false), EvalMode(Mode) {}
1033 ASTContext &getCtx()
const override {
return Ctx; }
1036 EvaluatingDeclKind EDK = EvaluatingDeclKind::Ctor) {
1037 EvaluatingDecl =
Base;
1038 IsEvaluatingDecl = EDK;
1039 EvaluatingDeclValue = &
Value;
    if (checkingPotentialConstantExpression() && CallStackDepth > 1)

    if (NextCallIndex == 0) {
      FFDiag(Loc, diag::note_constexpr_call_limit_exceeded);

    if (CallStackDepth <= getLangOpts().ConstexprCallDepth)

    FFDiag(Loc, diag::note_constexpr_depth_limit_exceeded)
        << getLangOpts().ConstexprCallDepth;

                      uint64_t ElemCount, bool Diag) {
        ElemCount > uint64_t(std::numeric_limits<unsigned>::max())) {
        FFDiag(Loc, diag::note_constexpr_new_too_large) << ElemCount;

    if (ElemCount > Limit) {
        FFDiag(Loc, diag::note_constexpr_new_exceeds_limits)
            << ElemCount << Limit;
1087 std::pair<CallStackFrame *, unsigned>
1088 getCallFrameAndDepth(
unsigned CallIndex) {
1089 assert(CallIndex &&
"no call index in getCallFrameAndDepth");
1092 unsigned Depth = CallStackDepth;
1093 CallStackFrame *Frame = CurrentCall;
1094 while (Frame->Index > CallIndex) {
1095 Frame = Frame->Caller;
1098 if (Frame->Index == CallIndex)
1099 return {Frame, Depth};
1100 return {
nullptr, 0};
1103 bool nextStep(
const Stmt *S) {
1105 FFDiag(S->getBeginLoc(), diag::note_constexpr_step_limit_exceeded);
1115 std::optional<DynAlloc *> Result;
1116 auto It = HeapAllocs.find(DA);
1117 if (It != HeapAllocs.end())
1118 Result = &It->second;
1124 CallStackFrame *Frame = getCallFrameAndDepth(
Call.CallIndex).first;
1125 return Frame ? Frame->getTemporary(
Call.getOrigParam(PVD),
Call.Version)
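
  /// Information about the innermost call frame whose callee is a member of
  /// std::allocator<T>, if any, as found by getStdAllocatorCaller().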
  struct StdAllocatorCaller {
    unsigned FrameIndex;

    explicit operator bool() const { return FrameIndex != 0; };

  StdAllocatorCaller getStdAllocatorCaller(StringRef FnName) const {
    for (const CallStackFrame *Call = CurrentCall; Call != &BottomFrame;
      const auto *MD = dyn_cast_or_null<CXXMethodDecl>(Call->Callee);

      if (!FnII || !FnII->isStr(FnName))

          dyn_cast<ClassTemplateSpecializationDecl>(MD->getParent());

      if (CTSD->isInStdNamespace() && ClassII &&
          ClassII->isStr("allocator") && TAL.size() >= 1 &&
        return {Call->Index, TAL[0].getAsType()};
  void performLifetimeExtension() {
    llvm::erase_if(CleanupStack, [](Cleanup &C) {
      return !C.isDestroyedAtEndOf(ScopeKind::FullExpression);

  bool discardCleanups() {
    for (Cleanup &C : CleanupStack) {
      if (C.hasSideEffect() && !noteSideEffect()) {
        CleanupStack.clear();

    CleanupStack.clear();
  interp::Frame *getCurrentFrame() override { return CurrentCall; }
  const interp::Frame *getBottomFrame() const override { return &BottomFrame; }

  bool hasActiveDiagnostic() override { return HasActiveDiagnostic; }
  void setActiveDiagnostic(bool Flag) override { HasActiveDiagnostic = Flag; }

  void setFoldFailureDiagnostic(bool Flag) override {
    HasFoldFailureDiagnostic = Flag;

  bool hasPriorDiagnostic() override {
    if (!EvalStatus.Diag->empty()) {
      case EM_ConstantFold:
      case EM_IgnoreSideEffects:
        if (!HasFoldFailureDiagnostic)

      case EM_ConstantExpression:
      case EM_ConstantExpressionUnevaluated:
        setActiveDiagnostic(false);

  unsigned getCallStackDepth() override { return CallStackDepth; }
  bool keepEvaluatingAfterSideEffect() {
    case EM_IgnoreSideEffects:

    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
    case EM_ConstantFold:
      return checkingPotentialConstantExpression() ||
             checkingForUndefinedBehavior();

    llvm_unreachable("Missed EvalMode case");

  bool noteSideEffect() {
    return keepEvaluatingAfterSideEffect();

  bool keepEvaluatingAfterUndefinedBehavior() {
    case EM_IgnoreSideEffects:
    case EM_ConstantFold:

    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
      return checkingForUndefinedBehavior();

    llvm_unreachable("Missed EvalMode case");

  bool noteUndefinedBehavior() override {
    return keepEvaluatingAfterUndefinedBehavior();

  bool keepEvaluatingAfterFailure() const override {
    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
    case EM_ConstantFold:
    case EM_IgnoreSideEffects:
      return checkingPotentialConstantExpression() ||
             checkingForUndefinedBehavior();

    llvm_unreachable("Missed EvalMode case");

  [[nodiscard]] bool noteFailure() {
    bool KeepGoing = keepEvaluatingAfterFailure();
  class ArrayInitLoopIndex {
    ArrayInitLoopIndex(EvalInfo &Info)
        : Info(Info), OuterIndex(Info.ArrayInitIndex) {
      Info.ArrayInitIndex = 0;

    ~ArrayInitLoopIndex() { Info.ArrayInitIndex = OuterIndex; }

    operator uint64_t &() { return Info.ArrayInitIndex; }
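
/// RAII object that temporarily switches the evaluator into constant-folding
/// mode (EM_ConstantFold) and, on success, discards any diagnostics that were
/// produced while folding.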
struct FoldConstant {
  bool HadNoPriorDiags;
  EvalInfo::EvaluationMode OldMode;

  explicit FoldConstant(EvalInfo &Info, bool Enabled)
        HadNoPriorDiags(Info.EvalStatus.Diag &&
                        Info.EvalStatus.Diag->empty() &&
                        !Info.EvalStatus.HasSideEffects),
        OldMode(Info.EvalMode) {
      Info.EvalMode = EvalInfo::EM_ConstantFold;

  void keepDiagnostics() { Enabled = false; }

    if (Enabled && HadNoPriorDiags && !Info.EvalStatus.Diag->empty() &&
        !Info.EvalStatus.HasSideEffects)
      Info.EvalStatus.Diag->clear();
    Info.EvalMode = OldMode;
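
/// RAII object that temporarily switches the evaluation mode to
/// EM_IgnoreSideEffects, restoring the previous mode on destruction.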
struct IgnoreSideEffectsRAII {
  EvalInfo::EvaluationMode OldMode;
  explicit IgnoreSideEffectsRAII(EvalInfo &Info)
      : Info(Info), OldMode(Info.EvalMode) {
    Info.EvalMode = EvalInfo::EM_IgnoreSideEffects;

  ~IgnoreSideEffectsRAII() { Info.EvalMode = OldMode; }
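
/// RAII object for speculative evaluation: diagnostics are redirected to a
/// separate buffer, and the previous evaluation status and speculative depth
/// are restored when the object is destroyed or cancelled.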
class SpeculativeEvaluationRAII {
  EvalInfo *Info = nullptr;

  unsigned OldSpeculativeEvaluationDepth = 0;

  void moveFromAndCancel(SpeculativeEvaluationRAII &&Other) {
    OldStatus = Other.OldStatus;
    OldSpeculativeEvaluationDepth = Other.OldSpeculativeEvaluationDepth;
    Other.Info = nullptr;

  void maybeRestoreState() {
    Info->EvalStatus = OldStatus;
    Info->SpeculativeEvaluationDepth = OldSpeculativeEvaluationDepth;

  SpeculativeEvaluationRAII() = default;

  SpeculativeEvaluationRAII(
      : Info(&Info), OldStatus(Info.EvalStatus),
        OldSpeculativeEvaluationDepth(Info.SpeculativeEvaluationDepth) {
    Info.EvalStatus.Diag = NewDiag;
    Info.SpeculativeEvaluationDepth = Info.CallStackDepth + 1;

  SpeculativeEvaluationRAII(const SpeculativeEvaluationRAII &Other) = delete;
  SpeculativeEvaluationRAII(SpeculativeEvaluationRAII &&Other) {
    moveFromAndCancel(std::move(Other));

  SpeculativeEvaluationRAII &operator=(SpeculativeEvaluationRAII &&Other) {
    maybeRestoreState();
    moveFromAndCancel(std::move(Other));

  ~SpeculativeEvaluationRAII() { maybeRestoreState(); }
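
/// RAII object that, on destruction, runs the cleanups registered while its
/// scope (a block, full-expression, or call, per the Kind parameter) was
/// active.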
1413 template<ScopeKind Kind>
1416 unsigned OldStackSize;
1418 ScopeRAII(EvalInfo &Info)
1419 : Info(Info), OldStackSize(Info.CleanupStack.size()) {
1422 Info.CurrentCall->pushTempVersion();
1424 bool destroy(
bool RunDestructors =
true) {
1425 bool OK =
cleanup(Info, RunDestructors, OldStackSize);
1430 if (OldStackSize != -1U)
1434 Info.CurrentCall->popTempVersion();
1437 static bool cleanup(EvalInfo &Info,
bool RunDestructors,
1438 unsigned OldStackSize) {
1439 assert(OldStackSize <= Info.CleanupStack.size() &&
1440 "running cleanups out of order?");
1445 for (
unsigned I = Info.CleanupStack.size(); I > OldStackSize; --I) {
1446 if (Info.CleanupStack[I - 1].isDestroyedAtEndOf(Kind)) {
1447 if (!Info.CleanupStack[I - 1].endLifetime(Info, RunDestructors)) {
1455 auto NewEnd = Info.CleanupStack.begin() + OldStackSize;
1456 if (Kind != ScopeKind::Block)
1458 std::remove_if(NewEnd, Info.CleanupStack.end(), [](Cleanup &
C) {
1459 return C.isDestroyedAtEndOf(Kind);
1461 Info.CleanupStack.erase(NewEnd, Info.CleanupStack.end());
1465 typedef ScopeRAII<ScopeKind::Block> BlockScopeRAII;
1466 typedef ScopeRAII<ScopeKind::FullExpression> FullExpressionRAII;
1467 typedef ScopeRAII<ScopeKind::Call> CallScopeRAII;
bool SubobjectDesignator::checkSubobject(EvalInfo &Info, const Expr *E,
  if (isOnePastTheEnd()) {
    Info.CCEDiag(E, diag::note_constexpr_past_end_subobject)

void SubobjectDesignator::diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info,
  Info.CCEDiag(E, diag::note_constexpr_unsized_array_indexed);

void SubobjectDesignator::diagnosePointerArithmetic(EvalInfo &Info,
  if (MostDerivedPathLength == Entries.size() && MostDerivedIsArrayElement)
    Info.CCEDiag(E, diag::note_constexpr_array_index)
        << static_cast<unsigned>(getMostDerivedArraySize());
    Info.CCEDiag(E, diag::note_constexpr_array_index)

CallStackFrame::CallStackFrame(EvalInfo &Info, SourceRange CallRange,
      Index(Info.NextCallIndex++) {
  Info.CurrentCall = this;
  ++Info.CallStackDepth;

CallStackFrame::~CallStackFrame() {
  assert(Info.CurrentCall == this && "calls retired out of order");
  --Info.CallStackDepth;
  Info.CurrentCall = Caller;

  llvm_unreachable("unknown access kind");

  llvm_unreachable("unknown access kind");
struct ComplexValue {
  ComplexValue() : FloatReal(APFloat::Bogus()), FloatImag(APFloat::Bogus()) {}

  void makeComplexFloat() { IsInt = false; }
  bool isComplexFloat() const { return !IsInt; }
  APFloat &getComplexFloatReal() { return FloatReal; }
  APFloat &getComplexFloatImag() { return FloatImag; }

  void makeComplexInt() { IsInt = true; }
  bool isComplexInt() const { return IsInt; }
  APSInt &getComplexIntReal() { return IntReal; }
  APSInt &getComplexIntImag() { return IntImag; }

    if (isComplexFloat())

    assert(v.isComplexFloat() || v.isComplexInt());
    if (v.isComplexFloat()) {
      FloatReal = v.getComplexFloatReal();
      FloatImag = v.getComplexFloatImag();

      IntReal = v.getComplexIntReal();
      IntImag = v.getComplexIntImag();
  bool InvalidBase : 1;

  CharUnits &getLValueOffset() { return Offset; }
  const CharUnits &getLValueOffset() const { return Offset; }
  SubobjectDesignator &getLValueDesignator() { return Designator; }
  const SubobjectDesignator &getLValueDesignator() const { return Designator; }
  bool isNullPointer() const { return IsNullPtr; }

  unsigned getLValueCallIndex() const { return Base.getCallIndex(); }
  unsigned getLValueVersion() const { return Base.getVersion(); }

    assert(!InvalidBase && "APValues can't handle invalid LValue bases");

    assert(V.isLValue() && "Setting LValue from a non-LValue?");
    Base = V.getLValueBase();
    Offset = V.getLValueOffset();
    InvalidBase = false;

    IsNullPtr = V.isNullPointer();

      const auto *E = B.get<const Expr *>();
      assert((isa<MemberExpr>(E) || tryUnwrapAllocSizeCall(E)) &&
             "Unexpected type of invalid base");

    InvalidBase = BInvalid;
    Designator = SubobjectDesignator(getType(B));

    InvalidBase = false;

    moveInto(Printable);
1695 template <
typename GenDiagType>
1696 bool checkNullPointerDiagnosingWith(
const GenDiagType &GenDiag) {
1708 bool checkNullPointer(EvalInfo &Info,
const Expr *
E,
1710 return checkNullPointerDiagnosingWith([&Info,
E, CSK] {
1711 Info.CCEDiag(
E, diag::note_constexpr_null_subobject) << CSK;
1715 bool checkNullPointerForFoldAccess(EvalInfo &Info,
const Expr *
E,
1717 return checkNullPointerDiagnosingWith([&Info,
E, AK] {
1718 Info.FFDiag(
E, diag::note_constexpr_access_null) << AK;
1729 void addDecl(EvalInfo &Info,
const Expr *
E,
1734 void addUnsizedArray(EvalInfo &Info,
const Expr *
E,
QualType ElemTy) {
1736 Info.CCEDiag(
E, diag::note_constexpr_unsupported_unsized_array);
1741 assert(getType(
Base)->isPointerType() || getType(
Base)->isArrayType());
1742 Designator.FirstEntryIsAnUnsizedArray =
true;
  void addComplex(EvalInfo &Info, const Expr *E, QualType EltTy, bool Imag) {

  void addVectorElement(EvalInfo &Info, const Expr *E, QualType EltTy,
                        uint64_t Size, uint64_t Idx) {
    Designator.addVectorElementUnchecked(EltTy, Size, Idx);

  void clearIsNullPointer() {

  void adjustOffsetAndIndex(EvalInfo &Info, const Expr *E,
      uint64_t Offset64 = Offset.getQuantity();
      uint64_t Index64 = Index.extOrTrunc(64).getZExtValue();

      clearIsNullPointer();

      clearIsNullPointer();
      : DeclAndIsDerivedMember(Decl, false) {}

    return DeclAndIsDerivedMember.getPointer();

  bool isDerivedMember() const {
    return DeclAndIsDerivedMember.getInt();

    return cast<CXXRecordDecl>(
        DeclAndIsDerivedMember.getPointer()->getDeclContext());

    assert(V.isMemberPointer());
    DeclAndIsDerivedMember.setPointer(V.getMemberPointerDecl());
    DeclAndIsDerivedMember.setInt(V.isMemberPointerToDerivedMember());

  llvm::PointerIntPair<const ValueDecl *, 1, bool> DeclAndIsDerivedMember;

    assert(!Path.empty());

    if (Path.size() >= 2)

    if (Expected->getCanonicalDecl() != Class->getCanonicalDecl()) {

    if (!isDerivedMember()) {
      Path.push_back(Derived);

    if (!castBack(Derived))

    DeclAndIsDerivedMember.setInt(false);

    DeclAndIsDerivedMember.setInt(true);
    if (isDerivedMember()) {

    return castBack(Base);

static bool operator==(const MemberPtr &LHS, const MemberPtr &RHS) {
  if (!LHS.getDecl() || !RHS.getDecl())
    return !LHS.getDecl() && !RHS.getDecl();
  if (LHS.getDecl()->getCanonicalDecl() != RHS.getDecl()->getCanonicalDecl())
  return LHS.Path == RHS.Path;
1889 const LValue &This,
const Expr *
E,
1890 bool AllowNonLiteralTypes =
false);
1892 bool InvalidBaseOK =
false);
1894 bool InvalidBaseOK =
false);
1908 std::string *StringResult =
nullptr);
1925 if (Int.isUnsigned() || Int.isMinSignedValue()) {
1926 Int = Int.extend(Int.getBitWidth() + 1);
1927 Int.setIsSigned(
true);
1932template<
typename KeyT>
1934 ScopeKind
Scope, LValue &LV) {
1935 unsigned Version = getTempVersion();
1944 assert(Args.CallIndex == Index &&
"creating parameter in wrong frame");
1950 return createLocal(
Base, PVD, PVD->
getType(), ScopeKind::Call);
1955 assert(
Base.getCallIndex() == Index &&
"lvalue for wrong frame");
1956 unsigned Version =
Base.getVersion();
1957 APValue &Result = Temporaries[MapKeyTy(Key, Version)];
1958 assert(Result.isAbsent() &&
"local created multiple times");
1964 if (Index <= Info.SpeculativeEvaluationDepth) {
1965 if (
T.isDestructedType())
1966 Info.noteSideEffect();
1968 Info.CleanupStack.push_back(Cleanup(&Result,
Base,
T,
Scope));
1975 FFDiag(
E, diag::note_constexpr_heap_alloc_limit_exceeded);
1981 auto Result = HeapAllocs.emplace(std::piecewise_construct,
1982 std::forward_as_tuple(DA), std::tuple<>());
1983 assert(Result.second &&
"reused a heap alloc index?");
1984 Result.first->second.AllocExpr =
E;
1985 return &Result.first->second.Value;
void CallStackFrame::describe(raw_ostream &Out) const {
  unsigned ArgIndex = 0;
      isa<CXXMethodDecl>(Callee) && !isa<CXXConstructorDecl>(Callee) &&
      cast<CXXMethodDecl>(Callee)->isImplicitObjectMemberFunction();

    Callee->getNameForDiagnostic(Out, Info.Ctx.getPrintingPolicy(),

  if (This && IsMemberCall) {
    if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(CallExpr)) {
      const Expr *Object = MCE->getImplicitObjectArgument();
      Object->printPretty(Out, nullptr, Info.Ctx.getPrintingPolicy(),
      if (Object->getType()->isPointerType())
    } else if (const auto *OCE =
                   dyn_cast_if_present<CXXOperatorCallExpr>(CallExpr)) {
      OCE->getArg(0)->printPretty(Out, nullptr,
                                  Info.Ctx.getPrintingPolicy(),
      This->moveInto(Val);
          Info.Ctx.getLValueReferenceType(This->Designator.MostDerivedType));

    Callee->getNameForDiagnostic(Out, Info.Ctx.getPrintingPolicy(),
    IsMemberCall = false;

                                    E = Callee->param_end();
       I != E; ++I, ++ArgIndex) {
    if (ArgIndex > (unsigned)IsMemberCall)

    APValue *V = Info.getParamSlot(Arguments, Param);
      V->printPretty(Out, Info.Ctx, Param->getType());

    if (ArgIndex == 0 && IsMemberCall)
      Out << "->" << *Callee << '(';
  return Info.noteSideEffect();

  unsigned Builtin = E->getBuiltinCallee();
  return (Builtin == Builtin::BI__builtin___CFStringMakeConstantString ||
          Builtin == Builtin::BI__builtin___NSStringMakeConstantString ||
          Builtin == Builtin::BI__builtin_ptrauth_sign_constant ||
          Builtin == Builtin::BI__builtin_function_start);

  if (const VarDecl *VD = dyn_cast<VarDecl>(D))
    return VD->hasGlobalStorage();
  if (isa<TemplateParamObjectDecl>(D))

  return isa<FunctionDecl, MSGuidDecl, UnnamedGlobalConstantDecl>(D);
  case Expr::CompoundLiteralExprClass: {

  case Expr::MaterializeTemporaryExprClass:
    return cast<MaterializeTemporaryExpr>(E)->getStorageDuration() ==
           SD_Static;

  case Expr::StringLiteralClass:
  case Expr::PredefinedExprClass:
  case Expr::ObjCStringLiteralClass:
  case Expr::ObjCEncodeExprClass:

  case Expr::ObjCBoxedExprClass:
    return cast<ObjCBoxedExpr>(E)->isExpressibleAsConstantInitializer();
  case Expr::CallExprClass:

  case Expr::AddrLabelExprClass:

  case Expr::BlockExprClass:
    return !cast<BlockExpr>(E)->getBlockDecl()->hasCaptures();

  case Expr::SourceLocExprClass:

  case Expr::ImplicitValueInitExprClass:

  return LVal.Base.dyn_cast<const ValueDecl *>();

  if (Value.getLValueCallIndex())

  return E && !isa<MaterializeTemporaryExpr>(E);
2155 if (isa_and_nonnull<VarDecl>(
Decl)) {
2165 if (!A.getLValueBase())
2166 return !B.getLValueBase();
2167 if (!B.getLValueBase())
2170 if (A.getLValueBase().getOpaqueValue() !=
2171 B.getLValueBase().getOpaqueValue())
2174 return A.getLValueCallIndex() == B.getLValueCallIndex() &&
2175 A.getLValueVersion() == B.getLValueVersion();
2179 assert(
Base &&
"no location for a null lvalue");
2185 if (
auto *PVD = dyn_cast_or_null<ParmVarDecl>(VD)) {
2187 for (CallStackFrame *F = Info.CurrentCall; F; F = F->Caller) {
2188 if (F->Arguments.CallIndex ==
Base.getCallIndex() &&
2189 F->Arguments.Version ==
Base.getVersion() && F->Callee &&
2190 Idx < F->Callee->getNumParams()) {
2191 VD = F->Callee->getParamDecl(Idx);
2198 Info.Note(VD->
getLocation(), diag::note_declared_at);
2200 Info.Note(
E->
getExprLoc(), diag::note_constexpr_temporary_here);
2203 if (std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA))
2204 Info.Note((*Alloc)->AllocExpr->getExprLoc(),
2205 diag::note_constexpr_dynamic_alloc_here);
2238 const SubobjectDesignator &
Designator = LVal.getLValueDesignator();
2246 if (isTemplateArgument(Kind)) {
2247 int InvalidBaseKind = -1;
2250 InvalidBaseKind = 0;
2251 else if (isa_and_nonnull<StringLiteral>(BaseE))
2252 InvalidBaseKind = 1;
2253 else if (isa_and_nonnull<MaterializeTemporaryExpr>(BaseE) ||
2254 isa_and_nonnull<LifetimeExtendedTemporaryDecl>(BaseVD))
2255 InvalidBaseKind = 2;
2256 else if (
auto *PE = dyn_cast_or_null<PredefinedExpr>(BaseE)) {
2257 InvalidBaseKind = 3;
2258 Ident = PE->getIdentKindName();
2261 if (InvalidBaseKind != -1) {
2262 Info.FFDiag(
Loc, diag::note_constexpr_invalid_template_arg)
2263 << IsReferenceType << !
Designator.Entries.empty() << InvalidBaseKind
2269 if (
auto *FD = dyn_cast_or_null<FunctionDecl>(BaseVD);
2270 FD && FD->isImmediateFunction()) {
2271 Info.FFDiag(
Loc, diag::note_consteval_address_accessible)
2273 Info.Note(FD->getLocation(), diag::note_declared_at);
2281 if (Info.getLangOpts().CPlusPlus11) {
2282 Info.FFDiag(
Loc, diag::note_constexpr_non_global, 1)
2283 << IsReferenceType << !
Designator.Entries.empty() << !!BaseVD
2285 auto *VarD = dyn_cast_or_null<VarDecl>(BaseVD);
2286 if (VarD && VarD->isConstexpr()) {
2292 Info.Note(VarD->getLocation(), diag::note_constexpr_not_static)
2304 assert((Info.checkingPotentialConstantExpression() ||
2305 LVal.getLValueCallIndex() == 0) &&
2306 "have call index for global lvalue");
    Info.FFDiag(Loc, diag::note_constexpr_dynamic_alloc)
        << IsReferenceType << !Designator.Entries.empty();

  if (const VarDecl *Var = dyn_cast<const VarDecl>(BaseVD)) {
    if (Var->getTLSKind())

    if (!isForManglingOnly(Kind) && Var->hasAttr<DLLImportAttr>())

    if (Info.getCtx().getLangOpts().CUDA &&
        Info.getCtx().getLangOpts().CUDAIsDevice &&
        Info.getCtx().CUDAConstantEvalCtx.NoWrongSidedVars) {
      if ((!Var->hasAttr<CUDADeviceAttr>() &&
           !Var->hasAttr<CUDAConstantAttr>() &&
           !Var->getType()->isCUDADeviceBuiltinSurfaceType() &&
           !Var->getType()->isCUDADeviceBuiltinTextureType()) ||
          Var->hasAttr<HIPManagedAttr>())

  if (const auto *FD = dyn_cast<const FunctionDecl>(BaseVD)) {
    if (Info.getLangOpts().CPlusPlus && !isForManglingOnly(Kind) &&
        FD->hasAttr<DLLImportAttr>())
  } else if (const auto *MTE =
                 dyn_cast_or_null<MaterializeTemporaryExpr>(BaseE)) {
    if (CheckedTemps.insert(MTE).second) {
        Info.FFDiag(MTE->getExprLoc(),
                    diag::note_constexpr_unsupported_temporary_nontrivial_dtor)

      APValue *V = MTE->getOrCreateValue(false);
      assert(V && "evaluation result refers to uninitialised temporary");
              Info, MTE->getExprLoc(), TempType, *V, Kind,
              nullptr, CheckedTemps))

  if (!IsReferenceType)

    Info.FFDiag(Loc, diag::note_constexpr_past_end, 1)
        << !Designator.Entries.empty() << !!BaseVD << BaseVD;

  const auto *FD = dyn_cast_or_null<CXXMethodDecl>(Member);

  if (FD->isImmediateFunction()) {
    Info.FFDiag(Loc, diag::note_consteval_address_accessible) << 0;
    Info.Note(FD->getLocation(), diag::note_declared_at);

  return isForManglingOnly(Kind) || FD->isVirtual() ||
         !FD->hasAttr<DLLImportAttr>();
                             const LValue *This = nullptr) {
  if (Info.getLangOpts().CPlusPlus23)

  if (This && Info.EvaluatingDecl == This->getLValueBase())

  if (Info.getLangOpts().CPlusPlus11)
    Info.FFDiag(E, diag::note_constexpr_nonliteral)

    Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);

    if (SubobjectDecl) {
      Info.FFDiag(DiagLoc, diag::note_constexpr_uninitialized)
          << 1 << SubobjectDecl;
                diag::note_constexpr_subobject_declared_here);
    Info.FFDiag(DiagLoc, diag::note_constexpr_uninitialized)
2477 Type = AT->getValueType();
2482 if (
Value.isArray()) {
2484 for (
unsigned I = 0, N =
Value.getArrayInitializedElts(); I != N; ++I) {
2486 Value.getArrayInitializedElt(I), Kind,
2487 SubobjectDecl, CheckedTemps))
2490 if (!
Value.hasArrayFiller())
2493 Value.getArrayFiller(), Kind, SubobjectDecl,
2496 if (
Value.isUnion() &&
Value.getUnionField()) {
2499 Value.getUnionValue(), Kind,
Value.getUnionField(), CheckedTemps);
2501 if (
Value.isStruct()) {
2503 if (
const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
2504 unsigned BaseIndex = 0;
2506 const APValue &BaseValue =
Value.getStructBase(BaseIndex);
2509 Info.FFDiag(TypeBeginLoc, diag::note_constexpr_uninitialized_base)
2510 << BS.getType() <<
SourceRange(TypeBeginLoc, BS.getEndLoc());
2520 for (
const auto *I : RD->
fields()) {
2521 if (I->isUnnamedBitField())
2525 Value.getStructField(I->getFieldIndex()), Kind,
2531 if (
Value.isLValue() &&
2532 CERK == CheckEvaluationResultKind::ConstantExpression) {
2534 LVal.setFrom(Info.Ctx,
Value);
2539 if (
Value.isMemberPointer() &&
2540 CERK == CheckEvaluationResultKind::ConstantExpression)
2560 nullptr, CheckedTemps);
2569 CheckEvaluationResultKind::FullyInitialized, Info, DiagLoc,
Type,
Value,
2570 ConstantExprKind::Normal,
nullptr, CheckedTemps);
  if (!Info.HeapAllocs.empty()) {
    Info.CCEDiag(Info.HeapAllocs.begin()->second.AllocExpr,
                 diag::note_constexpr_memory_leak)
        << unsigned(Info.HeapAllocs.size() - 1);

  if (!Value.getLValueBase()) {
    Result = !Value.getLValueOffset().isZero();

    Result = Val.getInt().getBoolValue();

  llvm_unreachable("unknown APValue kind");

  assert(E->isPRValue() && "missing lvalue-to-rvalue conv in bool condition");

  Info.CCEDiag(E, diag::note_constexpr_overflow)
      << SrcValue << DestType;
  return Info.noteUndefinedBehavior();
  unsigned DestWidth = Info.Ctx.getIntWidth(DestType);

  Result = APSInt(DestWidth, !DestSigned);

  if (Value.convertToInteger(Result, llvm::APFloat::rmTowardZero, &ignored)
      & APFloat::opInvalidOp)

  llvm::RoundingMode RM =
  if (RM == llvm::RoundingMode::Dynamic)
    RM = llvm::RoundingMode::NearestTiesToEven;

                                 APFloat::opStatus St) {
  if (Info.InConstantContext)

  if ((St & APFloat::opInexact) &&
    Info.FFDiag(E, diag::note_constexpr_dynamic_rounding);

  if ((St != APFloat::opOK) &&
       FPO.getAllowFEnvAccess())) {
    Info.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);

  if ((St & APFloat::opStatus::opInvalidOp) &&

  assert((isa<CastExpr>(E) || isa<CompoundAssignOperator>(E) ||
          isa<ConvertVectorExpr>(E)) &&
         "HandleFloatToFloatCast has been checked with only CastExpr, "
         "CompoundAssignOperator and ConvertVectorExpr. Please either validate "
         "the new expression or address the root cause of this usage.");

  APFloat::opStatus St;
  APFloat Value = Result;
  St = Result.convert(Info.Ctx.getFloatTypeSemantics(DestType), RM, &ignored);

  unsigned DestWidth = Info.Ctx.getIntWidth(DestType);

  Result = Value.getBoolValue();

                                  QualType DestType, APFloat &Result) {
  Result = APFloat(Info.Ctx.getFloatTypeSemantics(DestType), 1);

  APFloat::opStatus St = Result.convertFromAPInt(Value, Value.isSigned(), RM);
2773 assert(FD->
isBitField() &&
"truncateBitfieldValue on non-bitfield");
2775 if (!
Value.isInt()) {
2779 assert(
Value.isLValue() &&
"integral value neither int nor lvalue?");
2785 unsigned OldBitWidth = Int.getBitWidth();
2787 if (NewBitWidth < OldBitWidth)
2788 Int = Int.trunc(NewBitWidth).extend(OldBitWidth);
2795template<
typename Operation>
2798 unsigned BitWidth, Operation Op,
2800 if (LHS.isUnsigned()) {
2801 Result = Op(LHS, RHS);
2805 APSInt Value(Op(LHS.extend(BitWidth), RHS.extend(BitWidth)),
false);
2806 Result =
Value.trunc(LHS.getBitWidth());
2807 if (Result.extend(BitWidth) !=
Value) {
2808 if (Info.checkingForUndefinedBehavior())
2810 diag::warn_integer_constant_overflow)
2811 <<
toString(Result, 10, Result.isSigned(),
false,
2823 bool HandleOverflowResult =
true;
2830 std::multiplies<APSInt>(), Result);
2833 std::plus<APSInt>(), Result);
2836 std::minus<APSInt>(), Result);
  case BO_And: Result = LHS & RHS; return true;
  case BO_Xor: Result = LHS ^ RHS; return true;
  case BO_Or:  Result = LHS | RHS; return true;

      Info.FFDiag(E, diag::note_expr_divide_by_zero)

    if (RHS.isNegative() && RHS.isAllOnes() && LHS.isSigned() &&
        LHS.isMinSignedValue())
          Info, E, -LHS.extend(LHS.getBitWidth() + 1), E->getType());
    Result = (Opcode == BO_Rem ? LHS % RHS : LHS / RHS);
    return HandleOverflowResult;

    if (Info.getLangOpts().OpenCL)
      RHS &= APSInt(llvm::APInt(RHS.getBitWidth(),
                                static_cast<uint64_t>(LHS.getBitWidth() - 1)),
    else if (RHS.isSigned() && RHS.isNegative()) {
      Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
      if (!Info.noteUndefinedBehavior())

    unsigned SA = (unsigned)RHS.getLimitedValue(LHS.getBitWidth() - 1);
      Info.CCEDiag(E, diag::note_constexpr_large_shift)
          << RHS << E->getType() << LHS.getBitWidth();
      if (!Info.noteUndefinedBehavior())
    } else if (LHS.isSigned() && !Info.getLangOpts().CPlusPlus20) {
      if (LHS.isNegative()) {
        Info.CCEDiag(E, diag::note_constexpr_lshift_of_negative) << LHS;
        if (!Info.noteUndefinedBehavior())
      } else if (LHS.countl_zero() < SA) {
        Info.CCEDiag(E, diag::note_constexpr_lshift_discards);
        if (!Info.noteUndefinedBehavior())

    if (Info.getLangOpts().OpenCL)
      RHS &= APSInt(llvm::APInt(RHS.getBitWidth(),
                                static_cast<uint64_t>(LHS.getBitWidth() - 1)),
    else if (RHS.isSigned() && RHS.isNegative()) {
      Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
      if (!Info.noteUndefinedBehavior())

    unsigned SA = (unsigned)RHS.getLimitedValue(LHS.getBitWidth() - 1);
      Info.CCEDiag(E, diag::note_constexpr_large_shift)
          << RHS << E->getType() << LHS.getBitWidth();
      if (!Info.noteUndefinedBehavior())

  case BO_LT: Result = LHS < RHS; return true;
  case BO_GT: Result = LHS > RHS; return true;
  case BO_LE: Result = LHS <= RHS; return true;
  case BO_GE: Result = LHS >= RHS; return true;
  case BO_EQ: Result = LHS == RHS; return true;
  case BO_NE: Result = LHS != RHS; return true;

  llvm_unreachable("BO_Cmp should be handled elsewhere");
2941 const APFloat &RHS) {
2943 APFloat::opStatus St;
2949 St = LHS.multiply(RHS, RM);
2952 St = LHS.add(RHS, RM);
2955 St = LHS.subtract(RHS, RM);
2961 Info.CCEDiag(
E, diag::note_expr_divide_by_zero);
2962 St = LHS.divide(RHS, RM);
2971 Info.CCEDiag(
E, diag::note_constexpr_float_arithmetic) << LHS.isNaN();
2972 return Info.noteUndefinedBehavior();
2980 const APInt &RHSValue, APInt &Result) {
2981 bool LHS = (LHSValue != 0);
2982 bool RHS = (RHSValue != 0);
2984 if (Opcode == BO_LAnd)
2985 Result = LHS && RHS;
2987 Result = LHS || RHS;
2992 const APFloat &RHSValue, APInt &Result) {
2993 bool LHS = !LHSValue.isZero();
2994 bool RHS = !RHSValue.isZero();
2996 if (Opcode == BO_LAnd)
2997 Result = LHS && RHS;
2999 Result = LHS || RHS;
3005 const APValue &RHSValue, APInt &Result) {
3009 RHSValue.
getInt(), Result);
3015template <
typename APTy>
3018 const APTy &RHSValue, APInt &Result) {
3021 llvm_unreachable(
"unsupported binary operator");
3023 Result = (LHSValue == RHSValue);
3026 Result = (LHSValue != RHSValue);
3029 Result = (LHSValue < RHSValue);
3032 Result = (LHSValue > RHSValue);
3035 Result = (LHSValue <= RHSValue);
3038 Result = (LHSValue >= RHSValue);
3052 const APValue &RHSValue, APInt &Result) {
3056 RHSValue.
getInt(), Result);
3067 assert(Opcode != BO_PtrMemD && Opcode != BO_PtrMemI &&
3068 "Operation not supported on vector types");
3072 QualType EltTy = VT->getElementType();
3079 "A vector result that isn't a vector OR uncalculated LValue");
3085 RHSValue.
getVectorLength() == NumElements &&
"Different vector sizes");
3089 for (
unsigned EltNum = 0; EltNum < NumElements; ++EltNum) {
3094 APSInt EltResult{Info.Ctx.getIntWidth(EltTy),
3104 RHSElt.
getInt(), EltResult);
3110 ResultElements.emplace_back(EltResult);
3115 "Mismatched LHS/RHS/Result Type");
3116 APFloat LHSFloat = LHSElt.
getFloat();
3124 ResultElements.emplace_back(LHSFloat);
3128 LHSValue =
APValue(ResultElements.data(), ResultElements.size());
                               unsigned TruncatedElements) {
  SubobjectDesignator &D = Result.Designator;

  if (TruncatedElements == D.Entries.size())
  assert(TruncatedElements >= D.MostDerivedPathLength &&
         "not casting to a derived class");

  for (unsigned I = TruncatedElements, N = D.Entries.size(); I != N; ++I) {
    if (isVirtualBaseClass(D.Entries[I]))
  D.Entries.resize(TruncatedElements);

    RL = &Info.Ctx.getASTRecordLayout(Derived);

  Obj.getLValueOffset() += RL->getBaseClassOffset(Base);
  Obj.addDecl(Info, E, Base, false);

  if (!Base->isVirtual())

  SubobjectDesignator &D = Obj.Designator;

  DerivedDecl = D.MostDerivedType->getAsCXXRecordDecl();

  const ASTRecordLayout &Layout = Info.Ctx.getASTRecordLayout(DerivedDecl);
  Obj.addDecl(Info, E, BaseDecl, true);

                                              PathE = E->path_end();
       PathI != PathE; ++PathI) {
    Type = (*PathI)->getType();

  llvm_unreachable("Class must be derived from the passed in base class!");

    RL = &Info.Ctx.getASTRecordLayout(FD->getParent());

  LVal.adjustOffset(Info.Ctx.toCharUnitsFromBits(RL->getFieldOffset(I)));
  LVal.addDecl(Info, E, FD);

  for (const auto *C : IFD->chain())

  if (SOT == SizeOfType::SizeOf)
    Size = Info.Ctx.getTypeSizeInChars(Type);
    Size = Info.Ctx.getTypeInfoDataSizeInChars(Type).Width;

  LVal.adjustOffsetAndIndex(Info, E, Adjustment, SizeOfPointee);

                                        int64_t Adjustment) {
                                    APSInt::get(Adjustment));

  LVal.Offset += SizeOfComponent;

  LVal.addComplex(Info, E, EltTy, Imag);

                                    uint64_t Size, uint64_t Idx) {
  LVal.Offset += SizeOfElement * Idx;

  LVal.addVectorElement(Info, E, EltTy, Size, Idx);
                                const VarDecl *VD, CallStackFrame *Frame,
                                unsigned Version, APValue *&Result) {

    Result = Frame->getTemporary(VD, Version);

    if (!isa<ParmVarDecl>(VD)) {
             "missing value for local variable");
      if (Info.checkingPotentialConstantExpression())
                  diag::note_unimplemented_constexpr_lambda_feature_ast)
          << "captures not currently allowed";

  if (Info.EvaluatingDecl == Base) {
    Result = Info.EvaluatingDeclValue;

  if (isa<ParmVarDecl>(VD)) {
    if (!Info.checkingPotentialConstantExpression() ||
        !Info.CurrentCall->Callee ||
      if (Info.getLangOpts().CPlusPlus11) {
        Info.FFDiag(E, diag::note_constexpr_function_param_value_unknown)

    if (!Info.checkingPotentialConstantExpression()) {
      Info.FFDiag(E, diag::note_constexpr_var_init_unknown, 1)

  if (Init->isValueDependent()) {
    if (!Info.checkingPotentialConstantExpression()) {
      Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
                         ? diag::note_constexpr_ltor_non_constexpr
                         : diag::note_constexpr_ltor_non_integral, 1)

    Info.FFDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;

      ((Info.getLangOpts().CPlusPlus || Info.getLangOpts().OpenCL) &&
    Info.CCEDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;

    Info.FFDiag(E, diag::note_constexpr_var_init_weak) << VD;

    if (I->getType()->getAsCXXRecordDecl()->getCanonicalDecl() == Base)

  llvm_unreachable("base class missing from derived class's bases list");
3498 assert(!isa<SourceLocExpr>(Lit) &&
3499 "SourceLocExpr should have already been converted to a StringLiteral");
3502 if (
const auto *ObjCEnc = dyn_cast<ObjCEncodeExpr>(Lit)) {
3504 Info.Ctx.getObjCEncodingForType(ObjCEnc->getEncodedType(), Str);
3505 assert(Index <= Str.size() &&
"Index too large");
3506 return APSInt::getUnsigned(Str.c_str()[Index]);
3509 if (
auto PE = dyn_cast<PredefinedExpr>(Lit))
3510 Lit = PE->getFunctionName();
3513 Info.Ctx.getAsConstantArrayType(S->getType());
3514 assert(CAT &&
"string literal isn't an array");
3516 assert(CharType->
isIntegerType() &&
"unexpected character type");
3519 if (Index < S->getLength())
3520 Value = S->getCodeUnit(Index);
3532 AllocType.isNull() ? S->getType() : AllocType);
3533 assert(CAT &&
"string literal isn't an array");
3535 assert(CharType->
isIntegerType() &&
"unexpected character type");
3539 std::min(S->getLength(), Elts), Elts);
3542 if (Result.hasArrayFiller())
3544 for (
unsigned I = 0, N = Result.getArrayInitializedElts(); I != N; ++I) {
3545 Value = S->getCodeUnit(I);
3552 unsigned Size = Array.getArraySize();
3553 assert(Index < Size);
3556 unsigned OldElts = Array.getArrayInitializedElts();
3557 unsigned NewElts = std::max(Index+1, OldElts * 2);
3558 NewElts = std::min(Size, std::max(NewElts, 8u));
3562 for (
unsigned I = 0; I != OldElts; ++I)
3564 for (
unsigned I = OldElts; I != NewElts; ++I)
3568 Array.swap(NewValue);
  for (auto *Field : RD->fields())
    if (!Field->isUnnamedBitField() &&

  for (auto &BaseSpec : RD->bases())

  for (auto *Field : RD->fields()) {
    if (Field->isMutable() &&
      Info.FFDiag(E, diag::note_constexpr_access_mutable, 1) << AK << Field;
      Info.Note(Field->getLocation(), diag::note_declared_at);

  for (auto &BaseSpec : RD->bases())

                                          bool MutableSubobject = false) {
  switch (Info.IsEvaluatingDecl) {
  case EvalInfo::EvaluatingDeclKind::None:

  case EvalInfo::EvaluatingDeclKind::Ctor:
    if (Info.EvaluatingDecl == Base)

    if (auto *BaseE = Base.dyn_cast<const Expr *>())
      if (auto *BaseMTE = dyn_cast<MaterializeTemporaryExpr>(BaseE))
        return Info.EvaluatingDecl == BaseMTE->getExtendingDecl();

  case EvalInfo::EvaluatingDeclKind::Dtor:
    if (MutableSubobject || Base != Info.EvaluatingDecl)

  llvm_unreachable("unknown evaluating decl kind");

  return Info.CheckArraySize(
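
/// A handle to a complete object: its base, a pointer to its value, and its
/// type.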
struct CompleteObject {
  CompleteObject() : Value(nullptr) {}

  bool mayAccessMutableMembers(EvalInfo &Info, AccessKinds AK) const {
    if (!Info.getLangOpts().CPlusPlus14)

  explicit operator bool() const { return !Type.isNull(); }

                        bool IsMutable = false) {

template <typename SubobjectHandler>
typename SubobjectHandler::result_type
               const SubobjectDesignator &Sub, SubobjectHandler &handler) {
    return handler.failed();
  if (Sub.isOnePastTheEnd() || Sub.isMostDerivedAnUnsizedArray()) {
    if (Info.getLangOpts().CPlusPlus11)
      Info.FFDiag(E, Sub.isOnePastTheEnd()
                         ? diag::note_constexpr_access_past_end
                         : diag::note_constexpr_access_unsized_array)
          << handler.AccessKind;
    return handler.failed();

  const FieldDecl *VolatileField = nullptr;

  for (unsigned I = 0, N = Sub.Entries.size(); ; ++I) {
      if (!Info.checkingPotentialConstantExpression())
        Info.FFDiag(E, diag::note_constexpr_access_uninit)
      return handler.failed();

        Info.isEvaluatingCtorDtor(
            ConstructionPhase::None) {

      if (Info.getLangOpts().CPlusPlus) {
        if (VolatileField) {
          Decl = VolatileField;
        } else if (auto *VD = Obj.Base.dyn_cast<const ValueDecl *>()) {
          Loc = VD->getLocation();

        if (auto *E = Obj.Base.dyn_cast<const Expr *>())

        Info.FFDiag(E, diag::note_constexpr_access_volatile_obj, 1)
            << handler.AccessKind << DiagKind << Decl;
        Info.Note(Loc, diag::note_constexpr_volatile_here) << DiagKind;

        Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
      return handler.failed();

        !Obj.mayAccessMutableMembers(Info, handler.AccessKind) &&
      return handler.failed();

      if (!handler.found(*O, ObjType))

      LastField = nullptr;

      assert(CAT && "vla in literal type?");
      uint64_t Index = Sub.Entries[I].getAsArrayIndex();
      if (CAT->getSize().ule(Index)) {
        if (Info.getLangOpts().CPlusPlus11)
          Info.FFDiag(E, diag::note_constexpr_access_past_end)
              << handler.AccessKind;
        return handler.failed();

    } else if (!isRead(handler.AccessKind)) {
      return handler.failed();

      uint64_t Index = Sub.Entries[I].getAsArrayIndex();
      if (Info.getLangOpts().CPlusPlus11)
        Info.FFDiag(E, diag::note_constexpr_access_past_end)
            << handler.AccessKind;
      return handler.failed();
3881 assert(I == N - 1 &&
"extracting subobject of scalar?");
3891 uint64_t Index = Sub.Entries[I].getAsArrayIndex();
3892 unsigned NumElements = VT->getNumElements();
3893 if (Index == NumElements) {
3894 if (Info.getLangOpts().CPlusPlus11)
3895 Info.FFDiag(
E, diag::note_constexpr_access_past_end)
3896 << handler.AccessKind;
3899 return handler.failed();
3902 if (Index > NumElements) {
3903 Info.CCEDiag(
E, diag::note_constexpr_array_index)
3904 << Index << 0 << NumElements;
3905 return handler.failed();
3908 ObjType = VT->getElementType();
3909 assert(I == N - 1 &&
"extracting subobject of scalar?");
3911 }
else if (
const FieldDecl *Field = getAsField(Sub.Entries[I])) {
3912 if (Field->isMutable() &&
3913 !Obj.mayAccessMutableMembers(Info, handler.AccessKind)) {
3914 Info.FFDiag(
E, diag::note_constexpr_access_mutable, 1)
3915 << handler.AccessKind << Field;
3916 Info.Note(Field->getLocation(), diag::note_declared_at);
3917 return handler.failed();
3926 if (I == N - 1 && handler.AccessKind ==
AK_Construct) {
3934 Info.FFDiag(
E, diag::note_constexpr_access_inactive_union_member)
3935 << handler.AccessKind << Field << !UnionField << UnionField;
3936 return handler.failed();
3945 if (Field->getType().isVolatileQualified())
3946 VolatileField = Field;
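
/// A subobject handler that reads (extracts) the value of the subobject that
/// was found.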
struct ExtractSubobjectHandler {
  typedef bool result_type;
  bool failed() { return false; }

                             const CompleteObject &Obj,
                             const SubobjectDesignator &Sub, APValue &Result,
  ExtractSubobjectHandler Handler = {Info, E, Result, AK};
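
/// A subobject handler that overwrites the subobject that was found with a
/// new value, diagnosing attempts to modify a const subobject.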
struct ModifySubobjectHandler {
  typedef bool result_type;

    Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;

  bool failed() { return false; }

    if (!checkConst(SubobjType))

    Subobj.swap(NewVal);

    if (!checkConst(SubobjType))

    if (!NewVal.isInt()) {

    if (!checkConst(SubobjType))

const AccessKinds ModifySubobjectHandler::AccessKind;

                            const CompleteObject &Obj,
                            const SubobjectDesignator &Sub,
  ModifySubobjectHandler Handler = {Info, NewVal, E};
4054 const SubobjectDesignator &A,
4055 const SubobjectDesignator &B,
4056 bool &WasArrayIndex) {
4057 unsigned I = 0, N = std::min(A.Entries.size(), B.Entries.size());
4058 for (; I != N; ++I) {
4062 if (A.Entries[I].getAsArrayIndex() != B.Entries[I].getAsArrayIndex()) {
4063 WasArrayIndex =
true;
4071 if (A.Entries[I].getAsBaseOrMember() !=
4072 B.Entries[I].getAsBaseOrMember()) {
4073 WasArrayIndex =
false;
4076 if (
const FieldDecl *FD = getAsField(A.Entries[I]))
4078 ObjType = FD->getType();
4084 WasArrayIndex =
false;
4091 const SubobjectDesignator &A,
4092 const SubobjectDesignator &B) {
4093 if (A.Entries.size() != B.Entries.size())
4096 bool IsArray = A.MostDerivedIsArrayElement;
4097 if (IsArray && A.MostDerivedPathLength != A.Entries.size())
4106 return CommonLength >= A.Entries.size() - IsArray;
  if (LVal.InvalidBase) {
    return CompleteObject();

    Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
    return CompleteObject();

  CallStackFrame *Frame = nullptr;

  if (LVal.getLValueCallIndex()) {
    std::tie(Frame, Depth) =
        Info.getCallFrameAndDepth(LVal.getLValueCallIndex());
      Info.FFDiag(E, diag::note_constexpr_lifetime_ended, 1)
          << AK << LVal.Base.is<const ValueDecl *>();
      return CompleteObject();

    if (Info.getLangOpts().CPlusPlus)
      Info.FFDiag(E, diag::note_constexpr_access_volatile_type)
    return CompleteObject();

  QualType BaseType = getType(LVal.Base);

  if (Info.getLangOpts().CPlusPlus14 && LVal.Base == Info.EvaluatingDecl &&
    BaseVal = Info.EvaluatingDeclValue;

    if (auto *GD = dyn_cast<MSGuidDecl>(D)) {
        Info.FFDiag(E, diag::note_constexpr_modify_global);
        return CompleteObject();

        Info.FFDiag(E, diag::note_constexpr_unsupported_layout)
        return CompleteObject();
      return CompleteObject(LVal.Base, &V, GD->getType());
4178 if (
auto *GCD = dyn_cast<UnnamedGlobalConstantDecl>(
D)) {
4180 Info.FFDiag(
E, diag::note_constexpr_modify_global);
4181 return CompleteObject();
4183 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&GCD->getValue()),
4188 if (
auto *TPO = dyn_cast<TemplateParamObjectDecl>(
D)) {
4190 Info.FFDiag(
E, diag::note_constexpr_modify_global);
4191 return CompleteObject();
4193 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&TPO->getValue()),
    const VarDecl *VD = dyn_cast<VarDecl>(D);

      return CompleteObject();

    bool IsConstant = BaseType.isConstant(Info.Ctx);
    bool ConstexprVar = false;
    if (const auto *VD = dyn_cast_if_present<VarDecl>(
            Info.EvaluatingDecl.dyn_cast<const ValueDecl *>()))

    if (IsAccess && isa<ParmVarDecl>(VD)) {

    } else if (Info.getLangOpts().CPlusPlus14 &&

        Info.FFDiag(E, diag::note_constexpr_modify_global);
        return CompleteObject();

    } else if (Info.getLangOpts().C23 && ConstexprVar) {

      return CompleteObject();

        return CompleteObject(LVal.getLValueBase(), nullptr, BaseType);
      if (Info.getLangOpts().CPlusPlus) {
        Info.FFDiag(E, diag::note_constexpr_ltor_non_const_int, 1) << VD;
        Info.Note(VD->getLocation(), diag::note_declared_at);

      return CompleteObject();
    } else if (!IsAccess) {
      return CompleteObject(LVal.getLValueBase(), nullptr, BaseType);
    } else if (IsConstant && Info.checkingPotentialConstantExpression() &&

    } else if (IsConstant) {

      if (Info.getLangOpts().CPlusPlus) {
        Info.CCEDiag(E, Info.getLangOpts().CPlusPlus11
                            ? diag::note_constexpr_ltor_non_constexpr
                            : diag::note_constexpr_ltor_non_integral, 1)

        Info.Note(VD->getLocation(), diag::note_declared_at);

      if (Info.getLangOpts().CPlusPlus) {
        Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
                           ? diag::note_constexpr_ltor_non_constexpr
                           : diag::note_constexpr_ltor_non_integral, 1)

        Info.Note(VD->getLocation(), diag::note_declared_at);

      return CompleteObject();

    return CompleteObject();
    std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA);

      Info.FFDiag(E, diag::note_constexpr_access_deleted_object) << AK;
      return CompleteObject();

    return CompleteObject(LVal.Base, &(*Alloc)->Value,
                          LVal.Base.getDynamicAllocType());
            dyn_cast_or_null<MaterializeTemporaryExpr>(Base)) {
      assert(MTE->getStorageDuration() == SD_Static &&
             "should have a frame for a non-global materialized temporary");

      if (!MTE->isUsableInConstantExpressions(Info.Ctx) &&

          return CompleteObject(LVal.getLValueBase(), nullptr, BaseType);

        Info.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
        Info.Note(MTE->getExprLoc(), diag::note_constexpr_temporary_here);
        return CompleteObject();

      BaseVal = MTE->getOrCreateValue(false);
      assert(BaseVal && "got reference to unevaluated temporary");

        return CompleteObject(LVal.getLValueBase(), nullptr, BaseType);

      Info.FFDiag(E, diag::note_constexpr_access_unreadable_object)

          Info.Ctx.getLValueReferenceType(LValType));

      return CompleteObject();

    BaseVal = Frame->getTemporary(Base, LVal.Base.getVersion());
    assert(BaseVal && "missing value for temporary");
  unsigned VisibleDepth = Depth;
  if (llvm::isa_and_nonnull<ParmVarDecl>(
          LVal.Base.dyn_cast<const ValueDecl *>()))

  if ((Frame && Info.getLangOpts().CPlusPlus14 &&
       Info.EvalStatus.HasSideEffects) ||
      (isModification(AK) && VisibleDepth < Info.SpeculativeEvaluationDepth))
    return CompleteObject();

  return CompleteObject(LVal.getLValueBase(), BaseVal, BaseType);
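// Perform an lvalue-to-rvalue conversion: locate the complete object and
// extract the designated subobject into RVal. String literals, predefined
// expressions, and compound literals receive special handling.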
                                           const LValue &LVal, APValue &RVal,
                                           bool WantObjectRepresentation = false) {
  if (LVal.Designator.Invalid)

  if (Base && !LVal.getLValueCallIndex() && !Type.isVolatileQualified()) {

      if (Type.isVolatileQualified()) {

      if (!Evaluate(Lit, Info, CLE->getInitializer()))

        Info.Note(CLE->getExprLoc(), diag::note_declared_at);

      CompleteObject LitObj(LVal.Base, &Lit, Base->getType());

    } else if (isa<StringLiteral>(Base) || isa<PredefinedExpr>(Base)) {

      assert(LVal.Designator.Entries.size() <= 1 &&
             "Can only read characters from string literals");
      if (LVal.Designator.Entries.empty()) {

      if (LVal.Designator.isOnePastTheEnd()) {
        if (Info.getLangOpts().CPlusPlus11)
          Info.FFDiag(Conv, diag::note_constexpr_access_past_end) << AK;

      uint64_t CharIndex = LVal.Designator.Entries[0].getAsArrayIndex();

  return Obj && extractSubobject(Info, Conv, Obj, LVal.Designator, RVal, AK);
  if (LVal.Designator.Invalid)

  if (!Info.getLangOpts().CPlusPlus14) {
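// Handler used with findSubobject to apply a compound assignment to the
// designated subobject, dispatching on whether it holds an integer, float,
// pointer, or vector value.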
struct CompoundAssignSubobjectHandler {
  typedef bool result_type;

      Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;

  bool failed() { return false; }

      return found(Subobj.getInt(), SubobjType);

      return found(Subobj.getFloat(), SubobjType);

      return foundPointer(Subobj, SubobjType);

      return foundVector(Subobj, SubobjType);

      Info.FFDiag(E, diag::note_constexpr_access_uninit)

    if (!checkConst(SubobjType))

    if (!checkConst(SubobjType))

                                  Info.Ctx.getLangOpts());

                                PromotedLHSType, FValue) &&

    return checkConst(SubobjType) &&

    if (!checkConst(SubobjType))

        (Opcode != BO_Add && Opcode != BO_Sub)) {

    if (Opcode == BO_Sub)

    LVal.setFrom(Info.Ctx, Subobj);

    LVal.moveInto(Subobj);

const AccessKinds CompoundAssignSubobjectHandler::AccessKind;
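// Evaluate a compound assignment through the lvalue LVal, guarded by a C++14
// language-mode check before any mutation is attempted.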
                                     const LValue &LVal, QualType LValType,

  if (LVal.Designator.Invalid)

  if (!Info.getLangOpts().CPlusPlus14) {

  CompoundAssignSubobjectHandler Handler = { Info, E, PromotedLValType, Opcode,

  return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
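// Handler used with findSubobject to apply an increment or decrement to the
// designated subobject, detecting integer overflow and using the current
// rounding mode for floating-point adjustments.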
struct IncDecSubobjectHandler {
  typedef bool result_type;

      Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;

  bool failed() { return false; }

      return found(Subobj.getInt(), SubobjType);

      return found(Subobj.getFloat(), SubobjType);

      return foundPointer(Subobj, SubobjType);

    if (!checkConst(SubobjType))

    bool WasNegative = Value.isNegative();

    if (!WasNegative && Value.isNegative() && E->canOverflow()) {

    if (WasNegative && !Value.isNegative() && E->canOverflow()) {
      unsigned BitWidth = Value.getBitWidth();
      APSInt ActualValue(Value.sext(BitWidth + 1), false);
      ActualValue.setBit(BitWidth);

    if (!checkConst(SubobjType))

    APFloat::opStatus St;

      St = Value.add(One, RM);

      St = Value.subtract(One, RM);

    if (!checkConst(SubobjType))

    LVal.setFrom(Info.Ctx, Subobj);

    LVal.moveInto(Subobj);
  if (LVal.Designator.Invalid)

  if (!Info.getLangOpts().CPlusPlus14) {

  IncDecSubobjectHandler Handler = {Info, cast<UnaryOperator>(E), AK, Old};
  return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
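// Evaluate an object expression according to its value category and type: as
// a pointer prvalue, a glvalue, or a temporary of literal or record type;
// anything else is diagnosed as non-literal.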
  if (Object->getType()->isPointerType() && Object->isPRValue())

  if (Object->isGLValue())

  if (Object->getType()->isLiteralType(Info.Ctx))

  if (Object->getType()->isRecordType() && Object->isPRValue())

  Info.FFDiag(Object, diag::note_constexpr_nonliteral) << Object->getType();
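// Apply a member pointer to an object lvalue: verify that the member
// pointer's inheritance path is compatible with the lvalue's designator,
// adjust the designator accordingly, and optionally append the designated
// field itself.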
                                          bool IncludeMember = true) {

  if (!MemPtr.getDecl()) {

  if (MemPtr.isDerivedMember()) {

    if (LV.Designator.MostDerivedPathLength + MemPtr.Path.size() >
        LV.Designator.Entries.size()) {

    unsigned PathLengthToMember =
        LV.Designator.Entries.size() - MemPtr.Path.size();
    for (unsigned I = 0, N = MemPtr.Path.size(); I != N; ++I) {

              LV.Designator.Entries[PathLengthToMember + I]);

                               PathLengthToMember))

  } else if (!MemPtr.Path.empty()) {

    LV.Designator.Entries.reserve(LV.Designator.Entries.size() +
                                  MemPtr.Path.size() + IncludeMember);

    assert(RD && "member pointer access on non-class-type expression");

    for (unsigned I = 1, N = MemPtr.Path.size(); I != N; ++I) {

                            MemPtr.getContainingRecord()))

  if (IncludeMember) {
    if (const FieldDecl *FD = dyn_cast<FieldDecl>(MemPtr.getDecl())) {

                   dyn_cast<IndirectFieldDecl>(MemPtr.getDecl())) {

      llvm_unreachable("can't construct reference to bound member function");

  return MemPtr.getDecl();
                                          bool IncludeMember = true) {

  if (Info.noteFailure()) {

                                  BO->getRHS(), IncludeMember);
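// Perform a base-to-derived cast on an lvalue by shortening its designator
// path, diagnosing casts that would step outside the most-derived object.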
  SubobjectDesignator &D = Result.Designator;
  if (D.Invalid || !Result.checkNullPointer(Info, E, CSK_Derived))

  if (D.MostDerivedPathLength + E->path_size() > D.Entries.size()) {
    Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
        << D.MostDerivedType << TargetQT;

  unsigned NewEntriesSize = D.Entries.size() - E->path_size();

  if (NewEntriesSize == D.MostDerivedPathLength)
    FinalType = D.MostDerivedType->getAsCXXRecordDecl();

    FinalType = getAsBaseClass(D.Entries[NewEntriesSize - 1]);

    Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
        << D.MostDerivedType << TargetQT;
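// Build a default value for a record type by visiting its bases and fields in
// order, skipping unnamed bit-fields and handling array fillers where present.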
  if (!Result.isAbsent())

  if (RD->isInvalidDecl()) {

    if (RD->isUnion()) {

                             std::distance(RD->field_begin(), RD->field_end()));

                                                  End = RD->bases_end();
         I != End; ++I, ++Index)

    for (const auto *I : RD->fields()) {
      if (I->isUnnamedBitField())

                            I->getType(), Result.getStructField(I->getFieldIndex()));

  if (Result.hasArrayFiller())
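// Outcome of evaluating a statement during constant evaluation; see the
// ESR_* values used throughout the statement evaluator below.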
enum EvalStmtResult {
  APValue &Val = Info.CurrentCall->createTemporary(VD, VD->getType(),
                                                   ScopeKind::Block, Result);

  return Info.noteSideEffect();
  if (const VarDecl *VD = dyn_cast<VarDecl>(D))

    for (auto *BD : DD->bindings())
      if (auto *VD = BD->getHoldingVar())
  if (Info.noteSideEffect())

  assert(E->containsErrors() && "valid value-dependent expression should never "
                                "reach invalid code path.");
                         const Expr *Cond, bool &Result) {

  FullExpressionRAII Scope(Info);

  return Scope.destroy();
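// RAII helper that pushes a fresh temporary version onto the current call
// frame and pops it when the scope ends.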
struct TempVersionRAII {
  CallStackFrame &Frame;

  TempVersionRAII(CallStackFrame &Frame) : Frame(Frame) {
    Frame.pushTempVersion();
  }

  ~TempVersionRAII() {
    Frame.popTempVersion();
  }
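// Evaluate one pass over a loop body inside its own block scope, translating
// the statement-evaluation result (succeeded, continue, case-not-found, ...)
// for the enclosing loop.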
  BlockScopeRAII Scope(Info);

  EvalStmtResult ESR = EvaluateStmt(Result, Info, Body, Case);
  if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())

    return ESR_Succeeded;

    return ESR_Continue;

  case ESR_CaseNotFound:

  llvm_unreachable("Invalid EvalStmtResult!");
  BlockScopeRAII Scope(Info);

    if (ESR != ESR_Succeeded) {
      if (ESR != ESR_Failed && !Scope.destroy())

    FullExpressionRAII CondScope(Info);

    if (!CondScope.destroy())

    if (isa<DefaultStmt>(SC)) {

    const CaseStmt *CS = cast<CaseStmt>(SC);

    return Scope.destroy() ? ESR_Succeeded : ESR_Failed;

  if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())

    return ESR_Succeeded;

  case ESR_CaseNotFound:

    Info.FFDiag(Found->getBeginLoc(),
                diag::note_constexpr_stmt_expr_unsupported);

  llvm_unreachable("Invalid EvalStmtResult!");
    Info.CCEDiag(VD->getLocation(), diag::note_constexpr_static_local)
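// Core statement evaluator: check the step limit, then dispatch on the
// statement class. This includes the logic for jumping into compound, if,
// loop, and declaration statements to reach a case or default label.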
  if (!Info.nextStep(S))

    switch (S->getStmtClass()) {
    case Stmt::CompoundStmtClass:

    case Stmt::LabelStmtClass:
    case Stmt::AttributedStmtClass:

    case Stmt::DoStmtClass:

    case Stmt::CaseStmtClass:
    case Stmt::DefaultStmtClass:

    case Stmt::IfStmtClass: {

      const IfStmt *IS = cast<IfStmt>(S);

      BlockScopeRAII Scope(Info);

        if (ESR != ESR_CaseNotFound) {
          assert(ESR != ESR_Succeeded);

      if (ESR == ESR_Failed)

      if (ESR != ESR_CaseNotFound)
        return Scope.destroy() ? ESR : ESR_Failed;

      return ESR_CaseNotFound;

      if (ESR == ESR_Failed)

      if (ESR != ESR_CaseNotFound)
        return Scope.destroy() ? ESR : ESR_Failed;
      return ESR_CaseNotFound;

    case Stmt::WhileStmtClass: {
      EvalStmtResult ESR =

      if (ESR != ESR_Continue)

    case Stmt::ForStmtClass: {
      const ForStmt *FS = cast<ForStmt>(S);
      BlockScopeRAII Scope(Info);

      if (const Stmt *Init = FS->getInit()) {

        if (ESR != ESR_CaseNotFound) {
          assert(ESR != ESR_Succeeded);

      EvalStmtResult ESR =

      if (ESR != ESR_Continue)

      if (const auto *Inc = FS->getInc()) {
        if (Inc->isValueDependent()) {

        FullExpressionRAII IncScope(Info);

    case Stmt::DeclStmtClass: {

      const DeclStmt *DS = cast<DeclStmt>(S);
      for (const auto *D : DS->