#include "llvm/ADT/APFixedPoint.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/TimeProfiler.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "exprconstant"

using llvm::APFixedPoint;
using llvm::FixedPointSemantics;
  using SourceLocExprScopeGuard =
      CurrentSourceLocExprScope::SourceLocExprScopeGuard;
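  // The helpers below look through a call expression for an alloc_size
  // attribute on the callee, checking the direct callee first and then the
  // callee declaration reached indirectly.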
  static const AllocSizeAttr *getAllocSizeAttr(const CallExpr *CE) {
    if (const FunctionDecl *DirectCallee = CE->getDirectCallee())
      return DirectCallee->getAttr<AllocSizeAttr>();
    if (const Decl *IndirectCallee = CE->getCalleeDecl())
      return IndirectCallee->getAttr<AllocSizeAttr>();
    return nullptr;
  }
  static const CallExpr *tryUnwrapAllocSizeCall(const Expr *E) {
    if (const auto *FE = dyn_cast<FullExpr>(E))
      E = FE->getSubExpr()->IgnoreParens();

    if (const auto *Cast = dyn_cast<CastExpr>(E))
      E = Cast->getSubExpr()->IgnoreParens();

    if (const auto *CE = dyn_cast<CallExpr>(E))
      return getAllocSizeAttr(CE) ? CE : nullptr;
    return nullptr;
  }
    const auto *E = Base.dyn_cast<const Expr *>();
    return E && E->getType()->isPointerType() && tryUnwrapAllocSizeCall(E);
static bool isForManglingOnly(ConstantExprKind Kind) {
  switch (Kind) {
  case ConstantExprKind::Normal:
  case ConstantExprKind::ClassTemplateArgument:
  case ConstantExprKind::ImmediateInvocation:
    return false;

  case ConstantExprKind::NonClassTemplateArgument:
    return true;
  }
  llvm_unreachable("unknown ConstantExprKind");
}

static bool isTemplateArgument(ConstantExprKind Kind) {
  switch (Kind) {
  case ConstantExprKind::Normal:
  case ConstantExprKind::ImmediateInvocation:
    return false;

  case ConstantExprKind::ClassTemplateArgument:
  case ConstantExprKind::NonClassTemplateArgument:
    return true;
  }
  llvm_unreachable("unknown ConstantExprKind");
}
  static const uint64_t AssumedSizeForUnsizedArray =
      std::numeric_limits<uint64_t>::max() / 2;
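  // Note: AssumedSizeForUnsizedArray (above) claims half of the uint64_t
  // range as the bound for an array of unknown size; presumably that headroom
  // keeps later index arithmetic against the assumed bound from overflowing.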
                                         bool &FirstEntryIsUnsizedArray) {
  assert(!isBaseAnAllocSizeCall(Base) &&
         "Unsized arrays shouldn't appear here");
  unsigned MostDerivedLength = 0;
  Type = getType(Base);

  for (unsigned I = 0, N = Path.size(); I != N; ++I) {
    if (Type->isArrayType()) {
      const ArrayType *AT = Ctx.getAsArrayType(Type);
      Type = AT->getElementType();
      MostDerivedLength = I + 1;
      if (auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
        ArraySize = CAT->getSize().getZExtValue();
      } else {
        assert(I == 0 && "unexpected unsized array designator");
        FirstEntryIsUnsizedArray = true;
        ArraySize = AssumedSizeForUnsizedArray;
      }
    } else if (Type->isAnyComplexType()) {
      Type = Type->castAs<ComplexType>()->getElementType();
      MostDerivedLength = I + 1;
    } else if (const FieldDecl *FD = getAsField(Path[I])) {
      Type = FD->getType();
      MostDerivedLength = I + 1;
    }
  }
  return MostDerivedLength;
}
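// A SubobjectDesignator records which subobject of a complete object an
// lvalue designates: a path of array indices, fields, and base classes,
// together with flags such as whether the lvalue points one past the end.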
struct SubobjectDesignator {
  unsigned Invalid : 1;
  unsigned IsOnePastTheEnd : 1;
  unsigned FirstEntryIsAnUnsizedArray : 1;
  unsigned MostDerivedIsArrayElement : 1;
  unsigned MostDerivedPathLength : 28;
  uint64_t MostDerivedArraySize;
  SubobjectDesignator() : Invalid(true) {}

  explicit SubobjectDesignator(QualType T)
      : Invalid(false), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0),
        MostDerivedType(T) {}

  SubobjectDesignator(ASTContext &Ctx, const APValue &V)
      : Invalid(!V.isLValue() || !V.hasLValuePath()), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0) {
    assert(V.isLValue() && "Non-LValue used to make an LValue designator?");
    if (!Invalid) {
      IsOnePastTheEnd = V.isLValueOnePastTheEnd();
      ArrayRef<PathEntry> VEntries = V.getLValuePath();
      Entries.insert(Entries.end(), VEntries.begin(), VEntries.end());
      if (V.getLValueBase()) {
        bool IsArray = false;
        bool FirstIsUnsizedArray = false;
        MostDerivedPathLength = findMostDerivedSubobject(
            Ctx, V.getLValueBase(), V.getLValuePath(), MostDerivedArraySize,
            MostDerivedType, IsArray, FirstIsUnsizedArray);
        MostDerivedIsArrayElement = IsArray;
        FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
      }
    }
  }
                    unsigned NewLength) {
    assert(Base && "cannot truncate path for null pointer");
    assert(NewLength <= Entries.size() && "not a truncation");

    if (NewLength == Entries.size())
      return;
    Entries.resize(NewLength);

    bool IsArray = false;
    bool FirstIsUnsizedArray = false;
    MostDerivedPathLength = findMostDerivedSubobject(
        Ctx, Base, Entries, MostDerivedArraySize, MostDerivedType, IsArray,
        FirstIsUnsizedArray);
    MostDerivedIsArrayElement = IsArray;
    FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
  }
  bool isMostDerivedAnUnsizedArray() const {
    assert(!Invalid && "Calling this makes no sense on invalid designators");
    return Entries.size() == 1 && FirstEntryIsAnUnsizedArray;
  }

  uint64_t getMostDerivedArraySize() const {
    assert(!isMostDerivedAnUnsizedArray() && "Unsized array has no size");
    return MostDerivedArraySize;
  }

  bool isOnePastTheEnd() const {
    if (!isMostDerivedAnUnsizedArray() && MostDerivedIsArrayElement &&
        Entries[MostDerivedPathLength - 1].getAsArrayIndex() ==
            MostDerivedArraySize)
      return true;
    return false;
  }

  std::pair<uint64_t, uint64_t> validIndexAdjustments() {
    if (Invalid || isMostDerivedAnUnsizedArray())
      return {0, 0};
    bool IsArray = MostDerivedPathLength == Entries.size() &&
                   MostDerivedIsArrayElement;
    uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
                                  : (uint64_t)IsOnePastTheEnd;
    uint64_t ArraySize =
        IsArray ? getMostDerivedArraySize() : (uint64_t)1;
    return {ArrayIndex, ArraySize - ArrayIndex};
  }

  bool isValidSubobject() const {
    return !isOnePastTheEnd();
  }
384 assert(!Invalid &&
"invalid designator has no subobject type");
385 return MostDerivedPathLength == Entries.size()
396 MostDerivedIsArrayElement =
true;
397 MostDerivedArraySize = CAT->
getSize().getZExtValue();
398 MostDerivedPathLength = Entries.size();
402 void addUnsizedArrayUnchecked(
QualType ElemTy) {
405 MostDerivedType = ElemTy;
406 MostDerivedIsArrayElement =
true;
410 MostDerivedArraySize = AssumedSizeForUnsizedArray;
411 MostDerivedPathLength = Entries.size();
  void addDeclUnchecked(const Decl *D, bool Virtual = false) {
    if (const FieldDecl *FD = dyn_cast<FieldDecl>(D)) {
      MostDerivedType = FD->getType();
      MostDerivedIsArrayElement = false;
      MostDerivedArraySize = 0;
      MostDerivedPathLength = Entries.size();
    }
  }

  void addComplexUnchecked(QualType EltTy, bool Imag) {
    MostDerivedType = EltTy;
    MostDerivedIsArrayElement = true;
    MostDerivedArraySize = 2;
    MostDerivedPathLength = Entries.size();
  }
  void diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info, const Expr *E);
  void diagnosePointerArithmetic(EvalInfo &Info, const Expr *E,
                                 const APSInt &N);

  /// Add N to the address of this subobject.
  void adjustIndex(EvalInfo &Info, const Expr *E, APSInt N) {
    if (Invalid || !N)
      return;
    uint64_t TruncatedN = N.extOrTrunc(64).getZExtValue();
    if (isMostDerivedAnUnsizedArray()) {
      diagnoseUnsizedArrayPointerArithmetic(Info, E);
      // Can't verify the index against an unknown bound; trust the caller.
      Entries.back() = PathEntry::ArrayIndex(
          Entries.back().getAsArrayIndex() + TruncatedN);
      return;
    }

    bool IsArray = MostDerivedPathLength == Entries.size() &&
                   MostDerivedIsArrayElement;
    uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
                                  : (uint64_t)IsOnePastTheEnd;
    uint64_t ArraySize =
        IsArray ? getMostDerivedArraySize() : (uint64_t)1;

    if (N < -(int64_t)ArrayIndex || N > ArraySize - ArrayIndex) {
      // Calculate the actual index in a wide enough type to include it in the
      // diagnostic.
      N = N.extend(std::max<unsigned>(N.getBitWidth() + 1, 65));
      (llvm::APInt &)N += ArrayIndex;
      assert(N.ugt(ArraySize) && "bounds check failed for in-bounds index");
      diagnosePointerArithmetic(Info, E, N);
      setInvalid();
      return;
    }

    ArrayIndex += TruncatedN;
    assert(ArrayIndex <= ArraySize &&
           "bounds check succeeded for out-of-bounds index");

    if (IsArray)
      Entries.back() = PathEntry::ArrayIndex(ArrayIndex);
    else
      IsOnePastTheEnd = (ArrayIndex != 0);
  }
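// ScopeKind distinguishes how long a cleanup lives during evaluation: until
// the end of the enclosing block, the enclosing full-expression, or the
// enclosing call.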
enum class ScopeKind {
  Block,
  FullExpression,
  Call
};

  CallRef() : OrigCallee(), CallIndex(0), Version() {}
  CallRef(const FunctionDecl *Callee, unsigned CallIndex, unsigned Version)
      : OrigCallee(Callee), CallIndex(CallIndex), Version(Version) {}

  explicit operator bool() const { return OrigCallee; }
  CallStackFrame *Caller;

  typedef std::pair<const void *, unsigned> MapKeyTy;
  typedef std::map<MapKeyTy, APValue> MapTy;

  unsigned CurTempVersion = TempVersionStack.back();

  unsigned getTempVersion() const { return TempVersionStack.back(); }

  void pushTempVersion() {
    TempVersionStack.push_back(++CurTempVersion);
  }

  void popTempVersion() {
    TempVersionStack.pop_back();
  }

  CallRef createCall(const FunctionDecl *Callee) {
    return {Callee, Index, ++CurTempVersion};
  }
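  // Temporaries created in a frame are keyed by (declaration, version).
  // Bumping CurTempVersion whenever a scope is (re)entered gives each loop
  // iteration's locals and temporaries distinct storage slots.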
  llvm::DenseMap<const ValueDecl *, FieldDecl *> LambdaCaptureFields;

  APValue *getTemporary(const void *Key, unsigned Version) {
    MapKeyTy KV(Key, Version);
    auto LB = Temporaries.lower_bound(KV);
    if (LB != Temporaries.end() && LB->first == KV)
      return &LB->second;
    return nullptr;
  }

  APValue *getCurrentTemporary(const void *Key) {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return &std::prev(UB)->second;
    return nullptr;
  }

  unsigned getCurrentTemporaryVersion(const void *Key) const {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return std::prev(UB)->first.second;
    return 0;
  }
  template<typename KeyT>
  APValue &createTemporary(const KeyT *Key, QualType T,
                           ScopeKind Scope, LValue &LV);

  void describe(llvm::raw_ostream &OS) override;

  Frame *getCaller() const override { return Caller; }
  SourceLocation getCallLocation() const override { return CallLoc; }
  const FunctionDecl *getCallee() const override { return Callee; }

  bool isStdFunction() const {
    for (const DeclContext *DC = Callee; DC; DC = DC->getParent())
      if (DC->isStdNamespace())
        return true;
    return false;
  }
  class ThisOverrideRAII {
  public:
    ThisOverrideRAII(CallStackFrame &Frame, const LValue *NewThis, bool Enable)
        : Frame(Frame), OldThis(Frame.This) {
      if (Enable)
        Frame.This = NewThis;
    }
    ~ThisOverrideRAII() {
      Frame.This = OldThis;
    }

  private:
    CallStackFrame &Frame;
    const LValue *OldThis;
  };
  class ExprTimeTraceScope {
  public:
    ExprTimeTraceScope(const Expr *E, const ASTContext &Ctx, StringRef Name)
        : TimeScope(Name, [E, &Ctx] {
            return E->getSourceRange().printToString(Ctx.getSourceManager());
          }) {}

  private:
    llvm::TimeTraceScope TimeScope;
  };

                              const LValue &This, QualType ThisType);
682 llvm::PointerIntPair<APValue*, 2, ScopeKind>
Value;
693 bool isDestroyedAtEndOf(ScopeKind K)
const {
694 return (
int)
Value.getInt() >= (
int)K;
696 bool endLifetime(EvalInfo &Info,
bool RunDestructors) {
697 if (RunDestructors) {
700 Loc = VD->getLocation();
702 Loc = E->getExprLoc();
709 bool hasSideEffect() {
  struct ObjectUnderConstruction {
    APValue::LValueBase Base;
    ArrayRef<APValue::LValuePathEntry> Path;

    friend bool operator==(const ObjectUnderConstruction &LHS,
                           const ObjectUnderConstruction &RHS) {
      return LHS.Base == RHS.Base && LHS.Path == RHS.Path;
    }
    friend llvm::hash_code hash_value(const ObjectUnderConstruction &Obj) {
      return llvm::hash_combine(Obj.Base, Obj.Path);
    }
  };
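  // ConstructionPhase tracks how far the construction or destruction of an
  // object has progressed, so member accesses made while a constructor or
  // destructor is running can be checked appropriately.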
  enum class ConstructionPhase {
    None,
    Bases,
    AfterBases,
    AfterFields,
    Destroying,
    DestroyingBases
  };

template<> struct DenseMapInfo<ObjectUnderConstruction> {
  using Base = DenseMapInfo<APValue::LValueBase>;
  static ObjectUnderConstruction getEmptyKey() {
    return {Base::getEmptyKey(), {}}; }
  static ObjectUnderConstruction getTombstoneKey() {
    return {Base::getTombstoneKey(), {}};
  }
  static bool isEqual(const ObjectUnderConstruction &LHS,
                      const ObjectUnderConstruction &RHS) {
    return LHS == RHS;
  }
};
762 const Expr *AllocExpr =
nullptr;
773 if (
auto *NE = dyn_cast<CXXNewExpr>(AllocExpr))
774 return NE->isArray() ? ArrayNew : New;
775 assert(isa<CallExpr>(AllocExpr));
780 struct DynAllocOrder {
808 CallStackFrame *CurrentCall;
811 unsigned CallStackDepth;
814 unsigned NextCallIndex;
823 bool EnableNewConstInterp;
827 CallStackFrame BottomFrame;
837 enum class EvaluatingDeclKind {
844 EvaluatingDeclKind IsEvaluatingDecl = EvaluatingDeclKind::None;
851 llvm::DenseMap<ObjectUnderConstruction, ConstructionPhase>
852 ObjectsUnderConstruction;
857 std::map<DynamicAllocLValue, DynAlloc, DynAllocOrder> HeapAllocs;
860 unsigned NumHeapAllocs = 0;
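  // RAII object that marks an object as being constructed for the duration of
  // a constructor call, updating its phase as bases and then fields are
  // initialized.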
862 struct EvaluatingConstructorRAII {
864 ObjectUnderConstruction
Object;
866 EvaluatingConstructorRAII(EvalInfo &EI, ObjectUnderConstruction Object,
870 EI.ObjectsUnderConstruction
871 .insert({
Object, HasBases ? ConstructionPhase::Bases
872 : ConstructionPhase::AfterBases})
875 void finishedConstructingBases() {
876 EI.ObjectsUnderConstruction[
Object] = ConstructionPhase::AfterBases;
878 void finishedConstructingFields() {
879 EI.ObjectsUnderConstruction[
Object] = ConstructionPhase::AfterFields;
881 ~EvaluatingConstructorRAII() {
882 if (DidInsert) EI.ObjectsUnderConstruction.erase(Object);
886 struct EvaluatingDestructorRAII {
888 ObjectUnderConstruction
Object;
890 EvaluatingDestructorRAII(EvalInfo &EI, ObjectUnderConstruction Object)
892 DidInsert = EI.ObjectsUnderConstruction
893 .insert({
Object, ConstructionPhase::Destroying})
896 void startedDestroyingBases() {
897 EI.ObjectsUnderConstruction[
Object] =
898 ConstructionPhase::DestroyingBases;
900 ~EvaluatingDestructorRAII() {
902 EI.ObjectsUnderConstruction.erase(Object);
909 return ObjectsUnderConstruction.lookup({
Base, Path});
914 unsigned SpeculativeEvaluationDepth = 0;
922 bool HasActiveDiagnostic;
926 bool HasFoldFailureDiagnostic;
931 bool CheckingPotentialConstantExpression =
false;
939 bool CheckingForUndefinedBehavior =
false;
941 enum EvaluationMode {
944 EM_ConstantExpression,
951 EM_ConstantExpressionUnevaluated,
959 EM_IgnoreSideEffects,
964 bool checkingPotentialConstantExpression()
const override {
965 return CheckingPotentialConstantExpression;
971 bool checkingForUndefinedBehavior()
const override {
972 return CheckingForUndefinedBehavior;
  EvalInfo(const ASTContext &C, Expr::EvalStatus &S, EvaluationMode Mode)
      : Ctx(const_cast<ASTContext &>(C)), EvalStatus(S), CurrentCall(nullptr),
        CallStackDepth(0), NextCallIndex(1),
        StepsLeft(C.getLangOpts().ConstexprStepLimit),
        EnableNewConstInterp(C.getLangOpts().EnableNewConstInterp),
        BottomFrame(*this, SourceLocation(), nullptr, nullptr, CallRef()),
        EvaluatingDecl((const ValueDecl *)nullptr),
        EvaluatingDeclValue(nullptr), HasActiveDiagnostic(false),
        HasFoldFailureDiagnostic(false), EvalMode(Mode) {}

  ASTContext &getCtx() const override { return Ctx; }
992 EvaluatingDeclKind EDK = EvaluatingDeclKind::Ctor) {
993 EvaluatingDecl =
Base;
994 IsEvaluatingDecl = EDK;
995 EvaluatingDeclValue = &
Value;
1001 if (checkingPotentialConstantExpression() && CallStackDepth > 1)
1003 if (NextCallIndex == 0) {
1005 FFDiag(Loc, diag::note_constexpr_call_limit_exceeded);
1008 if (CallStackDepth <= getLangOpts().ConstexprCallDepth)
1010 FFDiag(Loc, diag::note_constexpr_depth_limit_exceeded)
1011 << getLangOpts().ConstexprCallDepth;
  std::pair<CallStackFrame *, unsigned>
  getCallFrameAndDepth(unsigned CallIndex) {
    assert(CallIndex && "no call index in getCallFrameAndDepth");
    unsigned Depth = CallStackDepth;
    CallStackFrame *Frame = CurrentCall;
    while (Frame->Index > CallIndex) {
      Frame = Frame->Caller;
      --Depth;
    }
    if (Frame->Index == CallIndex)
      return {Frame, Depth};
    return {nullptr, 0};
  }
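  // Each evaluated statement is charged against the constexpr step budget
  // (LangOptions::ConstexprStepLimit, set via StepsLeft above); nextStep()
  // emits note_constexpr_step_limit_exceeded once the budget runs out.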
1031 bool nextStep(
const Stmt *S) {
1033 FFDiag(S->getBeginLoc(), diag::note_constexpr_step_limit_exceeded);
1043 std::optional<DynAlloc *> Result;
1044 auto It = HeapAllocs.find(DA);
1045 if (It != HeapAllocs.end())
1046 Result = &It->second;
1052 CallStackFrame *Frame = getCallFrameAndDepth(
Call.CallIndex).first;
1053 return Frame ? Frame->getTemporary(
Call.getOrigParam(PVD),
Call.Version)
1058 struct StdAllocatorCaller {
1059 unsigned FrameIndex;
1061 explicit operator bool()
const {
return FrameIndex != 0; };
1064 StdAllocatorCaller getStdAllocatorCaller(StringRef FnName)
const {
1065 for (
const CallStackFrame *Call = CurrentCall;
Call != &BottomFrame;
1067 const auto *MD = dyn_cast_or_null<CXXMethodDecl>(
Call->Callee);
1071 if (!FnII || !FnII->
isStr(FnName))
1075 dyn_cast<ClassTemplateSpecializationDecl>(MD->getParent());
1081 if (CTSD->isInStdNamespace() && ClassII &&
1082 ClassII->
isStr(
"allocator") && TAL.
size() >= 1 &&
1084 return {
Call->Index, TAL[0].getAsType()};
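  // performLifetimeExtension() drops every cleanup that is not destroyed at
  // the end of the current full-expression, so lifetime-extended temporaries
  // survive; discardCleanups() abandons all remaining cleanups, reporting an
  // unmodeled side effect if any discarded cleanup would have had one.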
1090 void performLifetimeExtension() {
1092 llvm::erase_if(CleanupStack, [](Cleanup &
C) {
1093 return !
C.isDestroyedAtEndOf(ScopeKind::FullExpression);
1100 bool discardCleanups() {
1101 for (Cleanup &
C : CleanupStack) {
1102 if (
C.hasSideEffect() && !noteSideEffect()) {
1103 CleanupStack.clear();
1107 CleanupStack.clear();
1112 interp::Frame *getCurrentFrame()
override {
return CurrentCall; }
1113 const interp::Frame *getBottomFrame()
const override {
return &BottomFrame; }
1115 bool hasActiveDiagnostic()
override {
return HasActiveDiagnostic; }
1116 void setActiveDiagnostic(
bool Flag)
override { HasActiveDiagnostic = Flag; }
1118 void setFoldFailureDiagnostic(
bool Flag)
override {
1119 HasFoldFailureDiagnostic = Flag;
1130 bool hasPriorDiagnostic()
override {
1131 if (!EvalStatus.
Diag->empty()) {
1133 case EM_ConstantFold:
1134 case EM_IgnoreSideEffects:
1135 if (!HasFoldFailureDiagnostic)
1139 case EM_ConstantExpression:
1140 case EM_ConstantExpressionUnevaluated:
1141 setActiveDiagnostic(
false);
1148 unsigned getCallStackDepth()
override {
return CallStackDepth; }
1153 bool keepEvaluatingAfterSideEffect() {
1155 case EM_IgnoreSideEffects:
1158 case EM_ConstantExpression:
1159 case EM_ConstantExpressionUnevaluated:
1160 case EM_ConstantFold:
1163 return checkingPotentialConstantExpression() ||
1164 checkingForUndefinedBehavior();
1166 llvm_unreachable(
"Missed EvalMode case");
1171 bool noteSideEffect() {
1173 return keepEvaluatingAfterSideEffect();
1177 bool keepEvaluatingAfterUndefinedBehavior() {
1179 case EM_IgnoreSideEffects:
1180 case EM_ConstantFold:
1183 case EM_ConstantExpression:
1184 case EM_ConstantExpressionUnevaluated:
1185 return checkingForUndefinedBehavior();
1187 llvm_unreachable(
"Missed EvalMode case");
1193 bool noteUndefinedBehavior()
override {
1195 return keepEvaluatingAfterUndefinedBehavior();
1200 bool keepEvaluatingAfterFailure()
const override {
1205 case EM_ConstantExpression:
1206 case EM_ConstantExpressionUnevaluated:
1207 case EM_ConstantFold:
1208 case EM_IgnoreSideEffects:
1209 return checkingPotentialConstantExpression() ||
1210 checkingForUndefinedBehavior();
1212 llvm_unreachable(
"Missed EvalMode case");
1225 [[nodiscard]]
bool noteFailure() {
1233 bool KeepGoing = keepEvaluatingAfterFailure();
1238 class ArrayInitLoopIndex {
1243 ArrayInitLoopIndex(EvalInfo &Info)
1244 : Info(Info), OuterIndex(Info.ArrayInitIndex) {
1245 Info.ArrayInitIndex = 0;
1247 ~ArrayInitLoopIndex() { Info.ArrayInitIndex = OuterIndex; }
1249 operator uint64_t&() {
return Info.ArrayInitIndex; }
1254 struct FoldConstant {
1257 bool HadNoPriorDiags;
1258 EvalInfo::EvaluationMode OldMode;
1260 explicit FoldConstant(EvalInfo &Info,
bool Enabled)
1263 HadNoPriorDiags(Info.EvalStatus.
Diag &&
1264 Info.EvalStatus.
Diag->empty() &&
1265 !Info.EvalStatus.HasSideEffects),
1266 OldMode(Info.EvalMode) {
1268 Info.EvalMode = EvalInfo::EM_ConstantFold;
1270 void keepDiagnostics() { Enabled =
false; }
1272 if (Enabled && HadNoPriorDiags && !Info.EvalStatus.Diag->empty() &&
1273 !Info.EvalStatus.HasSideEffects)
1274 Info.EvalStatus.Diag->clear();
1275 Info.EvalMode = OldMode;
1281 struct IgnoreSideEffectsRAII {
1283 EvalInfo::EvaluationMode OldMode;
1284 explicit IgnoreSideEffectsRAII(EvalInfo &Info)
1285 : Info(Info), OldMode(Info.EvalMode) {
1286 Info.EvalMode = EvalInfo::EM_IgnoreSideEffects;
1289 ~IgnoreSideEffectsRAII() { Info.EvalMode = OldMode; }
class SpeculativeEvaluationRAII {
  EvalInfo *Info = nullptr;
  Expr::EvalStatus OldStatus;
  unsigned OldSpeculativeEvaluationDepth;

  void moveFromAndCancel(SpeculativeEvaluationRAII &&Other) {
    Info = Other.Info;
    OldStatus = Other.OldStatus;
    OldSpeculativeEvaluationDepth = Other.OldSpeculativeEvaluationDepth;
    Other.Info = nullptr;
  }

  void maybeRestoreState() {
    if (!Info)
      return;
    Info->EvalStatus = OldStatus;
    Info->SpeculativeEvaluationDepth = OldSpeculativeEvaluationDepth;
  }

public:
  SpeculativeEvaluationRAII() = default;

  SpeculativeEvaluationRAII(
      EvalInfo &Info, SmallVectorImpl<PartialDiagnosticAt> *NewDiag = nullptr)
      : Info(&Info), OldStatus(Info.EvalStatus),
        OldSpeculativeEvaluationDepth(Info.SpeculativeEvaluationDepth) {
    Info.EvalStatus.Diag = NewDiag;
    Info.SpeculativeEvaluationDepth = Info.CallStackDepth + 1;
  }

  SpeculativeEvaluationRAII(const SpeculativeEvaluationRAII &Other) = delete;
  SpeculativeEvaluationRAII(SpeculativeEvaluationRAII &&Other) {
    moveFromAndCancel(std::move(Other));
  }

  SpeculativeEvaluationRAII &operator=(SpeculativeEvaluationRAII &&Other) {
    maybeRestoreState();
    moveFromAndCancel(std::move(Other));
    return *this;
  }

  ~SpeculativeEvaluationRAII() { maybeRestoreState(); }
};
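// ScopeRAII (below) runs the cleanup stack back down to the depth it had when
// the scope was entered; the Block/FullExpression/Call typedefs select which
// cleanups are destroyed at that point.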
template<ScopeKind Kind>
class ScopeRAII {
  EvalInfo &Info;
  unsigned OldStackSize;

public:
  ScopeRAII(EvalInfo &Info)
      : Info(Info), OldStackSize(Info.CleanupStack.size()) {
    // Push a new temporary version so temporaries from different iterations
    // of a loop are kept distinct.
    Info.CurrentCall->pushTempVersion();
  }
  bool destroy(bool RunDestructors = true) {
    bool OK = cleanup(Info, RunDestructors, OldStackSize);
    OldStackSize = -1U;
    return OK;
  }
  ~ScopeRAII() {
    if (OldStackSize != -1U)
      destroy(false);
    Info.CurrentCall->popTempVersion();
  }

private:
  static bool cleanup(EvalInfo &Info, bool RunDestructors,
                      unsigned OldStackSize) {
    assert(OldStackSize <= Info.CleanupStack.size() &&
           "running cleanups out of order?");
    bool Success = true;
    for (unsigned I = Info.CleanupStack.size(); I > OldStackSize; --I) {
      if (Info.CleanupStack[I - 1].isDestroyedAtEndOf(Kind)) {
        if (!Info.CleanupStack[I - 1].endLifetime(Info, RunDestructors)) {
          Success = false;
          break;
        }
      }
    }

    // Pop the cleanups handled by this scope kind; cleanups that belong to an
    // enclosing scope stay on the stack.
    auto NewEnd = Info.CleanupStack.begin() + OldStackSize;
    if (Kind != ScopeKind::Block)
      NewEnd =
          std::remove_if(NewEnd, Info.CleanupStack.end(), [](Cleanup &C) {
            return C.isDestroyedAtEndOf(Kind);
          });
    Info.CleanupStack.erase(NewEnd, Info.CleanupStack.end());
    return Success;
  }
};

typedef ScopeRAII<ScopeKind::Block> BlockScopeRAII;
typedef ScopeRAII<ScopeKind::FullExpression> FullExpressionRAII;
typedef ScopeRAII<ScopeKind::Call> CallScopeRAII;
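// Illustrative only (not code from this file): an evaluation entry point
// would typically wrap the evaluation of one expression in a
// FullExpressionRAII so that temporaries created during it are destroyed
// afterwards, e.g.
//
//   FullExpressionRAII Scope(Info);
//   if (!Evaluate(Result, Info, E))
//     return false;
//   return Scope.destroy();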
1398bool SubobjectDesignator::checkSubobject(EvalInfo &Info,
const Expr *E,
1402 if (isOnePastTheEnd()) {
1403 Info.CCEDiag(E, diag::note_constexpr_past_end_subobject)
1414void SubobjectDesignator::diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info,
1416 Info.CCEDiag(E, diag::note_constexpr_unsized_array_indexed);
1421void SubobjectDesignator::diagnosePointerArithmetic(EvalInfo &Info,
1426 if (MostDerivedPathLength == Entries.size() && MostDerivedIsArrayElement)
1427 Info.CCEDiag(E, diag::note_constexpr_array_index)
1429 <<
static_cast<unsigned>(getMostDerivedArraySize());
1431 Info.CCEDiag(E, diag::note_constexpr_array_index)
1436CallStackFrame::CallStackFrame(EvalInfo &Info,
SourceLocation CallLoc,
1440 Arguments(
Call), CallLoc(CallLoc), Index(Info.NextCallIndex++) {
1441 Info.CurrentCall =
this;
1442 ++Info.CallStackDepth;
1445CallStackFrame::~CallStackFrame() {
1446 assert(Info.CurrentCall ==
this &&
"calls retired out of order");
1447 --Info.CallStackDepth;
1448 Info.CurrentCall = Caller;
1470 llvm_unreachable(
"unknown access kind");
1504 llvm_unreachable(
"unknown access kind");
struct ComplexValue {
  bool IsInt;
  APSInt IntReal, IntImag;
  APFloat FloatReal, FloatImag;

  ComplexValue() : FloatReal(APFloat::Bogus()), FloatImag(APFloat::Bogus()) {}

  void makeComplexFloat() { IsInt = false; }
  bool isComplexFloat() const { return !IsInt; }
  APFloat &getComplexFloatReal() { return FloatReal; }
  APFloat &getComplexFloatImag() { return FloatImag; }

  void makeComplexInt() { IsInt = true; }
  bool isComplexInt() const { return IsInt; }
  APSInt &getComplexIntReal() { return IntReal; }
  APSInt &getComplexIntImag() { return IntImag; }

  void moveInto(APValue &v) const {
    if (isComplexFloat())
      v = APValue(FloatReal, FloatImag);
    else
      v = APValue(IntReal, IntImag);
  }
  void setFrom(const APValue &v) {
    assert(v.isComplexFloat() || v.isComplexInt());
    if (v.isComplexFloat()) {
      makeComplexFloat();
      FloatReal = v.getComplexFloatReal();
      FloatImag = v.getComplexFloatImag();
    } else {
      makeComplexInt();
      IntReal = v.getComplexIntReal();
      IntImag = v.getComplexIntImag();
    }
  }
};
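// The bit-fields and accessors that follow belong to the evaluator's LValue
// representation: a base, a char offset, a subobject designator, and flags
// such as whether the base is invalid or the value is a null pointer.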
1553 bool InvalidBase : 1;
1558 SubobjectDesignator &getLValueDesignator() {
return Designator; }
1559 const SubobjectDesignator &getLValueDesignator()
const {
return Designator;}
1560 bool isNullPointer()
const {
return IsNullPtr;}
1562 unsigned getLValueCallIndex()
const {
return Base.getCallIndex(); }
1563 unsigned getLValueVersion()
const {
return Base.getVersion(); }
1569 assert(!InvalidBase &&
"APValues can't handle invalid LValue bases");
1575 assert(
V.isLValue() &&
"Setting LValue from a non-LValue?");
1576 Base =
V.getLValueBase();
1578 InvalidBase =
false;
1580 IsNullPtr =
V.isNullPointer();
1587 const auto *E = B.
get<
const Expr *>();
1588 assert((isa<MemberExpr>(E) || tryUnwrapAllocSizeCall(E)) &&
1589 "Unexpected type of invalid base");
1595 InvalidBase = BInvalid;
1596 Designator = SubobjectDesignator(getType(B));
1604 InvalidBase =
false;
1615 moveInto(Printable);
1622 template <
typename GenDiagType>
1623 bool checkNullPointerDiagnosingWith(
const GenDiagType &GenDiag) {
1635 bool checkNullPointer(EvalInfo &Info,
const Expr *E,
1637 return checkNullPointerDiagnosingWith([&Info, E, CSK] {
1638 Info.CCEDiag(E, diag::note_constexpr_null_subobject) << CSK;
1642 bool checkNullPointerForFoldAccess(EvalInfo &Info,
const Expr *E,
1644 return checkNullPointerDiagnosingWith([&Info, E, AK] {
1645 Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
1656 void addDecl(EvalInfo &Info,
const Expr *E,
1661 void addUnsizedArray(EvalInfo &Info,
const Expr *E,
QualType ElemTy) {
1663 Info.CCEDiag(E, diag::note_constexpr_unsupported_unsized_array);
1668 assert(getType(
Base)->isPointerType() || getType(
Base)->isArrayType());
1669 Designator.FirstEntryIsAnUnsizedArray =
true;
1677 void addComplex(EvalInfo &Info,
const Expr *E,
QualType EltTy,
bool Imag) {
1681 void clearIsNullPointer() {
1684 void adjustOffsetAndIndex(EvalInfo &Info,
const Expr *E,
1696 uint64_t Index64 = Index.extOrTrunc(64).getZExtValue();
1701 clearIsNullPointer();
1706 clearIsNullPointer();
1713 : DeclAndIsDerivedMember(
Decl,
false) {}
1718 return DeclAndIsDerivedMember.getPointer();
1721 bool isDerivedMember()
const {
1722 return DeclAndIsDerivedMember.getInt();
1726 return cast<CXXRecordDecl>(
1727 DeclAndIsDerivedMember.getPointer()->getDeclContext());
1731 V =
APValue(getDecl(), isDerivedMember(), Path);
1734 assert(
V.isMemberPointer());
1735 DeclAndIsDerivedMember.setPointer(
V.getMemberPointerDecl());
1736 DeclAndIsDerivedMember.setInt(
V.isMemberPointerToDerivedMember());
1739 Path.insert(Path.end(),
P.begin(),
P.end());
1745 llvm::PointerIntPair<const ValueDecl*, 1, bool> DeclAndIsDerivedMember;
1753 assert(!Path.empty());
1755 if (Path.size() >= 2)
1759 if (
Expected->getCanonicalDecl() !=
Class->getCanonicalDecl()) {
1775 if (!isDerivedMember()) {
1776 Path.push_back(Derived);
1779 if (!castBack(Derived))
1782 DeclAndIsDerivedMember.setInt(
false);
1790 DeclAndIsDerivedMember.setInt(
true);
1791 if (isDerivedMember()) {
1792 Path.push_back(
Base);
1795 return castBack(
Base);
1800 static bool operator==(
const MemberPtr &LHS,
const MemberPtr &RHS) {
1801 if (!LHS.getDecl() || !RHS.getDecl())
1802 return !LHS.getDecl() && !RHS.getDecl();
1803 if (LHS.getDecl()->getCanonicalDecl() != RHS.getDecl()->getCanonicalDecl())
1805 return LHS.Path == RHS.Path;
1811 const LValue &This,
const Expr *E,
1812 bool AllowNonLiteralTypes =
false);
1814 bool InvalidBaseOK =
false);
1816 bool InvalidBaseOK =
false);
1846 if (Int.isUnsigned() || Int.isMinSignedValue()) {
1847 Int = Int.extend(Int.getBitWidth() + 1);
1848 Int.setIsSigned(
true);
1853template<
typename KeyT>
1855 ScopeKind
Scope, LValue &LV) {
1856 unsigned Version = getTempVersion();
1859 return createLocal(
Base, Key, T,
Scope);
1865 assert(Args.CallIndex == Index &&
"creating parameter in wrong frame");
1871 return createLocal(
Base, PVD, PVD->
getType(), ScopeKind::Call);
1876 assert(
Base.getCallIndex() == Index &&
"lvalue for wrong frame");
1877 unsigned Version =
Base.getVersion();
1878 APValue &Result = Temporaries[MapKeyTy(Key, Version)];
1879 assert(Result.isAbsent() &&
"local created multiple times");
1885 if (Index <= Info.SpeculativeEvaluationDepth) {
1887 Info.noteSideEffect();
1889 Info.CleanupStack.push_back(Cleanup(&Result,
Base, T,
Scope));
1896 FFDiag(E, diag::note_constexpr_heap_alloc_limit_exceeded);
1902 auto Result = HeapAllocs.emplace(std::piecewise_construct,
1903 std::forward_as_tuple(DA), std::tuple<>());
1904 assert(Result.second &&
"reused a heap alloc index?");
1905 Result.first->second.AllocExpr = E;
1906 return &Result.first->second.Value;
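// CallStackFrame::describe() pretty-prints this frame as "Callee(args...)"
// (prefixed with "this->" for member calls) for use in "in call to ..."
// diagnostic notes.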
1910void CallStackFrame::describe(raw_ostream &Out) {
1911 unsigned ArgIndex = 0;
1912 bool IsMemberCall = isa<CXXMethodDecl>(Callee) &&
1913 !isa<CXXConstructorDecl>(Callee) &&
1914 cast<CXXMethodDecl>(Callee)->isInstance();
1919 if (This && IsMemberCall) {
1921 This->moveInto(Val);
1923 This->Designator.MostDerivedType);
1925 Out <<
"->" << *
Callee <<
'(';
1926 IsMemberCall =
false;
1930 E =
Callee->param_end(); I != E; ++I, ++ArgIndex) {
1931 if (ArgIndex > (
unsigned)IsMemberCall)
1935 APValue *
V = Info.getParamSlot(Arguments, Param);
1937 V->printPretty(Out, Info.Ctx, Param->
getType());
1941 if (ArgIndex == 0 && IsMemberCall)
1942 Out <<
"->" << *
Callee <<
'(';
1956 return Info.noteSideEffect();
1963 return (Builtin == Builtin::BI__builtin___CFStringMakeConstantString ||
1964 Builtin == Builtin::BI__builtin___NSStringMakeConstantString ||
1965 Builtin == Builtin::BI__builtin_function_start);
1974 if (!B)
return true;
1978 if (
const VarDecl *VD = dyn_cast<VarDecl>(D))
1979 return VD->hasGlobalStorage();
1980 if (isa<TemplateParamObjectDecl>(D))
1985 return isa<FunctionDecl, MSGuidDecl, UnnamedGlobalConstantDecl>(D);
1995 case Expr::CompoundLiteralExprClass: {
1999 case Expr::MaterializeTemporaryExprClass:
2002 return cast<MaterializeTemporaryExpr>(E)->getStorageDuration() ==
SD_Static;
2004 case Expr::StringLiteralClass:
2005 case Expr::PredefinedExprClass:
2006 case Expr::ObjCStringLiteralClass:
2007 case Expr::ObjCEncodeExprClass:
2009 case Expr::ObjCBoxedExprClass:
2010 return cast<ObjCBoxedExpr>(E)->isExpressibleAsConstantInitializer();
2011 case Expr::CallExprClass:
2014 case Expr::AddrLabelExprClass:
2018 case Expr::BlockExprClass:
2019 return !cast<BlockExpr>(E)->getBlockDecl()->hasCaptures();
2022 case Expr::SourceLocExprClass:
2024 case Expr::ImplicitValueInitExprClass:
2036 return LVal.Base.dyn_cast<
const ValueDecl*>();
2040 if (
Value.getLValueCallIndex())
2043 return E && !isa<MaterializeTemporaryExpr>(E);
2063 if (!A.getLValueBase())
2064 return !B.getLValueBase();
2065 if (!B.getLValueBase())
2068 if (A.getLValueBase().getOpaqueValue() !=
2069 B.getLValueBase().getOpaqueValue())
2072 return A.getLValueCallIndex() == B.getLValueCallIndex() &&
2073 A.getLValueVersion() == B.getLValueVersion();
2077 assert(
Base &&
"no location for a null lvalue");
2083 if (
auto *PVD = dyn_cast_or_null<ParmVarDecl>(VD)) {
2085 for (CallStackFrame *F = Info.CurrentCall; F; F = F->Caller) {
2086 if (F->Arguments.CallIndex ==
Base.getCallIndex() &&
2087 F->Arguments.Version ==
Base.getVersion() && F->Callee &&
2088 Idx < F->Callee->getNumParams()) {
2089 VD = F->Callee->getParamDecl(Idx);
2096 Info.Note(VD->
getLocation(), diag::note_declared_at);
2098 Info.Note(E->
getExprLoc(), diag::note_constexpr_temporary_here);
2101 if (std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA))
2102 Info.Note((*Alloc)->AllocExpr->getExprLoc(),
2103 diag::note_constexpr_dynamic_alloc_here);
2135 const SubobjectDesignator &
Designator = LVal.getLValueDesignator();
2143 if (isTemplateArgument(Kind)) {
2144 int InvalidBaseKind = -1;
2147 InvalidBaseKind = 0;
2148 else if (isa_and_nonnull<StringLiteral>(BaseE))
2149 InvalidBaseKind = 1;
2150 else if (isa_and_nonnull<MaterializeTemporaryExpr>(BaseE) ||
2151 isa_and_nonnull<LifetimeExtendedTemporaryDecl>(BaseVD))
2152 InvalidBaseKind = 2;
2153 else if (
auto *PE = dyn_cast_or_null<PredefinedExpr>(BaseE)) {
2154 InvalidBaseKind = 3;
2155 Ident = PE->getIdentKindName();
2158 if (InvalidBaseKind != -1) {
2159 Info.FFDiag(Loc, diag::note_constexpr_invalid_template_arg)
2160 << IsReferenceType << !
Designator.Entries.empty() << InvalidBaseKind
2166 if (
auto *FD = dyn_cast_or_null<FunctionDecl>(BaseVD);
2167 FD && FD->isImmediateFunction()) {
2168 Info.FFDiag(Loc, diag::note_consteval_address_accessible)
2170 Info.Note(FD->getLocation(), diag::note_declared_at);
2178 if (Info.getLangOpts().CPlusPlus11) {
2179 Info.FFDiag(Loc, diag::note_constexpr_non_global, 1)
2180 << IsReferenceType << !
Designator.Entries.empty() << !!BaseVD
2182 auto *VarD = dyn_cast_or_null<VarDecl>(BaseVD);
2183 if (VarD && VarD->isConstexpr()) {
2189 Info.Note(VarD->getLocation(), diag::note_constexpr_not_static)
2201 assert((Info.checkingPotentialConstantExpression() ||
2202 LVal.getLValueCallIndex() == 0) &&
2203 "have call index for global lvalue");
2206 Info.FFDiag(Loc, diag::note_constexpr_dynamic_alloc)
2207 << IsReferenceType << !
Designator.Entries.empty();
2213 if (
const VarDecl *Var = dyn_cast<const VarDecl>(BaseVD)) {
2215 if (Var->getTLSKind())
2221 if (!isForManglingOnly(Kind) && Var->hasAttr<DLLImportAttr>())
2227 if (Info.getCtx().getLangOpts().CUDA &&
2228 Info.getCtx().getLangOpts().CUDAIsDevice &&
2229 Info.getCtx().CUDAConstantEvalCtx.NoWrongSidedVars) {
2230 if ((!Var->hasAttr<CUDADeviceAttr>() &&
2231 !Var->hasAttr<CUDAConstantAttr>() &&
2232 !Var->getType()->isCUDADeviceBuiltinSurfaceType() &&
2233 !Var->getType()->isCUDADeviceBuiltinTextureType()) ||
2234 Var->hasAttr<HIPManagedAttr>())
2238 if (
const auto *FD = dyn_cast<const FunctionDecl>(BaseVD)) {
2249 if (Info.getLangOpts().CPlusPlus && !isForManglingOnly(Kind) &&
2250 FD->hasAttr<DLLImportAttr>())
2254 }
else if (
const auto *MTE =
2255 dyn_cast_or_null<MaterializeTemporaryExpr>(BaseE)) {
2256 if (CheckedTemps.insert(MTE).second) {
2259 Info.FFDiag(MTE->getExprLoc(),
2260 diag::note_constexpr_unsupported_temporary_nontrivial_dtor)
      APValue *V = MTE->getOrCreateValue(false);
      assert(V && "evaluation result refers to uninitialised temporary");
2268 Info, MTE->getExprLoc(), TempType, *
V, Kind,
2269 nullptr, CheckedTemps))
2276 if (!IsReferenceType)
2288 Info.FFDiag(Loc, diag::note_constexpr_past_end, 1)
2289 << !
Designator.Entries.empty() << !!BaseVD << BaseVD;
2304 const auto *FD = dyn_cast_or_null<CXXMethodDecl>(
Member);
2307 if (FD->isImmediateFunction()) {
2308 Info.FFDiag(Loc, diag::note_consteval_address_accessible) << 0;
2309 Info.Note(FD->getLocation(), diag::note_declared_at);
2312 return isForManglingOnly(Kind) || FD->isVirtual() ||
2313 !FD->hasAttr<DLLImportAttr>();
2319 const LValue *This =
nullptr) {
2336 if (This && Info.EvaluatingDecl == This->getLValueBase())
2340 if (Info.getLangOpts().CPlusPlus11)
2341 Info.FFDiag(E, diag::note_constexpr_nonliteral)
2344 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2355 assert(SubobjectDecl &&
"SubobjectDecl shall be non-null");
2356 Info.FFDiag(DiagLoc, diag::note_constexpr_uninitialized) << SubobjectDecl;
2358 diag::note_constexpr_subobject_declared_here);
2365 Type = AT->getValueType();
2370 if (
Value.isArray()) {
2372 for (
unsigned I = 0, N =
Value.getArrayInitializedElts(); I != N; ++I) {
2374 Value.getArrayInitializedElt(I), Kind,
2375 SubobjectDecl, CheckedTemps))
2378 if (!
Value.hasArrayFiller())
2381 Value.getArrayFiller(), Kind, SubobjectDecl,
2384 if (
Value.isUnion() &&
Value.getUnionField()) {
2387 Value.getUnionValue(), Kind,
Value.getUnionField(), CheckedTemps);
2389 if (
Value.isStruct()) {
2391 if (
const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
2392 unsigned BaseIndex = 0;
2395 Value.getStructBase(BaseIndex), Kind,
2396 nullptr, CheckedTemps))
2401 for (
const auto *I : RD->
fields()) {
2402 if (I->isUnnamedBitfield())
2406 Value.getStructField(I->getFieldIndex()), Kind,
2412 if (
Value.isLValue() &&
2413 CERK == CheckEvaluationResultKind::ConstantExpression) {
2415 LVal.setFrom(Info.Ctx,
Value);
2420 if (
Value.isMemberPointer() &&
2421 CERK == CheckEvaluationResultKind::ConstantExpression)
2441 nullptr, CheckedTemps);
2450 CheckEvaluationResultKind::FullyInitialized, Info, DiagLoc,
Type,
Value,
2451 ConstantExprKind::Normal,
nullptr, CheckedTemps);
2457 if (!Info.HeapAllocs.empty()) {
2461 Info.CCEDiag(Info.HeapAllocs.begin()->second.AllocExpr,
2462 diag::note_constexpr_memory_leak)
2463 <<
unsigned(Info.HeapAllocs.size() - 1);
2471 if (!
Value.getLValueBase()) {
2473 Result = !
Value.getLValueOffset().isZero();
2491 Result = Val.
getInt().getBoolValue();
2523 llvm_unreachable(
"unknown APValue kind");
2529 assert(E->
isPRValue() &&
"missing lvalue-to-rvalue conv in bool condition");
2538 const T &SrcValue,
QualType DestType) {
2539 Info.CCEDiag(E, diag::note_constexpr_overflow)
2540 << SrcValue << DestType;
2541 return Info.noteUndefinedBehavior();
2547 unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
2551 Result =
APSInt(DestWidth, !DestSigned);
2553 if (
Value.convertToInteger(Result, llvm::APFloat::rmTowardZero, &ignored)
2554 & APFloat::opInvalidOp)
2565 llvm::RoundingMode RM =
2567 if (RM == llvm::RoundingMode::Dynamic)
2568 RM = llvm::RoundingMode::NearestTiesToEven;
2574 APFloat::opStatus St) {
2577 if (Info.InConstantContext)
2581 if ((St & APFloat::opInexact) &&
2585 Info.FFDiag(E, diag::note_constexpr_dynamic_rounding);
2589 if ((St != APFloat::opOK) &&
2592 FPO.getAllowFEnvAccess())) {
2593 Info.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
2597 if ((St & APFloat::opStatus::opInvalidOp) &&
2616 assert(isa<CastExpr>(E) || isa<CompoundAssignOperator>(E));
2618 APFloat::opStatus St;
2619 APFloat
Value = Result;
2621 St = Result.convert(Info.Ctx.getFloatTypeSemantics(DestType), RM, &ignored);
2628 unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
2634 Result =
Value.getBoolValue();
2641 QualType DestType, APFloat &Result) {
2642 Result = APFloat(Info.Ctx.getFloatTypeSemantics(DestType), 1);
2644 APFloat::opStatus St = Result.convertFromAPInt(
Value,
Value.isSigned(), RM);
2650 assert(FD->
isBitField() &&
"truncateBitfieldValue on non-bitfield");
2652 if (!
Value.isInt()) {
2656 assert(
Value.isLValue() &&
"integral value neither int nor lvalue?");
2662 unsigned OldBitWidth = Int.getBitWidth();
2664 if (NewBitWidth < OldBitWidth)
2665 Int = Int.trunc(NewBitWidth).extend(OldBitWidth);
2679 Res = SVal.
getFloat().bitcastToAPInt();
2684 unsigned VecSize = Info.Ctx.getTypeSize(VecTy);
2686 unsigned EltSize = Info.Ctx.getTypeSize(EltTy);
2687 bool BigEndian = Info.Ctx.getTargetInfo().isBigEndian();
2688 Res = llvm::APInt::getZero(VecSize);
2691 llvm::APInt EltAsInt;
2695 EltAsInt = Elt.
getFloat().bitcastToAPInt();
2699 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2702 unsigned BaseEltSize = EltAsInt.getBitWidth();
2704 Res |= EltAsInt.zextOrTrunc(VecSize).rotr(i*EltSize+BaseEltSize);
2706 Res |= EltAsInt.zextOrTrunc(VecSize).rotl(i*EltSize);
2712 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2719template<
typename Operation>
2722 unsigned BitWidth, Operation Op,
2724 if (LHS.isUnsigned()) {
2725 Result = Op(LHS, RHS);
2729 APSInt Value(Op(LHS.extend(BitWidth), RHS.extend(BitWidth)),
false);
2730 Result =
Value.trunc(LHS.getBitWidth());
2731 if (Result.extend(BitWidth) !=
Value) {
2732 if (Info.checkingForUndefinedBehavior())
2733 Info.Ctx.getDiagnostics().Report(E->
getExprLoc(),
2734 diag::warn_integer_constant_overflow)
2745 bool HandleOverflowResult =
true;
2752 std::multiplies<APSInt>(), Result);
2755 std::plus<APSInt>(), Result);
2758 std::minus<APSInt>(), Result);
2759 case BO_And: Result = LHS & RHS;
return true;
2760 case BO_Xor: Result = LHS ^ RHS;
return true;
2761 case BO_Or: Result = LHS | RHS;
return true;
2765 Info.FFDiag(E, diag::note_expr_divide_by_zero);
2770 if (RHS.isNegative() && RHS.isAllOnes() && LHS.isSigned() &&
2771 LHS.isMinSignedValue())
2773 Info, E, -LHS.extend(LHS.getBitWidth() + 1), E->
getType());
2774 Result = (Opcode == BO_Rem ? LHS % RHS : LHS / RHS);
2775 return HandleOverflowResult;
2777 if (Info.getLangOpts().OpenCL)
2779 RHS &=
APSInt(llvm::APInt(RHS.getBitWidth(),
2780 static_cast<uint64_t
>(LHS.getBitWidth() - 1)),
2782 else if (RHS.isSigned() && RHS.isNegative()) {
2785 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2792 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2794 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2795 << RHS << E->
getType() << LHS.getBitWidth();
2796 }
else if (LHS.isSigned() && !Info.getLangOpts().CPlusPlus20) {
2801 if (LHS.isNegative())
2802 Info.CCEDiag(E, diag::note_constexpr_lshift_of_negative) << LHS;
2803 else if (LHS.countl_zero() < SA)
2804 Info.CCEDiag(E, diag::note_constexpr_lshift_discards);
2810 if (Info.getLangOpts().OpenCL)
2812 RHS &=
APSInt(llvm::APInt(RHS.getBitWidth(),
2813 static_cast<uint64_t
>(LHS.getBitWidth() - 1)),
2815 else if (RHS.isSigned() && RHS.isNegative()) {
2818 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2825 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2827 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2828 << RHS << E->
getType() << LHS.getBitWidth();
2833 case BO_LT: Result = LHS < RHS;
return true;
2834 case BO_GT: Result = LHS > RHS;
return true;
2835 case BO_LE: Result = LHS <= RHS;
return true;
2836 case BO_GE: Result = LHS >= RHS;
return true;
2837 case BO_EQ: Result = LHS == RHS;
return true;
2838 case BO_NE: Result = LHS != RHS;
return true;
2840 llvm_unreachable(
"BO_Cmp should be handled elsewhere");
2847 const APFloat &RHS) {
2849 APFloat::opStatus St;
2855 St = LHS.multiply(RHS, RM);
2858 St = LHS.add(RHS, RM);
2861 St = LHS.subtract(RHS, RM);
2867 Info.CCEDiag(E, diag::note_expr_divide_by_zero);
2868 St = LHS.divide(RHS, RM);
2877 Info.CCEDiag(E, diag::note_constexpr_float_arithmetic) << LHS.isNaN();
2878 return Info.noteUndefinedBehavior();
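// The helpers that follow evaluate the logical operators (&& and ||) and the
// comparison operators for vector operands element-wise, producing an APInt
// result per element.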
2886 const APInt &RHSValue, APInt &Result) {
2887 bool LHS = (LHSValue != 0);
2888 bool RHS = (RHSValue != 0);
2890 if (Opcode == BO_LAnd)
2891 Result = LHS && RHS;
2893 Result = LHS || RHS;
2898 const APFloat &RHSValue, APInt &Result) {
2899 bool LHS = !LHSValue.isZero();
2900 bool RHS = !RHSValue.isZero();
2902 if (Opcode == BO_LAnd)
2903 Result = LHS && RHS;
2905 Result = LHS || RHS;
2911 const APValue &RHSValue, APInt &Result) {
2915 RHSValue.
getInt(), Result);
2921template <
typename APTy>
2924 const APTy &RHSValue, APInt &Result) {
2927 llvm_unreachable(
"unsupported binary operator");
2929 Result = (LHSValue == RHSValue);
2932 Result = (LHSValue != RHSValue);
2935 Result = (LHSValue < RHSValue);
2938 Result = (LHSValue > RHSValue);
2941 Result = (LHSValue <= RHSValue);
2944 Result = (LHSValue >= RHSValue);
2958 const APValue &RHSValue, APInt &Result) {
2962 RHSValue.
getInt(), Result);
2973 assert(Opcode != BO_PtrMemD && Opcode != BO_PtrMemI &&
2974 "Operation not supported on vector types");
2978 QualType EltTy = VT->getElementType();
2985 "A vector result that isn't a vector OR uncalculated LValue");
2991 RHSValue.
getVectorLength() == NumElements &&
"Different vector sizes");
2995 for (
unsigned EltNum = 0; EltNum < NumElements; ++EltNum) {
3000 APSInt EltResult{Info.Ctx.getIntWidth(EltTy),
3002 bool Success =
true;
3010 RHSElt.
getInt(), EltResult);
3016 ResultElements.emplace_back(EltResult);
3021 "Mismatched LHS/RHS/Result Type");
3022 APFloat LHSFloat = LHSElt.
getFloat();
3030 ResultElements.emplace_back(LHSFloat);
3034 LHSValue =
APValue(ResultElements.data(), ResultElements.size());
3042 unsigned TruncatedElements) {
3043 SubobjectDesignator &D = Result.Designator;
3046 if (TruncatedElements == D.Entries.size())
3048 assert(TruncatedElements >= D.MostDerivedPathLength &&
3049 "not casting to a derived class");
3055 for (
unsigned I = TruncatedElements, N = D.Entries.size(); I != N; ++I) {
3059 if (isVirtualBaseClass(D.Entries[I]))
3065 D.Entries.resize(TruncatedElements);
3075 RL = &Info.Ctx.getASTRecordLayout(Derived);
3078 Obj.getLValueOffset() += RL->getBaseClassOffset(
Base);
3079 Obj.addDecl(Info, E,
Base,
false);
3088 if (!
Base->isVirtual())
3091 SubobjectDesignator &D = Obj.Designator;
3096 DerivedDecl = D.MostDerivedType->getAsCXXRecordDecl();
3102 const ASTRecordLayout &Layout = Info.Ctx.getASTRecordLayout(DerivedDecl);
3104 Obj.addDecl(Info, E, BaseDecl,
true);
3112 PathI != PathE; ++PathI) {
3116 Type = (*PathI)->getType();
3128 llvm_unreachable(
"Class must be derived from the passed in base class!");
3143 RL = &Info.Ctx.getASTRecordLayout(FD->
getParent());
3147 LVal.adjustOffset(Info.Ctx.toCharUnitsFromBits(RL->getFieldOffset(I)));
3148 LVal.addDecl(Info, E, FD);
3156 for (
const auto *
C : IFD->
chain())
3184 Size = Info.Ctx.getTypeSizeInChars(
Type);
3201 LVal.adjustOffsetAndIndex(Info, E, Adjustment, SizeOfPointee);
3207 int64_t Adjustment) {
3209 APSInt::get(Adjustment));
3224 LVal.Offset += SizeOfComponent;
3226 LVal.addComplex(Info, E, EltTy, Imag);
3240 const VarDecl *VD, CallStackFrame *Frame,
3241 unsigned Version,
APValue *&Result) {
3246 Result = Frame->getTemporary(VD, Version);
3250 if (!isa<ParmVarDecl>(VD)) {
3257 "missing value for local variable");
3258 if (Info.checkingPotentialConstantExpression())
3263 diag::note_unimplemented_constexpr_lambda_feature_ast)
3264 <<
"captures not currently allowed";
3271 if (Info.EvaluatingDecl ==
Base) {
3272 Result = Info.EvaluatingDeclValue;
3276 if (isa<ParmVarDecl>(VD)) {
3279 if (!Info.checkingPotentialConstantExpression() ||
3280 !Info.CurrentCall->Callee ||
3282 if (Info.getLangOpts().CPlusPlus11) {
3283 Info.FFDiag(E, diag::note_constexpr_function_param_value_unknown)
3300 if (!Info.checkingPotentialConstantExpression()) {
3301 Info.FFDiag(E, diag::note_constexpr_var_init_unknown, 1)
3308 if (Init->isValueDependent()) {
3315 if (!Info.checkingPotentialConstantExpression()) {
3316 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
3317 ? diag::note_constexpr_ltor_non_constexpr
3318 : diag::note_constexpr_ltor_non_integral, 1)
3328 Info.FFDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3344 ((Info.getLangOpts().CPlusPlus || Info.getLangOpts().OpenCL) &&
3346 Info.CCEDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3353 Info.FFDiag(E, diag::note_constexpr_var_init_weak) << VD;
3369 E = Derived->
bases_end(); I != E; ++I, ++Index) {
3370 if (I->getType()->getAsCXXRecordDecl()->getCanonicalDecl() ==
Base)
3374 llvm_unreachable(
"base class missing from derived class's bases list");
  assert(!isa<SourceLocExpr>(Lit) &&
         "SourceLocExpr should have already been converted to a StringLiteral");
3384 if (
const auto *ObjCEnc = dyn_cast<ObjCEncodeExpr>(Lit)) {
3386 Info.Ctx.getObjCEncodingForType(ObjCEnc->getEncodedType(), Str);
3387 assert(Index <= Str.size() &&
"Index too large");
3388 return APSInt::getUnsigned(Str.c_str()[Index]);
3391 if (
auto PE = dyn_cast<PredefinedExpr>(Lit))
3392 Lit = PE->getFunctionName();
3395 Info.Ctx.getAsConstantArrayType(S->getType());
3396 assert(CAT &&
"string literal isn't an array");
3398 assert(CharType->
isIntegerType() &&
"unexpected character type");
3400 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3402 if (Index < S->getLength())
3403 Value = S->getCodeUnit(Index);
3415 AllocType.isNull() ? S->getType() : AllocType);
3416 assert(CAT &&
"string literal isn't an array");
3418 assert(CharType->
isIntegerType() &&
"unexpected character type");
3420 unsigned Elts = CAT->
getSize().getZExtValue();
3422 std::min(S->getLength(), Elts), Elts);
3423 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3425 if (Result.hasArrayFiller())
3427 for (
unsigned I = 0, N = Result.getArrayInitializedElts(); I != N; ++I) {
3428 Value = S->getCodeUnit(I);
3435 unsigned Size = Array.getArraySize();
3436 assert(Index < Size);
3439 unsigned OldElts = Array.getArrayInitializedElts();
3440 unsigned NewElts = std::max(Index+1, OldElts * 2);
3441 NewElts = std::min(Size, std::max(NewElts, 8u));
3445 for (
unsigned I = 0; I != OldElts; ++I)
3447 for (
unsigned I = OldElts; I != NewElts; ++I)
3451 Array.swap(NewValue);
3472 for (
auto *Field : RD->
fields())
3473 if (!Field->isUnnamedBitfield() &&
3477 for (
auto &BaseSpec : RD->
bases())
3495 for (
auto *Field : RD->
fields()) {
3500 if (Field->isMutable() &&
3502 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1) << AK << Field;
3503 Info.Note(Field->getLocation(), diag::note_declared_at);
3511 for (
auto &BaseSpec : RD->
bases())
3521 bool MutableSubobject =
false) {
3526 switch (Info.IsEvaluatingDecl) {
3527 case EvalInfo::EvaluatingDeclKind::None:
3530 case EvalInfo::EvaluatingDeclKind::Ctor:
3532 if (Info.EvaluatingDecl ==
Base)
3537 if (
auto *BaseE =
Base.dyn_cast<
const Expr *>())
3538 if (
auto *BaseMTE = dyn_cast<MaterializeTemporaryExpr>(BaseE))
3539 return Info.EvaluatingDecl == BaseMTE->getExtendingDecl();
3542 case EvalInfo::EvaluatingDeclKind::Dtor:
3547 if (MutableSubobject ||
Base != Info.EvaluatingDecl)
3556 llvm_unreachable(
"unknown evaluating decl kind");
3562struct CompleteObject {
3570 CompleteObject() :
Value(nullptr) {}
3574 bool mayAccessMutableMembers(EvalInfo &Info,
AccessKinds AK)
const {
3585 if (!Info.getLangOpts().CPlusPlus14)
3590 explicit operator bool()
const {
return !
Type.isNull(); }
3595 bool IsMutable =
false) {
3609template<
typename Sub
objectHandler>
3610typename SubobjectHandler::result_type
3612 const SubobjectDesignator &Sub, SubobjectHandler &handler) {
3615 return handler.failed();
3616 if (Sub.isOnePastTheEnd() || Sub.isMostDerivedAnUnsizedArray()) {
3617 if (Info.getLangOpts().CPlusPlus11)
3618 Info.FFDiag(E, Sub.isOnePastTheEnd()
3619 ? diag::note_constexpr_access_past_end
3620 : diag::note_constexpr_access_unsized_array)
3621 << handler.AccessKind;
3624 return handler.failed();
3630 const FieldDecl *VolatileField =
nullptr;
3633 for (
unsigned I = 0, N = Sub.Entries.size(); ; ++I) {
3638 if (!Info.checkingPotentialConstantExpression())
3639 Info.FFDiag(E, diag::note_constexpr_access_uninit)
3641 return handler.failed();
3649 Info.isEvaluatingCtorDtor(
3652 ConstructionPhase::None) {
3662 if (Info.getLangOpts().CPlusPlus) {
3666 if (VolatileField) {
3669 Decl = VolatileField;
3670 }
else if (
auto *VD = Obj.Base.dyn_cast<
const ValueDecl*>()) {
3672 Loc = VD->getLocation();
3676 if (
auto *E = Obj.Base.dyn_cast<
const Expr *>())
3679 Info.FFDiag(E, diag::note_constexpr_access_volatile_obj, 1)
3680 << handler.AccessKind << DiagKind <<
Decl;
3681 Info.Note(Loc, diag::note_constexpr_volatile_here) << DiagKind;
3683 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
3685 return handler.failed();
3693 !Obj.mayAccessMutableMembers(Info, handler.AccessKind) &&
3695 return handler.failed();
3699 if (!handler.found(*O, ObjType))
3711 LastField =
nullptr;
3715 assert(CAT &&
"vla in literal type?");
3716 uint64_t Index = Sub.Entries[I].getAsArrayIndex();
3717 if (CAT->
getSize().ule(Index)) {
3720 if (Info.getLangOpts().CPlusPlus11)
3721 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3722 << handler.AccessKind;
3725 return handler.failed();
3732 else if (!
isRead(handler.AccessKind)) {
3739 uint64_t Index = Sub.Entries[I].getAsArrayIndex();
3741 if (Info.getLangOpts().CPlusPlus11)
3742 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3743 << handler.AccessKind;
3746 return handler.failed();
3752 assert(I == N - 1 &&
"extracting subobject of scalar?");
3761 }
else if (
const FieldDecl *Field = getAsField(Sub.Entries[I])) {
3762 if (Field->isMutable() &&
3763 !Obj.mayAccessMutableMembers(Info, handler.AccessKind)) {
3764 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1)
3765 << handler.AccessKind << Field;
3766 Info.Note(Field->getLocation(), diag::note_declared_at);
3767 return handler.failed();
3776 if (I == N - 1 && handler.AccessKind ==
AK_Construct) {
3784 Info.FFDiag(E, diag::note_constexpr_access_inactive_union_member)
3785 << handler.AccessKind << Field << !UnionField << UnionField;
3786 return handler.failed();
3795 if (Field->getType().isVolatileQualified())
3796 VolatileField = Field;
3809struct ExtractSubobjectHandler {
3815 typedef bool result_type;
3816 bool failed() {
return false; }
3836 const CompleteObject &Obj,
3837 const SubobjectDesignator &Sub,
APValue &Result,
3840 ExtractSubobjectHandler Handler = {Info, E, Result, AK};
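// ModifySubobjectHandler is a findSubobject() callback that overwrites the
// designated subobject with a new value, refusing to modify const-qualified
// subobjects.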
3845struct ModifySubobjectHandler {
3850 typedef bool result_type;
3856 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
3862 bool failed() {
return false; }
3864 if (!checkConst(SubobjType))
3867 Subobj.
swap(NewVal);
3871 if (!checkConst(SubobjType))
3873 if (!NewVal.
isInt()) {
3882 if (!checkConst(SubobjType))
3890const AccessKinds ModifySubobjectHandler::AccessKind;
3894 const CompleteObject &Obj,
3895 const SubobjectDesignator &Sub,
3897 ModifySubobjectHandler Handler = { Info, NewVal, E };
3904 const SubobjectDesignator &A,
3905 const SubobjectDesignator &B,
3906 bool &WasArrayIndex) {
3907 unsigned I = 0, N = std::min(A.Entries.size(), B.Entries.size());
3908 for (; I != N; ++I) {
3912 if (A.Entries[I].getAsArrayIndex() != B.Entries[I].getAsArrayIndex()) {
3913 WasArrayIndex =
true;
3921 if (A.Entries[I].getAsBaseOrMember() !=
3922 B.Entries[I].getAsBaseOrMember()) {
3923 WasArrayIndex =
false;
3926 if (
const FieldDecl *FD = getAsField(A.Entries[I]))
3928 ObjType = FD->getType();
3934 WasArrayIndex =
false;
3941 const SubobjectDesignator &A,
3942 const SubobjectDesignator &B) {
3943 if (A.Entries.size() != B.Entries.size())
3946 bool IsArray = A.MostDerivedIsArrayElement;
3947 if (IsArray && A.MostDerivedPathLength != A.Entries.size())
3956 return CommonLength >= A.Entries.size() - IsArray;
3963 if (LVal.InvalidBase) {
3965 return CompleteObject();
3969 Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
3970 return CompleteObject();
3973 CallStackFrame *Frame =
nullptr;
3975 if (LVal.getLValueCallIndex()) {
3976 std::tie(Frame, Depth) =
3977 Info.getCallFrameAndDepth(LVal.getLValueCallIndex());
3979 Info.FFDiag(E, diag::note_constexpr_lifetime_ended, 1)
3980 << AK << LVal.Base.is<
const ValueDecl*>();
3982 return CompleteObject();
3993 if (Info.getLangOpts().CPlusPlus)
3994 Info.FFDiag(E, diag::note_constexpr_access_volatile_type)
3998 return CompleteObject();
4003 QualType BaseType = getType(LVal.Base);
4005 if (Info.getLangOpts().CPlusPlus14 && LVal.Base == Info.EvaluatingDecl &&
4009 BaseVal = Info.EvaluatingDeclValue;
4012 if (
auto *GD = dyn_cast<MSGuidDecl>(D)) {
4015 Info.FFDiag(E, diag::note_constexpr_modify_global);
4016 return CompleteObject();
4020 Info.FFDiag(E, diag::note_constexpr_unsupported_layout)
4022 return CompleteObject();
4024 return CompleteObject(LVal.Base, &
V, GD->getType());
4028 if (
auto *GCD = dyn_cast<UnnamedGlobalConstantDecl>(D)) {
4030 Info.FFDiag(E, diag::note_constexpr_modify_global);
4031 return CompleteObject();
4033 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&GCD->getValue()),
4038 if (
auto *TPO = dyn_cast<TemplateParamObjectDecl>(D)) {
4040 Info.FFDiag(E, diag::note_constexpr_modify_global);
4041 return CompleteObject();
4043 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&TPO->getValue()),
4054 const VarDecl *VD = dyn_cast<VarDecl>(D);
4061 return CompleteObject();
4064 bool IsConstant = BaseType.
isConstant(Info.Ctx);
4069 if (IsAccess && isa<ParmVarDecl>(VD)) {
4073 }
else if (Info.getLangOpts().CPlusPlus14 &&
4080 Info.FFDiag(E, diag::note_constexpr_modify_global);
4081 return CompleteObject();
4087 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4088 if (Info.getLangOpts().CPlusPlus) {
4089 Info.FFDiag(E, diag::note_constexpr_ltor_non_const_int, 1) << VD;
4090 Info.Note(VD->
getLocation(), diag::note_declared_at);
4094 return CompleteObject();
4096 }
else if (!IsAccess) {
4097 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4098 }
else if (IsConstant && Info.checkingPotentialConstantExpression() &&
4101 }
else if (IsConstant) {
4105 if (Info.getLangOpts().CPlusPlus) {
4106 Info.CCEDiag(E, Info.getLangOpts().CPlusPlus11
4107 ? diag::note_constexpr_ltor_non_constexpr
4108 : diag::note_constexpr_ltor_non_integral, 1)
4110 Info.Note(VD->
getLocation(), diag::note_declared_at);
4116 if (Info.getLangOpts().CPlusPlus) {
4117 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
4118 ? diag::note_constexpr_ltor_non_constexpr
4119 : diag::note_constexpr_ltor_non_integral, 1)
4121 Info.Note(VD->
getLocation(), diag::note_declared_at);
4125 return CompleteObject();
4130 return CompleteObject();
4132 std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA);
4134 Info.FFDiag(E, diag::note_constexpr_access_deleted_object) << AK;
4135 return CompleteObject();
4137 return CompleteObject(LVal.Base, &(*Alloc)->Value,
4138 LVal.Base.getDynamicAllocType());
4144 dyn_cast_or_null<MaterializeTemporaryExpr>(
Base)) {
    assert(MTE->getStorageDuration() == SD_Static &&
           "should have a frame for a non-global materialized temporary");
4173 if (!MTE->isUsableInConstantExpressions(Info.Ctx) &&
4176 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4177 Info.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
4178 Info.Note(MTE->getExprLoc(), diag::note_constexpr_temporary_here);
4179 return CompleteObject();
4182 BaseVal = MTE->getOrCreateValue(
false);
4183 assert(BaseVal &&
"got reference to unevaluated temporary");
4186 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4189 Info.FFDiag(E, diag::note_constexpr_access_unreadable_object)
4192 Info.Ctx.getLValueReferenceType(LValType));
4194 return CompleteObject();
4197 BaseVal = Frame->getTemporary(
Base, LVal.Base.getVersion());
4198 assert(BaseVal &&
"missing value for temporary");
4209 unsigned VisibleDepth = Depth;
4210 if (llvm::isa_and_nonnull<ParmVarDecl>(
4211 LVal.Base.dyn_cast<
const ValueDecl *>()))
4213 if ((Frame && Info.getLangOpts().CPlusPlus14 &&
4214 Info.EvalStatus.HasSideEffects) ||
4215 (isModification(AK) && VisibleDepth < Info.SpeculativeEvaluationDepth))
4216 return CompleteObject();
4218 return CompleteObject(LVal.getLValueBase(), BaseVal, BaseType);
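// The code above resolves an lvalue to its CompleteObject; the fragment below (its
// signature is partially elided) carries out the lvalue-to-rvalue conversion itself:
// it locates the complete object for LVal and then extracts the designated subobject
// into RVal via extractSubobject.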
4237 const LValue &LVal, APValue &RVal,
4238 bool WantObjectRepresentation = false) {
4239 if (LVal.Designator.Invalid)
4248 if (Base && !LVal.getLValueCallIndex() && !Type.isVolatileQualified()) {
4253 if (Type.isVolatileQualified()) {
4259 if (!Evaluate(Lit, Info, CLE->getInitializer()))
4279 Info.Note(CLE->getExprLoc(), diag::note_declared_at);
4284 CompleteObject LitObj(LVal.Base, &Lit, Base->getType());
4286 } else if (isa<StringLiteral>(Base) || isa<PredefinedExpr>(Base)) {
4289 assert(LVal.Designator.Entries.size() <= 1 &&
4290 "Can only read characters from string literals");
4291 if (LVal.Designator.Entries.empty()) {
4298 if (LVal.Designator.isOnePastTheEnd()) {
4299 if (Info.getLangOpts().CPlusPlus11)
4300 Info.FFDiag(Conv, diag::note_constexpr_access_past_end) << AK;
4305 uint64_t CharIndex = LVal.Designator.Entries[0].getAsArrayIndex();
4312 return Obj && extractSubobject(Info, Conv, Obj, LVal.Designator, RVal, AK);
4318 if (LVal.Designator.Invalid)
4321 if (!Info.getLangOpts().CPlusPlus14) {
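// Handler passed to findSubobject below: it applies a compound assignment in place to
// the located subobject, dispatching on whether the subobject is an integer, float,
// pointer, or vector, and rejecting modification of const-qualified types via
// checkConst().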
4331struct CompoundAssignSubobjectHandler {
4340 typedef bool result_type;
4345 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4351 bool failed() { return false; }
4355 return found(Subobj.getInt(), SubobjType);
4357 return found(Subobj.getFloat(), SubobjType);
4364 return foundPointer(Subobj, SubobjType);
4366 return foundVector(Subobj, SubobjType);
4375 if (!checkConst(SubobjType))
4386 if (!checkConst(SubobjType))
4405 Info.Ctx.getLangOpts());
4408 PromotedLHSType, FValue) &&
4418 return checkConst(SubobjType) &&
4425 if (!checkConst(SubobjType))
4433 (Opcode != BO_Add && Opcode != BO_Sub)) {
4439 if (Opcode == BO_Sub)
4443 LVal.setFrom(Info.Ctx, Subobj);
4446 LVal.moveInto(Subobj);
4452const AccessKinds CompoundAssignSubobjectHandler::AccessKind;
4457 const LValue &LVal, QualType LValType,
4461 if (LVal.Designator.Invalid)
4464 if (!Info.getLangOpts().CPlusPlus14) {
4470 CompoundAssignSubobjectHandler Handler = { Info, E, PromotedLValType, Opcode,
4472 return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
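// Analogous handler for pre/post increment and decrement: it optionally records the
// old value (for the postfix forms) and widens integers by one bit, as the
// sign-extension logic below shows, so overflow can be detected and diagnosed.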
4476struct IncDecSubobjectHandler {
4482 typedef bool result_type;
4487 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4493 bool failed() { return false; }
4504 return found(Subobj.getInt(), SubobjType);
4506 return found(Subobj.getFloat(), SubobjType);
4516 return foundPointer(Subobj, SubobjType);
4524 if (!checkConst(SubobjType))
4546 bool WasNegative = Value.isNegative();
4558 unsigned BitWidth = Value.getBitWidth();
4559 APSInt ActualValue(Value.sext(BitWidth + 1), false);
4560 ActualValue.setBit(BitWidth);
4567 if (!checkConst(SubobjType))
4574 Value.add(One, APFloat::rmNearestTiesToEven);
4576 Value.subtract(One, APFloat::rmNearestTiesToEven);
4580 if (!checkConst(SubobjType))
4592 LVal.setFrom(Info.Ctx, Subobj);
4596 LVal.moveInto(Subobj);
4605 if (LVal.Designator.Invalid)
4608 if (!Info.getLangOpts().CPlusPlus14) {
4615 IncDecSubobjectHandler Handler = {Info, cast<UnaryOperator>(E), AK, Old};
4616 return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
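// Evaluation of the object argument of a member access: a pointer prvalue and a
// glvalue take separate evaluation paths, while a prvalue of non-literal type is
// diagnosed with note_constexpr_nonliteral.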
4622 if (Object->getType()->isPointerType() && Object->isPRValue())
4625 if (Object->isGLValue())
4628 if (Object->getType()->isLiteralType(Info.Ctx))
4631 Info.FFDiag(Object, diag::note_constexpr_nonliteral) << Object->getType();
4650 bool IncludeMember = true) {
4657 if (!MemPtr.getDecl()) {
4663 if (MemPtr.isDerivedMember()) {
4667 if (LV.Designator.MostDerivedPathLength + MemPtr.Path.size() >
4668 LV.Designator.Entries.size()) {
4672 unsigned PathLengthToMember =
4673 LV.Designator.Entries.size() - MemPtr.Path.size();
4674 for (unsigned I = 0, N = MemPtr.Path.size(); I != N; ++I) {
4676 LV.Designator.Entries[PathLengthToMember + I]);
4686 PathLengthToMember))
4688 } else if (!MemPtr.Path.empty()) {
4690 LV.Designator.Entries.reserve(LV.Designator.Entries.size() +
4691 MemPtr.Path.size() + IncludeMember);
4697 assert(RD && "member pointer access on non-class-type expression");
4699 for (unsigned I = 1, N = MemPtr.Path.size(); I != N; ++I) {
4707 MemPtr.getContainingRecord()))
4712 if (IncludeMember) {
4713 if (const FieldDecl *FD = dyn_cast<FieldDecl>(MemPtr.getDecl())) {
4717 dyn_cast<IndirectFieldDecl>(MemPtr.getDecl())) {
4721 llvm_unreachable("can't construct reference to bound member function");
4725 return MemPtr.getDecl();
4731 bool IncludeMember = true) {
4735 if (Info.noteFailure()) {
4743 BO->getRHS(), IncludeMember);
4750 SubobjectDesignator &D = Result.Designator;
4751 if (D.Invalid || !Result.checkNullPointer(Info, E, CSK_Derived))
4759 if (D.MostDerivedPathLength + E->path_size() > D.Entries.size()) {
4760 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4761 << D.MostDerivedType << TargetQT;
4767 unsigned NewEntriesSize = D.Entries.size() - E->path_size();
4770 if (NewEntriesSize == D.MostDerivedPathLength)
4771 FinalType = D.MostDerivedType->getAsCXXRecordDecl();
4773 FinalType = getAsBaseClass(D.Entries[NewEntriesSize - 1]);
4775 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4776 << D.MostDerivedType << TargetQT;
4787 bool Success = true;
4789 if (RD->isInvalidDecl()) {
4793 if (RD->isUnion()) {
4798 std::distance(RD->field_begin(), RD->field_end()));
4802 End = RD->bases_end();
4803 I != End; ++I, ++Index)
4806 for (const auto *I : RD->fields()) {
4807 if (I->isUnnamedBitfield())
4810 Result.getStructField(I->getFieldIndex()));
4818 if (Result.hasArrayFiller())
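// Result codes for statement evaluation: besides ESR_Succeeded and ESR_Failed these
// include ESR_Returned, ESR_Continue, and ESR_CaseNotFound (the state used while
// searching a switch body for the target case label), all of which appear in the
// statement-evaluation logic below.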
4830enum EvalStmtResult {
4854 APValue &Val = Info.CurrentCall->createTemporary(VD, VD->getType(),
4855 ScopeKind::Block, Result);
4860 return Info.noteSideEffect();
4879 if (const VarDecl *VD = dyn_cast<VarDecl>(D))
4883 for (auto *BD : DD->bindings())
4884 if (auto *VD = BD->getHoldingVar())
4892 if (Info.noteSideEffect())
4894 assert(E->containsErrors() && "valid value-dependent expression should never "
4895 "reach invalid code path.");
4901 const Expr *Cond, bool &Result) {
4904 FullExpressionRAII Scope(Info);
4909 return Scope.destroy();
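// RAII helper that bumps the current call frame's temporary version on entry and
// restores it on exit (pushTempVersion/popTempVersion), so that re-evaluating the same
// expression yields distinct temporaries.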
4922struct TempVersionRAII {
4923 CallStackFrame &Frame;
4925 TempVersionRAII(CallStackFrame &Frame) : Frame(Frame) {
4926 Frame.pushTempVersion();
4929 ~TempVersionRAII() {
4930 Frame.popTempVersion();
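// The fragments below evaluate a loop or switch body inside its own block scope and
// map the resulting EvalStmtResult back to the caller, requiring the scope's destroy()
// to succeed on every non-failure path.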
4944 BlockScopeRAII Scope(Info);
4946 EvalStmtResult ESR = EvaluateStmt(Result, Info, Body, Case);
4947 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())
4952 return ESR_Succeeded;
4955 return ESR_Continue;
4958 case ESR_CaseNotFound:
4961 llvm_unreachable("Invalid EvalStmtResult!");
4967 BlockScopeRAII Scope(Info);
4974 if (ESR != ESR_Succeeded) {
4975 if (ESR != ESR_Failed && !Scope.destroy())
4981 FullExpressionRAII CondScope(Info);
4992 if (!CondScope.destroy())
5001 if (isa<DefaultStmt>(SC)) {
5006 const CaseStmt *CS = cast<CaseStmt>(SC);
5017 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5021 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())
5026 return ESR_Succeeded;
5032 case ESR_CaseNotFound:
5036 diag::note_constexpr_stmt_expr_unsupported);
5039 llvm_unreachable("Invalid EvalStmtResult!");
5049 Info.CCEDiag(VD->getLocation(), diag::note_constexpr_static_local)
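// When a Case label is being searched for, only statement kinds that can contain the
// label are recursed into; every other kind yields ESR_CaseNotFound, as the switch
// over getStmtClass() below shows.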
5059 if (!Info.nextStep(S))
5065 switch (S->getStmtClass()) {
5066 case Stmt::CompoundStmtClass:
5070 case Stmt::LabelStmtClass:
5071 case Stmt::AttributedStmtClass:
5072 case Stmt::DoStmtClass:
5075 case Stmt::CaseStmtClass:
5076 case Stmt::DefaultStmtClass:
5081 case Stmt::IfStmtClass: {
5084 const IfStmt *IS = cast<IfStmt>(S);
5088 BlockScopeRAII Scope(Info);
5093 EvalStmtResult ESR = EvaluateStmt(Result, Info, Init, Case);
5094 if (ESR != ESR_CaseNotFound) {
5095 assert(ESR != ESR_Succeeded);
5106 if (ESR == ESR_Failed)
5108 if (ESR != ESR_CaseNotFound)
5109 return Scope.destroy() ? ESR : ESR_Failed;
5111 return ESR_CaseNotFound;
5114 if (ESR == ESR_Failed)
5116 if (ESR != ESR_CaseNotFound)
5117 return Scope.destroy() ? ESR : ESR_Failed;
5118 return ESR_CaseNotFound;
5121 case Stmt::WhileStmtClass: {
5122 EvalStmtResult ESR =
5124 if (ESR != ESR_Continue)
5129 case Stmt::ForStmtClass: {
5130 const ForStmt *FS = cast<ForStmt>(S);
5131 BlockScopeRAII Scope(Info);
5135 if (const Stmt *Init = FS->getInit()) {
5136 EvalStmtResult ESR = EvaluateStmt(Result, Info, Init, Case);
5137 if (ESR != ESR_CaseNotFound) {
5138 assert(ESR != ESR_Succeeded);
5143 EvalStmtResult ESR =
5145 if (ESR != ESR_Continue)
5147 if (const auto *Inc = FS->getInc()) {
5148 if (Inc->isValueDependent()) {
5152 FullExpressionRAII IncScope(Info);
5160 case Stmt::DeclStmtClass: {
5163 const DeclStmt *DS = cast<DeclStmt>(S);
5164 for (const auto *D : DS->decls()) {
5165 if (const auto *VD = dyn_cast<VarDecl>(D)) {
5168 if (VD->hasLocalStorage() && !VD->getInit())
5176 return ESR_CaseNotFound;
5180 return ESR_CaseNotFound;
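// Main dispatch of EvaluateStmt: each statement class is evaluated within the
// appropriate scope object (FullExpressionRAII or BlockScopeRAII), and the scope's
// destroy() must also succeed for the statement to report ESR_Succeeded.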
5184 switch (S->getStmtClass()) {
5186 if (const Expr *E = dyn_cast<Expr>(S)) {
5195 FullExpressionRAII Scope(Info);
5199 return ESR_Succeeded;
5202 Info.FFDiag(S->getBeginLoc());
5205 case Stmt::NullStmtClass:
5206 return ESR_Succeeded;
5208 case Stmt::DeclStmtClass: {
5209 const DeclStmt *DS = cast<DeclStmt>(S);
5210 for (const auto *D : DS->decls()) {
5211 const VarDecl *VD = dyn_cast_or_null<VarDecl>(D);
5215 FullExpressionRAII Scope(Info);
5218 if (!Scope.destroy())
5221 return ESR_Succeeded;
5224 case Stmt::ReturnStmtClass: {
5225 const Expr *RetExpr = cast<ReturnStmt>(S)->getRetValue();
5226 FullExpressionRAII Scope(Info);
5235 : Evaluate(Result.Value, Info, RetExpr)))
5237 return Scope.destroy() ? ESR_Returned : ESR_Failed;
5240 case Stmt::CompoundStmtClass: {
5241 BlockScopeRAII Scope(Info);
5244 for (const auto *BI : CS->body()) {
5245 EvalStmtResult ESR = EvaluateStmt(Result, Info, BI, Case);
5246 if (ESR == ESR_Succeeded)
5248 else if (ESR != ESR_CaseNotFound) {
5249 if (ESR != ESR_Failed && !Scope.destroy())
5255 return ESR_CaseNotFound;
5256 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5259 case Stmt::IfStmtClass: {
5260 const IfStmt *IS = cast<IfStmt>(S);
5263 BlockScopeRAII Scope(Info);
5266 if (ESR != ESR_Succeeded) {
5267 if (ESR != ESR_Failed && !Scope.destroy())
5277 if (!Info.InConstantContext)
5284 EvalStmtResult ESR = EvaluateStmt(Result, Info, SubStmt);
5285 if (ESR != ESR_Succeeded) {
5286 if (ESR != ESR_Failed && !Scope.destroy())
5291 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5294 case Stmt::WhileStmtClass: {
5295 const WhileStmt *WS = cast<WhileStmt>(S);
5297 BlockScopeRAII Scope(Info);
5306 if (ESR != ESR_Continue) {
5307 if (ESR != ESR_Failed && !Scope.destroy())
5311 if (!Scope.destroy())
5314 return ESR_Succeeded;
5317 case Stmt::DoStmtClass: {
5318 const DoStmt *DS = cast<DoStmt>(S);
5322 if (ESR != ESR_Continue)
5331 FullExpressionRAII CondScope(Info);
5333 !CondScope.destroy())
5336 return ESR_Succeeded;
5339 case Stmt::ForStmtClass: {
5340 const ForStmt *FS = cast<ForStmt>(S);
5341 BlockScopeRAII ForScope(Info);
5342 if (FS->getInit()) {
5343 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getInit());
5344 if (ESR != ESR_Succeeded) {
5345 if (ESR != ESR_Failed && !ForScope.destroy())
5351 BlockScopeRAII IterScope(Info);
5352 bool Continue = true;
5353 if (FS->getCond() && !EvaluateCond(Info, FS->getConditionVariable(),
5354 FS->getCond(), Continue))
5360 if (ESR != ESR_Continue) {
5361 if (ESR != ESR_Failed && (!IterScope.destroy() || !ForScope.destroy()))
5366 if (const auto *Inc = FS->getInc()) {
5367 if (Inc->isValueDependent()) {
5371 FullExpressionRAII IncScope(Info);
5377 if (!IterScope.destroy())
5380 return ForScope.destroy() ? ESR_Succeeded : ESR_Failed;
5383 case Stmt::CXXForRangeStmtClass: {
5385 BlockScopeRAII Scope(Info);
5388 if (FS->getInit()) {
5389 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getInit());
5390 if (ESR != ESR_Succeeded) {
5391 if (ESR != ESR_Failed && !Scope.destroy())
5398 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getRangeStmt());
5399 if (ESR != ESR_Succeeded) {
5400 if (ESR != ESR_Failed && !Scope.destroy())
5407 if (!FS->getBeginStmt() || !FS->getEndStmt() || !FS->getCond())
5412 if (ESR != ESR_Succeeded) {
5413 if (ESR != ESR_Failed && !Scope.destroy())
5418 if (ESR != ESR_Succeeded) {
5419 if (ESR != ESR_Failed && !Scope.destroy())
5427 if (FS->getCond()->isValueDependent()) {
5432 bool Continue = true;
5433 FullExpressionRAII CondExpr(Info);
5441 BlockScopeRAII InnerScope(Info);
5442 ESR = EvaluateStmt(Result, Info, FS->getLoopVarStmt());
5443 if (ESR != ESR_Succeeded) {
5444 if (ESR != ESR_Failed && (!InnerScope.destroy() || !Scope.destroy()))
5451 if (ESR != ESR_Continue) {
5452 if (ESR != ESR_Failed && (!InnerScope.destroy() || !Scope.destroy()))
5456 if (FS->getInc()->isValueDependent()) {
5465 if (!InnerScope.destroy())
5469 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5472 case Stmt::SwitchStmtClass:
5475 case Stmt::ContinueStmtClass:
5476 return ESR_Continue;
5478 case Stmt::BreakStmtClass:
5481 case Stmt::LabelStmtClass:
5482 return EvaluateStmt(Result, Info, cast<LabelStmt>(S)->getSubStmt(), Case);
5484 case Stmt::AttributedStmtClass:
5487 return EvaluateStmt(Result, Info, cast<AttributedStmt>(S)->getSubStmt(),
5490 case Stmt::CaseStmtClass:
5491 case Stmt::DefaultStmtClass:
5492 return EvaluateStmt(Result, Info, cast<SwitchCase>(S)->getSubStmt(), Case);
5493 case Stmt::CXXTryStmtClass:
5495 return EvaluateStmt(Result, Info, cast<CXXTryStmt>(S)->getTryBlock(), Case);
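// The following checks whether a call target may be used in a constant expression:
// non-constexpr constructors and functions, missing definitions, and (before C++20)
// virtual calls are diagnosed, and for inheriting constructors the diagnostic points
// at the inherited constructor that is not constexpr.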
5505 bool IsValueInitialization) {
5512 if (!CD->isConstexpr() && !IsValueInitialization) {
5513 if (Info.getLangOpts().CPlusPlus11) {
5516 Info.CCEDiag(Loc, diag::note_constexpr_invalid_function, 1)
5518 Info.Note(CD->getLocation(), diag::note_declared_at);
5520 Info.CCEDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
5534 if (Info.checkingPotentialConstantExpression() && !Definition &&
5542 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
5549 if (!Info.Ctx.getLangOpts().CPlusPlus20 && isa<CXXMethodDecl>(Declaration) &&
5551 Info.CCEDiag(CallLoc, diag::note_constexpr_virtual_call);
5554 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
5562 if (Info.getLangOpts().CPlusPlus11) {
5567 auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
5568 if (CD && CD->isInheritingConstructor()) {
5569 auto *Inherited = CD->getInheritedConstructor().getConstructor();
5570 if (!Inherited->isConstexpr())
5571 DiagDecl = CD = Inherited;
5577 if (CD && CD->isInheritingConstructor())
5578 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_inhctor, 1)
5579 << CD->getInheritedConstructor().getConstructor()->getParent();
5581 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_function, 1)
5583 Info.Note(DiagDecl->getLocation(), diag::note_declared_at);
5585 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
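// Handler used when an access only needs to verify the dynamic type of the object:
// any value found is acceptable, so found() simply returns true.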
5591struct CheckDynamicTypeHandler {
5593 typedef bool result_type;
5594 bool failed() { return false; }
5597 bool found(APFloat &Value, QualType SubobjType) { return true; }
5605 if (This.Designator.Invalid)