54 #include "llvm/ADT/APFixedPoint.h"
55 #include "llvm/ADT/SmallBitVector.h"
56 #include "llvm/Support/Debug.h"
57 #include "llvm/Support/SaveAndRestore.h"
58 #include "llvm/Support/TimeProfiler.h"
59 #include "llvm/Support/raw_ostream.h"
64 #define DEBUG_TYPE "exprconstant"
66 using namespace clang;
67 using llvm::APFixedPoint;
71 using llvm::FixedPointSemantics;
78 using SourceLocExprScopeGuard =
// Return the alloc_size attribute attached to CE's callee, or null.
// Tries the directly-named callee first, then (guard lines elided in this
// excerpt — original lines 111/113 are missing) a callee reached through a
// function pointer decl.
110 static const AllocSizeAttr *getAllocSizeAttr(
const CallExpr *CE) {
112 return DirectCallee->getAttr<AllocSizeAttr>();
114 return IndirectCallee->getAttr<AllocSizeAttr>();
// If E (after stripping full-expression wrappers, parens, and casts) is a
// call to a function carrying alloc_size, return that CallExpr; otherwise
// return null. Several unwrapping steps fall between the lines visible in
// this excerpt (original 123-129, 131-132, 135 are missing).
122 static const CallExpr *tryUnwrapAllocSizeCall(
const Expr *E) {
// Look through a FullExpr wrapper to its subexpression.
130 if (
const auto *FE = dyn_cast<FullExpr>(E))
// Look through one cast (and its parens) to the underlying expression.
133 if (
const auto *
Cast = dyn_cast<CastExpr>(E))
134 E =
Cast->getSubExpr()->IgnoreParens();
// Only a call whose callee has alloc_size qualifies.
136 if (
const auto *CE = dyn_cast<CallExpr>(E))
137 return getAllocSizeAttr(CE) ? CE :
nullptr;
144 const auto *E =
Base.dyn_cast<
const Expr *>();
163 llvm_unreachable(
"unknown ConstantExprKind");
176 llvm_unreachable(
"unknown ConstantExprKind");
// Size assumed for an array of unknown bound so that indexing into it can
// still be range-checked (initializer elided in this excerpt).
182 static const uint64_t AssumedSizeForUnsizedArray =
// findMostDerivedSubobject (signature head elided): walks a designator Path,
// tracking the type/size of the most-derived array element or field reached,
// and returns the path length of that most-derived subobject.
193 bool &FirstEntryIsUnsizedArray) {
// alloc_size bases are handled elsewhere; they must not reach this walk.
196 assert(!isBaseAnAllocSizeCall(
Base) &&
197 "Unsized arrays shouldn't appear here");
198 unsigned MostDerivedLength = 0;
201 for (
unsigned I = 0, N = Path.size(); I != N; ++I) {
205 MostDerivedLength = I + 1;
// Array step: a sized array gives its real bound ...
208 if (
auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
209 ArraySize = CAT->getSize().getZExtValue();
// ... an unsized array may only appear as the first designator entry
// and gets the assumed placeholder bound.
211 assert(I == 0 &&
"unexpected unsized array designator");
212 FirstEntryIsUnsizedArray =
true;
213 ArraySize = AssumedSizeForUnsizedArray;
219 MostDerivedLength = I + 1;
221 }
// Field step: descend into the member's type.
else if (
const FieldDecl *FD = getAsField(Path[I])) {
222 Type = FD->getType();
224 MostDerivedLength = I + 1;
// (base-class step elided in this excerpt, original lines 225-231)
232 return MostDerivedLength;
236 struct SubobjectDesignator {
240 unsigned Invalid : 1;
243 unsigned IsOnePastTheEnd : 1;
246 unsigned FirstEntryIsAnUnsizedArray : 1;
249 unsigned MostDerivedIsArrayElement : 1;
253 unsigned MostDerivedPathLength : 28;
272 SubobjectDesignator() : Invalid(
true) {}
274 explicit SubobjectDesignator(
QualType T)
275 : Invalid(
false), IsOnePastTheEnd(
false),
276 FirstEntryIsAnUnsizedArray(
false), MostDerivedIsArrayElement(
false),
277 MostDerivedPathLength(0), MostDerivedArraySize(0),
278 MostDerivedType(T) {}
281 : Invalid(!
V.isLValue() || !
V.hasLValuePath()), IsOnePastTheEnd(
false),
282 FirstEntryIsAnUnsizedArray(
false), MostDerivedIsArrayElement(
false),
283 MostDerivedPathLength(0), MostDerivedArraySize(0) {
284 assert(
V.isLValue() &&
"Non-LValue used to make an LValue designator?");
286 IsOnePastTheEnd =
V.isLValueOnePastTheEnd();
288 Entries.insert(Entries.end(), VEntries.begin(), VEntries.end());
289 if (
V.getLValueBase()) {
290 bool IsArray =
false;
291 bool FirstIsUnsizedArray =
false;
292 MostDerivedPathLength = findMostDerivedSubobject(
293 Ctx,
V.getLValueBase(),
V.getLValuePath(), MostDerivedArraySize,
294 MostDerivedType, IsArray, FirstIsUnsizedArray);
295 MostDerivedIsArrayElement = IsArray;
296 FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
// truncatePath (signature head elided): drop designator entries beyond
// NewLength, then recompute the cached most-derived-subobject state so the
// Most* fields stay consistent with the shortened path.
302 unsigned NewLength) {
306 assert(
Base &&
"cannot truncate path for null pointer");
307 assert(NewLength <= Entries.size() &&
"not a truncation");
// Nothing to do if the path is already the requested length.
309 if (NewLength == Entries.size())
311 Entries.resize(NewLength);
// Recompute MostDerived{PathLength,ArraySize,Type,IsArrayElement} and the
// unsized-array flag from the truncated path.
313 bool IsArray =
false;
314 bool FirstIsUnsizedArray =
false;
315 MostDerivedPathLength = findMostDerivedSubobject(
316 Ctx,
Base, Entries, MostDerivedArraySize, MostDerivedType, IsArray,
317 FirstIsUnsizedArray);
318 MostDerivedIsArrayElement = IsArray;
319 FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
// True when the designator points at an element of an array of unknown
// bound, i.e. the single path entry is the unsized-array index.
329 bool isMostDerivedAnUnsizedArray()
const {
330 assert(!Invalid &&
"Calling this makes no sense on invalid designators");
331 return Entries.size() == 1 && FirstEntryIsAnUnsizedArray;
// Bound of the most-derived array. Callers must rule out unsized arrays
// first — their "size" is only the placeholder assumption.
336 uint64_t getMostDerivedArraySize()
const {
337 assert(!isMostDerivedAnUnsizedArray() &&
"Unsized array has no size");
338 return MostDerivedArraySize;
// Whether this designator refers one past the end of an object: either the
// explicit flag (earlier lines elided in this excerpt) or an array index
// equal to the array bound.
342 bool isOnePastTheEnd()
const {
346 if (!isMostDerivedAnUnsizedArray() && MostDerivedIsArrayElement &&
347 Entries[MostDerivedPathLength - 1].getAsArrayIndex() ==
348 MostDerivedArraySize)
// Return {how far the index may move down, how far it may move up} while
// staying within [0, bound] of the most-derived array; a non-array
// subobject behaves as an array of one element.
356 std::pair<uint64_t, uint64_t> validIndexAdjustments() {
357 if (Invalid || isMostDerivedAnUnsizedArray())
// Only treat the designator as an array index when the final path entry
// really is the most-derived array element.
363 bool IsArray = MostDerivedPathLength == Entries.size() &&
364 MostDerivedIsArrayElement;
365 uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
368 IsArray ? getMostDerivedArraySize() : (
uint64_t)1;
369 return {ArrayIndex, ArraySize - ArrayIndex};
373 bool isValidSubobject()
const {
376 return !isOnePastTheEnd();
384 assert(!Invalid &&
"invalid designator has no subobject type");
385 return MostDerivedPathLength == Entries.size()
392 Entries.push_back(PathEntry::ArrayIndex(0));
396 MostDerivedIsArrayElement =
true;
397 MostDerivedArraySize = CAT->
getSize().getZExtValue();
398 MostDerivedPathLength = Entries.size();
402 void addUnsizedArrayUnchecked(
QualType ElemTy) {
403 Entries.push_back(PathEntry::ArrayIndex(0));
405 MostDerivedType = ElemTy;
406 MostDerivedIsArrayElement =
true;
410 MostDerivedArraySize = AssumedSizeForUnsizedArray;
411 MostDerivedPathLength = Entries.size();
415 void addDeclUnchecked(
const Decl *D,
bool Virtual =
false) {
419 if (
const FieldDecl *FD = dyn_cast<FieldDecl>(D)) {
420 MostDerivedType = FD->getType();
421 MostDerivedIsArrayElement =
false;
422 MostDerivedArraySize = 0;
423 MostDerivedPathLength = Entries.size();
427 void addComplexUnchecked(
QualType EltTy,
bool Imag) {
428 Entries.push_back(PathEntry::ArrayIndex(Imag));
432 MostDerivedType = EltTy;
433 MostDerivedIsArrayElement =
true;
434 MostDerivedArraySize = 2;
435 MostDerivedPathLength = Entries.size();
437 void diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info,
const Expr *E);
438 void diagnosePointerArithmetic(EvalInfo &Info,
const Expr *E,
// Add N to the designator's final array index, diagnosing out-of-bounds
// arithmetic. Non-array subobjects are treated as single-element arrays so
// only 0 and +1 (one-past-the-end) are valid positions.
441 void adjustIndex(EvalInfo &Info,
const Expr *E,
APSInt N) {
442 if (Invalid || !N)
return;
// Work with a 64-bit truncation for the actual index update.
443 uint64_t TruncatedN = N.extOrTrunc(64).getZExtValue();
// Arithmetic on an array of unknown bound can't be bounds-checked;
// diagnose and (handling elided in this excerpt) adjust blindly.
444 if (isMostDerivedAnUnsizedArray()) {
445 diagnoseUnsizedArrayPointerArithmetic(Info, E);
449 Entries.back() = PathEntry::ArrayIndex(
450 Entries.back().getAsArrayIndex() + TruncatedN);
457 bool IsArray = MostDerivedPathLength == Entries.size() &&
458 MostDerivedIsArrayElement;
459 uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
462 IsArray ? getMostDerivedArraySize() : (
uint64_t)1;
// Out-of-bounds: N would move the index below 0 or beyond one-past-the-end.
464 if (N < -(
int64_t)ArrayIndex || N > ArraySize - ArrayIndex) {
// Widen N so the diagnostic can print the offending value exactly.
467 N = N.extend(std::max<unsigned>(N.getBitWidth() + 1, 65));
469 assert(N.ugt(ArraySize) &&
"bounds check failed for in-bounds index");
470 diagnosePointerArithmetic(Info, E, N);
// In-bounds: commit the adjusted index.
475 ArrayIndex += TruncatedN;
476 assert(ArrayIndex <= ArraySize &&
477 "bounds check succeeded for out-of-bounds index");
480 Entries.back() = PathEntry::ArrayIndex(ArrayIndex);
// A non-array subobject at index 1 is the one-past-the-end position.
482 IsOnePastTheEnd = (ArrayIndex != 0);
487 enum class ScopeKind {
495 CallRef() : OrigCallee(), CallIndex(0), Version() {}
496 CallRef(
const FunctionDecl *Callee,
unsigned CallIndex,
unsigned Version)
497 : OrigCallee(Callee), CallIndex(CallIndex), Version(Version) {}
499 explicit operator bool()
const {
return OrigCallee; }
525 CallStackFrame *Caller;
544 typedef std::pair<const void *, unsigned> MapKeyTy;
545 typedef std::map<MapKeyTy, APValue>
MapTy;
557 unsigned CurTempVersion = TempVersionStack.back();
559 unsigned getTempVersion()
const {
return TempVersionStack.back(); }
561 void pushTempVersion() {
562 TempVersionStack.push_back(++CurTempVersion);
565 void popTempVersion() {
566 TempVersionStack.pop_back();
570 return {Callee, Index, ++CurTempVersion};
581 llvm::DenseMap<const ValueDecl *, FieldDecl *> LambdaCaptureFields;
590 APValue *getTemporary(
const void *Key,
unsigned Version) {
591 MapKeyTy KV(Key, Version);
592 auto LB = Temporaries.lower_bound(KV);
593 if (LB != Temporaries.end() && LB->first == KV)
599 APValue *getCurrentTemporary(
const void *Key) {
600 auto UB = Temporaries.upper_bound(MapKeyTy(Key,
UINT_MAX));
601 if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
602 return &std::prev(UB)->second;
607 unsigned getCurrentTemporaryVersion(
const void *Key)
const {
608 auto UB = Temporaries.upper_bound(MapKeyTy(Key,
UINT_MAX));
609 if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
610 return std::prev(UB)->first.second;
618 template<
typename KeyT>
620 ScopeKind
Scope, LValue &LV);
625 void describe(llvm::raw_ostream &OS)
override;
627 Frame *getCaller()
const override {
return Caller; }
628 SourceLocation getCallLocation()
const override {
return CallLoc; }
629 const FunctionDecl *getCallee()
const override {
return Callee; }
631 bool isStdFunction()
const {
632 for (
const DeclContext *DC = Callee; DC; DC = DC->getParent())
633 if (DC->isStdNamespace())
644 class ThisOverrideRAII {
646 ThisOverrideRAII(CallStackFrame &Frame,
const LValue *NewThis,
bool Enable)
647 : Frame(Frame), OldThis(Frame.This) {
649 Frame.This = NewThis;
651 ~ThisOverrideRAII() {
652 Frame.This = OldThis;
655 CallStackFrame &Frame;
656 const LValue *OldThis;
661 class ExprTimeTraceScope {
663 ExprTimeTraceScope(
const Expr *E,
const ASTContext &Ctx, StringRef Name)
664 : TimeScope(Name, [E, &Ctx] {
669 llvm::TimeTraceScope TimeScope;
682 llvm::PointerIntPair<APValue*, 2, ScopeKind>
Value;
693 bool isDestroyedAtEndOf(ScopeKind K)
const {
694 return (
int)
Value.getInt() >= (
int)K;
696 bool endLifetime(EvalInfo &Info,
bool RunDestructors) {
697 if (RunDestructors) {
700 Loc = VD->getLocation();
702 Loc = E->getExprLoc();
709 bool hasSideEffect() {
715 struct ObjectUnderConstruction {
718 friend bool operator==(
const ObjectUnderConstruction &LHS,
719 const ObjectUnderConstruction &RHS) {
720 return LHS.Base == RHS.Base && LHS.Path == RHS.Path;
722 friend llvm::hash_code
hash_value(
const ObjectUnderConstruction &Obj) {
726 enum class ConstructionPhase {
737 template<>
struct DenseMapInfo<ObjectUnderConstruction> {
738 using Base = DenseMapInfo<APValue::LValueBase>;
740 return {Base::getEmptyKey(), {}}; }
742 return {Base::getTombstoneKey(), {}};
747 static bool isEqual(
const ObjectUnderConstruction &LHS,
748 const ObjectUnderConstruction &RHS) {
762 const Expr *AllocExpr =
nullptr;
773 if (
auto *
NE = dyn_cast<CXXNewExpr>(AllocExpr))
774 return NE->isArray() ? ArrayNew : New;
775 assert(isa<CallExpr>(AllocExpr));
780 struct DynAllocOrder {
808 CallStackFrame *CurrentCall;
811 unsigned CallStackDepth;
814 unsigned NextCallIndex;
823 bool EnableNewConstInterp;
827 CallStackFrame BottomFrame;
837 enum class EvaluatingDeclKind {
844 EvaluatingDeclKind IsEvaluatingDecl = EvaluatingDeclKind::None;
851 llvm::DenseMap<ObjectUnderConstruction, ConstructionPhase>
852 ObjectsUnderConstruction;
857 std::map<DynamicAllocLValue, DynAlloc, DynAllocOrder> HeapAllocs;
860 unsigned NumHeapAllocs = 0;
862 struct EvaluatingConstructorRAII {
864 ObjectUnderConstruction
Object;
866 EvaluatingConstructorRAII(EvalInfo &EI, ObjectUnderConstruction Object,
870 EI.ObjectsUnderConstruction
871 .insert({
Object, HasBases ? ConstructionPhase::Bases
872 : ConstructionPhase::AfterBases})
875 void finishedConstructingBases() {
876 EI.ObjectsUnderConstruction[
Object] = ConstructionPhase::AfterBases;
878 void finishedConstructingFields() {
879 EI.ObjectsUnderConstruction[
Object] = ConstructionPhase::AfterFields;
881 ~EvaluatingConstructorRAII() {
882 if (DidInsert) EI.ObjectsUnderConstruction.erase(Object);
886 struct EvaluatingDestructorRAII {
888 ObjectUnderConstruction
Object;
890 EvaluatingDestructorRAII(EvalInfo &EI, ObjectUnderConstruction Object)
892 DidInsert = EI.ObjectsUnderConstruction
893 .insert({
Object, ConstructionPhase::Destroying})
896 void startedDestroyingBases() {
897 EI.ObjectsUnderConstruction[
Object] =
898 ConstructionPhase::DestroyingBases;
900 ~EvaluatingDestructorRAII() {
902 EI.ObjectsUnderConstruction.erase(Object);
909 return ObjectsUnderConstruction.lookup({
Base, Path});
914 unsigned SpeculativeEvaluationDepth = 0;
922 bool HasActiveDiagnostic;
926 bool HasFoldFailureDiagnostic;
931 bool CheckingPotentialConstantExpression =
false;
939 bool CheckingForUndefinedBehavior =
false;
941 enum EvaluationMode {
944 EM_ConstantExpression,
951 EM_ConstantExpressionUnevaluated,
959 EM_IgnoreSideEffects,
964 bool checkingPotentialConstantExpression()
const override {
965 return CheckingPotentialConstantExpression;
971 bool checkingForUndefinedBehavior()
const override {
972 return CheckingForUndefinedBehavior;
976 : Ctx(const_cast<
ASTContext &>(
C)), EvalStatus(S), CurrentCall(nullptr),
977 CallStackDepth(0), NextCallIndex(1),
978 StepsLeft(
C.getLangOpts().ConstexprStepLimit),
979 EnableNewConstInterp(
C.getLangOpts().EnableNewConstInterp),
980 BottomFrame(*this,
SourceLocation(), nullptr, nullptr, CallRef()),
981 EvaluatingDecl((const
ValueDecl *)nullptr),
982 EvaluatingDeclValue(nullptr), HasActiveDiagnostic(
false),
983 HasFoldFailureDiagnostic(
false), EvalMode(Mode) {}
989 ASTContext &getCtx()
const override {
return Ctx; }
992 EvaluatingDeclKind EDK = EvaluatingDeclKind::Ctor) {
993 EvaluatingDecl =
Base;
994 IsEvaluatingDecl = EDK;
995 EvaluatingDeclValue = &
Value;
1001 if (checkingPotentialConstantExpression() && CallStackDepth > 1)
1003 if (NextCallIndex == 0) {
1005 FFDiag(Loc, diag::note_constexpr_call_limit_exceeded);
1008 if (CallStackDepth <= getLangOpts().ConstexprCallDepth)
1010 FFDiag(Loc, diag::note_constexpr_depth_limit_exceeded)
1011 << getLangOpts().ConstexprCallDepth;
1015 std::pair<CallStackFrame *, unsigned>
1016 getCallFrameAndDepth(
unsigned CallIndex) {
1017 assert(CallIndex &&
"no call index in getCallFrameAndDepth");
1020 unsigned Depth = CallStackDepth;
1021 CallStackFrame *Frame = CurrentCall;
1022 while (Frame->Index > CallIndex) {
1023 Frame = Frame->Caller;
1026 if (Frame->Index == CallIndex)
1027 return {Frame,
Depth};
1028 return {
nullptr, 0};
1031 bool nextStep(
const Stmt *S) {
1033 FFDiag(S->getBeginLoc(), diag::note_constexpr_step_limit_exceeded);
1043 std::optional<DynAlloc *> Result;
1044 auto It = HeapAllocs.find(DA);
1045 if (It != HeapAllocs.end())
1046 Result = &It->second;
1052 CallStackFrame *Frame = getCallFrameAndDepth(
Call.CallIndex).first;
1053 return Frame ? Frame->getTemporary(
Call.getOrigParam(PVD),
Call.Version)
1058 struct StdAllocatorCaller {
1059 unsigned FrameIndex;
1061 explicit operator bool()
const {
return FrameIndex != 0; };
1064 StdAllocatorCaller getStdAllocatorCaller(StringRef FnName)
const {
1065 for (
const CallStackFrame *Call = CurrentCall;
Call != &BottomFrame;
1067 const auto *MD = dyn_cast_or_null<CXXMethodDecl>(
Call->Callee);
1071 if (!FnII || !FnII->
isStr(FnName))
1075 dyn_cast<ClassTemplateSpecializationDecl>(MD->getParent());
1081 if (CTSD->isInStdNamespace() && ClassII &&
1082 ClassII->
isStr(
"allocator") && TAL.
size() >= 1 &&
1084 return {
Call->Index, TAL[0].getAsType()};
// Lifetime-extend full-expression temporaries: remove from the cleanup
// stack every cleanup that would NOT survive past the full-expression, so
// the surviving values are no longer destroyed at its end.
1090 void performLifetimeExtension() {
1092 llvm::erase_if(CleanupStack, [](Cleanup &C) {
1093 return !
C.isDestroyedAtEndOf(ScopeKind::FullExpression);
// Throw away all pending cleanups without running destructors. A cleanup
// with an observable side effect counts as a side effect of the evaluation;
// if side effects are not permitted in the current mode, fail (returning
// after clearing the stack — the elided lines carry the return values).
1100 bool discardCleanups() {
1101 for (Cleanup &C : CleanupStack) {
1102 if (
C.hasSideEffect() && !noteSideEffect()) {
1103 CleanupStack.clear();
1107 CleanupStack.clear();
1112 interp::Frame *getCurrentFrame()
override {
return CurrentCall; }
1113 const interp::Frame *getBottomFrame()
const override {
return &BottomFrame; }
1115 bool hasActiveDiagnostic()
override {
return HasActiveDiagnostic; }
1116 void setActiveDiagnostic(
bool Flag)
override { HasActiveDiagnostic = Flag; }
1118 void setFoldFailureDiagnostic(
bool Flag)
override {
1119 HasFoldFailureDiagnostic = Flag;
1130 bool hasPriorDiagnostic()
override {
1131 if (!EvalStatus.
Diag->empty()) {
1133 case EM_ConstantFold:
1134 case EM_IgnoreSideEffects:
1135 if (!HasFoldFailureDiagnostic)
1139 case EM_ConstantExpression:
1140 case EM_ConstantExpressionUnevaluated:
1141 setActiveDiagnostic(
false);
1148 unsigned getCallStackDepth()
override {
return CallStackDepth; }
// Should evaluation continue after an unmodeled side effect? Fold/ignore
// modes press on; strict constant-expression modes only continue when we're
// merely probing for potential constexpr-ness or undefined behavior.
// (The enclosing switch header is elided in this excerpt.)
1153 bool keepEvaluatingAfterSideEffect() {
1155 case EM_IgnoreSideEffects:
1158 case EM_ConstantExpression:
1159 case EM_ConstantExpressionUnevaluated:
1160 case EM_ConstantFold:
1163 return checkingPotentialConstantExpression() ||
1164 checkingForUndefinedBehavior();
1166 llvm_unreachable(
"Missed EvalMode case");
1171 bool noteSideEffect() {
1173 return keepEvaluatingAfterSideEffect();
// Should evaluation continue after undefined behavior was diagnosed?
// Fold/ignore modes continue; constant-expression modes continue only when
// explicitly scanning for UB. (Switch header elided in this excerpt.)
1177 bool keepEvaluatingAfterUndefinedBehavior() {
1179 case EM_IgnoreSideEffects:
1180 case EM_ConstantFold:
1183 case EM_ConstantExpression:
1184 case EM_ConstantExpressionUnevaluated:
1185 return checkingForUndefinedBehavior();
1187 llvm_unreachable(
"Missed EvalMode case");
1193 bool noteUndefinedBehavior()
override {
1195 return keepEvaluatingAfterUndefinedBehavior();
// Should evaluation continue after outright failure? Only while checking a
// potential constant expression or scanning for undefined behavior — both
// want as many diagnostics as possible. (Step-limit guard and switch
// header are elided in this excerpt.)
1200 bool keepEvaluatingAfterFailure()
const override {
1205 case EM_ConstantExpression:
1206 case EM_ConstantExpressionUnevaluated:
1207 case EM_ConstantFold:
1208 case EM_IgnoreSideEffects:
1209 return checkingPotentialConstantExpression() ||
1210 checkingForUndefinedBehavior();
1212 llvm_unreachable(
"Missed EvalMode case");
1225 [[nodiscard]]
bool noteFailure() {
1233 bool KeepGoing = keepEvaluatingAfterFailure();
1238 class ArrayInitLoopIndex {
1243 ArrayInitLoopIndex(EvalInfo &Info)
1244 : Info(Info), OuterIndex(Info.ArrayInitIndex) {
1245 Info.ArrayInitIndex = 0;
1247 ~ArrayInitLoopIndex() { Info.ArrayInitIndex = OuterIndex; }
1249 operator uint64_t&() {
return Info.ArrayInitIndex; }
1254 struct FoldConstant {
1257 bool HadNoPriorDiags;
1258 EvalInfo::EvaluationMode OldMode;
1260 explicit FoldConstant(EvalInfo &Info,
bool Enabled)
1263 HadNoPriorDiags(Info.EvalStatus.
Diag &&
1264 Info.EvalStatus.
Diag->empty() &&
1265 !Info.EvalStatus.HasSideEffects),
1266 OldMode(Info.EvalMode) {
1268 Info.EvalMode = EvalInfo::EM_ConstantFold;
1270 void keepDiagnostics() { Enabled =
false; }
1272 if (Enabled && HadNoPriorDiags && !Info.EvalStatus.Diag->empty() &&
1273 !Info.EvalStatus.HasSideEffects)
1274 Info.EvalStatus.Diag->clear();
1275 Info.EvalMode = OldMode;
1281 struct IgnoreSideEffectsRAII {
1283 EvalInfo::EvaluationMode OldMode;
1284 explicit IgnoreSideEffectsRAII(EvalInfo &Info)
1285 : Info(Info), OldMode(Info.EvalMode) {
1286 Info.EvalMode = EvalInfo::EM_IgnoreSideEffects;
1289 ~IgnoreSideEffectsRAII() { Info.EvalMode = OldMode; }
1294 class SpeculativeEvaluationRAII {
1295 EvalInfo *Info =
nullptr;
1297 unsigned OldSpeculativeEvaluationDepth;
1299 void moveFromAndCancel(SpeculativeEvaluationRAII &&Other) {
1301 OldStatus =
Other.OldStatus;
1302 OldSpeculativeEvaluationDepth =
Other.OldSpeculativeEvaluationDepth;
1303 Other.Info =
nullptr;
1306 void maybeRestoreState() {
1310 Info->EvalStatus = OldStatus;
1311 Info->SpeculativeEvaluationDepth = OldSpeculativeEvaluationDepth;
1315 SpeculativeEvaluationRAII() =
default;
1317 SpeculativeEvaluationRAII(
1319 : Info(&Info), OldStatus(Info.EvalStatus),
1320 OldSpeculativeEvaluationDepth(Info.SpeculativeEvaluationDepth) {
1321 Info.EvalStatus.Diag = NewDiag;
1322 Info.SpeculativeEvaluationDepth = Info.CallStackDepth + 1;
1325 SpeculativeEvaluationRAII(
const SpeculativeEvaluationRAII &Other) =
delete;
1326 SpeculativeEvaluationRAII(SpeculativeEvaluationRAII &&Other) {
1327 moveFromAndCancel(std::move(Other));
1330 SpeculativeEvaluationRAII &operator=(SpeculativeEvaluationRAII &&Other) {
1331 maybeRestoreState();
1332 moveFromAndCancel(std::move(Other));
1336 ~SpeculativeEvaluationRAII() { maybeRestoreState(); }
1341 template<ScopeKind Kind>
1344 unsigned OldStackSize;
1346 ScopeRAII(EvalInfo &Info)
1347 : Info(Info), OldStackSize(Info.CleanupStack.size()) {
1350 Info.CurrentCall->pushTempVersion();
1352 bool destroy(
bool RunDestructors =
true) {
1353 bool OK =
cleanup(Info, RunDestructors, OldStackSize);
1358 if (OldStackSize != -1
U)
1362 Info.CurrentCall->popTempVersion();
1365 static bool cleanup(EvalInfo &Info,
bool RunDestructors,
1366 unsigned OldStackSize) {
1367 assert(OldStackSize <= Info.CleanupStack.size() &&
1368 "running cleanups out of order?");
1373 for (
unsigned I = Info.CleanupStack.size(); I > OldStackSize; --I) {
1374 if (Info.CleanupStack[I - 1].isDestroyedAtEndOf(
Kind)) {
1375 if (!Info.CleanupStack[I - 1].endLifetime(Info, RunDestructors)) {
1383 auto NewEnd = Info.CleanupStack.begin() + OldStackSize;
1384 if (
Kind != ScopeKind::Block)
1386 std::remove_if(NewEnd, Info.CleanupStack.end(), [](Cleanup &C) {
1387 return C.isDestroyedAtEndOf(Kind);
1389 Info.CleanupStack.erase(NewEnd, Info.CleanupStack.end());
1393 typedef ScopeRAII<ScopeKind::Block> BlockScopeRAII;
1394 typedef ScopeRAII<ScopeKind::FullExpression> FullExpressionRAII;
1395 typedef ScopeRAII<ScopeKind::Call> CallScopeRAII;
1398 bool SubobjectDesignator::checkSubobject(EvalInfo &Info,
const Expr *E,
1402 if (isOnePastTheEnd()) {
1403 Info.CCEDiag(E, diag::note_constexpr_past_end_subobject)
1414 void SubobjectDesignator::diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info,
1416 Info.CCEDiag(E, diag::note_constexpr_unsized_array_indexed);
1421 void SubobjectDesignator::diagnosePointerArithmetic(EvalInfo &Info,
1426 if (MostDerivedPathLength == Entries.size() && MostDerivedIsArrayElement)
1427 Info.CCEDiag(E, diag::note_constexpr_array_index)
1429 <<
static_cast<unsigned>(getMostDerivedArraySize());
1431 Info.CCEDiag(E, diag::note_constexpr_array_index)
1436 CallStackFrame::CallStackFrame(EvalInfo &Info,
SourceLocation CallLoc,
1440 Arguments(
Call), CallLoc(CallLoc), Index(Info.NextCallIndex++) {
1441 Info.CurrentCall =
this;
1442 ++Info.CallStackDepth;
// Pop this frame off the evaluator's call stack, restoring the caller as
// the current frame. Frames must be destroyed strictly LIFO.
1445 CallStackFrame::~CallStackFrame() {
1446 assert(Info.CurrentCall ==
this &&
"calls retired out of order");
1447 --Info.CallStackDepth;
1448 Info.CurrentCall = Caller;
1470 llvm_unreachable(
"unknown access kind");
1504 llvm_unreachable(
"unknown access kind");
1508 struct ComplexValue {
1516 ComplexValue() : FloatReal(
APFloat::Bogus()), FloatImag(
APFloat::Bogus()) {}
1518 void makeComplexFloat() { IsInt =
false; }
1519 bool isComplexFloat()
const {
return !IsInt; }
1520 APFloat &getComplexFloatReal() {
return FloatReal; }
1521 APFloat &getComplexFloatImag() {
return FloatImag; }
1523 void makeComplexInt() { IsInt =
true; }
1524 bool isComplexInt()
const {
return IsInt; }
1525 APSInt &getComplexIntReal() {
return IntReal; }
1526 APSInt &getComplexIntImag() {
return IntImag; }
1529 if (isComplexFloat())
1535 assert(
v.isComplexFloat() ||
v.isComplexInt());
1536 if (
v.isComplexFloat()) {
1538 FloatReal =
v.getComplexFloatReal();
1539 FloatImag =
v.getComplexFloatImag();
1542 IntReal =
v.getComplexIntReal();
1543 IntImag =
v.getComplexIntImag();
1553 bool InvalidBase : 1;
1558 SubobjectDesignator &getLValueDesignator() {
return Designator; }
1559 const SubobjectDesignator &getLValueDesignator()
const {
return Designator;}
1560 bool isNullPointer()
const {
return IsNullPtr;}
1562 unsigned getLValueCallIndex()
const {
return Base.getCallIndex(); }
1563 unsigned getLValueVersion()
const {
return Base.getVersion(); }
1569 assert(!InvalidBase &&
"APValues can't handle invalid LValue bases");
1575 assert(
V.isLValue() &&
"Setting LValue from a non-LValue?");
1576 Base =
V.getLValueBase();
1578 InvalidBase =
false;
1580 IsNullPtr =
V.isNullPointer();
1587 const auto *E = B.
get<
const Expr *>();
1588 assert((isa<MemberExpr>(E) || tryUnwrapAllocSizeCall(E)) &&
1589 "Unexpected type of invalid base");
1594 Offset = CharUnits::fromQuantity(0);
1595 InvalidBase = BInvalid;
1596 Designator = SubobjectDesignator(getType(B));
1604 InvalidBase =
false;
1615 moveInto(Printable);
1622 template <
typename GenDiagType>
1623 bool checkNullPointerDiagnosingWith(
const GenDiagType &GenDiag) {
1635 bool checkNullPointer(EvalInfo &Info,
const Expr *E,
1637 return checkNullPointerDiagnosingWith([&Info, E, CSK] {
1638 Info.CCEDiag(E, diag::note_constexpr_null_subobject) << CSK;
1642 bool checkNullPointerForFoldAccess(EvalInfo &Info,
const Expr *E,
1644 return checkNullPointerDiagnosingWith([&Info, E, AK] {
1645 Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
1656 void addDecl(EvalInfo &Info,
const Expr *E,
1657 const Decl *D,
bool Virtual =
false) {
1661 void addUnsizedArray(EvalInfo &Info,
const Expr *E,
QualType ElemTy) {
1663 Info.CCEDiag(E, diag::note_constexpr_unsupported_unsized_array);
1668 assert(getType(
Base)->isPointerType() || getType(
Base)->isArrayType());
1669 Designator.FirstEntryIsAnUnsizedArray =
true;
1677 void addComplex(EvalInfo &Info,
const Expr *E,
QualType EltTy,
bool Imag) {
1681 void clearIsNullPointer() {
1684 void adjustOffsetAndIndex(EvalInfo &Info,
const Expr *E,
1696 uint64_t Index64 = Index.extOrTrunc(64).getZExtValue();
1697 Offset = CharUnits::fromQuantity(Offset64 + ElemSize64 * Index64);
1701 clearIsNullPointer();
1706 clearIsNullPointer();
1713 : DeclAndIsDerivedMember(
Decl,
false) {}
1718 return DeclAndIsDerivedMember.getPointer();
1721 bool isDerivedMember()
const {
1722 return DeclAndIsDerivedMember.getInt();
1726 return cast<CXXRecordDecl>(
1727 DeclAndIsDerivedMember.getPointer()->getDeclContext());
1731 V =
APValue(getDecl(), isDerivedMember(), Path);
1734 assert(
V.isMemberPointer());
1735 DeclAndIsDerivedMember.setPointer(
V.getMemberPointerDecl());
1736 DeclAndIsDerivedMember.setInt(
V.isMemberPointerToDerivedMember());
1739 Path.insert(Path.end(),
P.begin(),
P.end());
1745 llvm::PointerIntPair<const ValueDecl*, 1, bool> DeclAndIsDerivedMember;
1753 assert(!Path.empty());
1755 if (Path.size() >= 2)
1759 if (
Expected->getCanonicalDecl() !=
Class->getCanonicalDecl()) {
1775 if (!isDerivedMember()) {
1776 Path.push_back(Derived);
1779 if (!castBack(Derived))
1782 DeclAndIsDerivedMember.setInt(
false);
1790 DeclAndIsDerivedMember.setInt(
true);
1791 if (isDerivedMember()) {
1792 Path.push_back(
Base);
1795 return castBack(
Base);
// Member-pointer equality: two null member pointers compare equal; otherwise
// the canonical declarations and the derived-to-base paths must both match.
// (Original line 1804, presumably `return false;`, is elided in this
// excerpt.)
1800 static bool operator==(
const MemberPtr &LHS,
const MemberPtr &RHS) {
1801 if (!LHS.getDecl() || !RHS.getDecl())
1802 return !LHS.getDecl() && !RHS.getDecl();
1803 if (LHS.getDecl()->getCanonicalDecl() != RHS.getDecl()->getCanonicalDecl())
1805 return LHS.Path == RHS.Path;
1812 bool AllowNonLiteralTypes =
false);
1814 bool InvalidBaseOK =
false);
1816 bool InvalidBaseOK =
false);
1846 if (Int.isUnsigned() || Int.isMinSignedValue()) {
1847 Int = Int.extend(Int.getBitWidth() + 1);
1848 Int.setIsSigned(
true);
1853 template<
typename KeyT>
1855 ScopeKind
Scope, LValue &LV) {
1856 unsigned Version = getTempVersion();
1859 return createLocal(
Base, Key, T,
Scope);
1865 assert(Args.CallIndex == Index &&
"creating parameter in wrong frame");
1876 assert(
Base.getCallIndex() == Index &&
"lvalue for wrong frame");
1877 unsigned Version =
Base.getVersion();
1878 APValue &Result = Temporaries[MapKeyTy(Key, Version)];
1879 assert(Result.isAbsent() &&
"local created multiple times");
1885 if (Index <= Info.SpeculativeEvaluationDepth) {
1887 Info.noteSideEffect();
1889 Info.CleanupStack.push_back(Cleanup(&Result,
Base, T,
Scope));
1895 if (NumHeapAllocs > DynamicAllocLValue::getMaxIndex()) {
1896 FFDiag(E, diag::note_constexpr_heap_alloc_limit_exceeded);
1901 LV.set(APValue::LValueBase::getDynamicAlloc(DA, T));
1902 auto Result = HeapAllocs.emplace(std::piecewise_construct,
1903 std::forward_as_tuple(DA), std::tuple<>());
1904 assert(Result.second &&
"reused a heap alloc index?");
1905 Result.first->second.AllocExpr = E;
1906 return &Result.first->second.Value;
1911 unsigned ArgIndex = 0;
1912 bool IsMemberCall = isa<CXXMethodDecl>(Callee) &&
1913 !isa<CXXConstructorDecl>(Callee) &&
1914 cast<CXXMethodDecl>(Callee)->isInstance();
1919 if (
This && IsMemberCall) {
1921 This->moveInto(Val);
1923 This->Designator.MostDerivedType);
1925 Out <<
"->" << *
Callee <<
'(';
1926 IsMemberCall =
false;
1930 E =
Callee->param_end(); I != E; ++I, ++ArgIndex) {
1931 if (ArgIndex > (
unsigned)IsMemberCall)
1935 APValue *
V = Info.getParamSlot(Arguments, Param);
1937 V->printPretty(Out, Info.Ctx, Param->
getType());
1941 if (ArgIndex == 0 && IsMemberCall)
1942 Out <<
"->" << *
Callee <<
'(';
1956 return Info.noteSideEffect();
1963 return (Builtin == Builtin::BI__builtin___CFStringMakeConstantString ||
1964 Builtin == Builtin::BI__builtin___NSStringMakeConstantString ||
1965 Builtin == Builtin::BI__builtin_function_start);
1974 if (!B)
return true;
1978 if (
const VarDecl *VD = dyn_cast<VarDecl>(D))
1979 return VD->hasGlobalStorage();
1980 if (isa<TemplateParamObjectDecl>(D))
1985 return isa<FunctionDecl, MSGuidDecl, UnnamedGlobalConstantDecl>(D);
1995 case Expr::CompoundLiteralExprClass: {
1999 case Expr::MaterializeTemporaryExprClass:
2002 return cast<MaterializeTemporaryExpr>(E)->getStorageDuration() ==
SD_Static;
2004 case Expr::StringLiteralClass:
2005 case Expr::PredefinedExprClass:
2006 case Expr::ObjCStringLiteralClass:
2007 case Expr::ObjCEncodeExprClass:
2009 case Expr::ObjCBoxedExprClass:
2010 return cast<ObjCBoxedExpr>(E)->isExpressibleAsConstantInitializer();
2011 case Expr::CallExprClass:
2014 case Expr::AddrLabelExprClass:
2018 case Expr::BlockExprClass:
2019 return !cast<BlockExpr>(E)->getBlockDecl()->hasCaptures();
2022 case Expr::SourceLocExprClass:
2024 case Expr::ImplicitValueInitExprClass:
2036 return LVal.Base.dyn_cast<
const ValueDecl*>();
2040 if (
Value.getLValueCallIndex())
2043 return E && !isa<MaterializeTemporaryExpr>(E);
2063 if (!A.getLValueBase())
2064 return !B.getLValueBase();
2065 if (!B.getLValueBase())
2068 if (A.getLValueBase().getOpaqueValue() !=
2069 B.getLValueBase().getOpaqueValue())
2072 return A.getLValueCallIndex() == B.getLValueCallIndex() &&
2073 A.getLValueVersion() == B.getLValueVersion();
2077 assert(
Base &&
"no location for a null lvalue");
2083 if (
auto *PVD = dyn_cast_or_null<ParmVarDecl>(VD)) {
2085 for (CallStackFrame *F = Info.CurrentCall; F; F = F->Caller) {
2086 if (F->Arguments.CallIndex ==
Base.getCallIndex() &&
2087 F->Arguments.Version ==
Base.getVersion() && F->Callee &&
2088 Idx < F->Callee->getNumParams()) {
2089 VD = F->Callee->getParamDecl(Idx);
2096 Info.Note(VD->
getLocation(), diag::note_declared_at);
2098 Info.Note(E->
getExprLoc(), diag::note_constexpr_temporary_here);
2101 if (std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA))
2102 Info.Note((*Alloc)->AllocExpr->getExprLoc(),
2103 diag::note_constexpr_dynamic_alloc_here);
2135 const SubobjectDesignator &
Designator = LVal.getLValueDesignator();
2143 if (isTemplateArgument(
Kind)) {
2144 int InvalidBaseKind = -1;
2147 InvalidBaseKind = 0;
2148 else if (isa_and_nonnull<StringLiteral>(BaseE))
2149 InvalidBaseKind = 1;
2150 else if (isa_and_nonnull<MaterializeTemporaryExpr>(BaseE) ||
2151 isa_and_nonnull<LifetimeExtendedTemporaryDecl>(BaseVD))
2152 InvalidBaseKind = 2;
2153 else if (
auto *PE = dyn_cast_or_null<PredefinedExpr>(BaseE)) {
2154 InvalidBaseKind = 3;
2155 Ident = PE->getIdentKindName();
2158 if (InvalidBaseKind != -1) {
2159 Info.FFDiag(Loc, diag::note_constexpr_invalid_template_arg)
2160 << IsReferenceType << !
Designator.Entries.empty() << InvalidBaseKind
2166 if (
auto *FD = dyn_cast_or_null<FunctionDecl>(BaseVD)) {
2167 if (FD->isConsteval()) {
2168 Info.FFDiag(Loc, diag::note_consteval_address_accessible)
2170 Info.Note(FD->getLocation(), diag::note_declared_at);
2179 if (Info.getLangOpts().CPlusPlus11) {
2180 Info.FFDiag(Loc, diag::note_constexpr_non_global, 1)
2181 << IsReferenceType << !
Designator.Entries.empty() << !!BaseVD
2183 auto *VarD = dyn_cast_or_null<VarDecl>(BaseVD);
2184 if (VarD && VarD->isConstexpr()) {
2190 Info.Note(VarD->getLocation(), diag::note_constexpr_not_static)
2192 << FixItHint::CreateInsertion(VarD->getBeginLoc(),
"static ");
2202 assert((Info.checkingPotentialConstantExpression() ||
2203 LVal.getLValueCallIndex() == 0) &&
2204 "have call index for global lvalue");
2207 Info.FFDiag(Loc, diag::note_constexpr_dynamic_alloc)
2208 << IsReferenceType << !
Designator.Entries.empty();
2214 if (
const VarDecl *Var = dyn_cast<const VarDecl>(BaseVD)) {
2216 if (Var->getTLSKind())
2222 if (!isForManglingOnly(
Kind) && Var->hasAttr<DLLImportAttr>())
2228 if (Info.getCtx().getLangOpts().CUDA &&
2229 Info.getCtx().getLangOpts().CUDAIsDevice &&
2230 Info.getCtx().CUDAConstantEvalCtx.NoWrongSidedVars) {
2231 if ((!Var->hasAttr<CUDADeviceAttr>() &&
2232 !Var->hasAttr<CUDAConstantAttr>() &&
2233 !Var->getType()->isCUDADeviceBuiltinSurfaceType() &&
2234 !Var->getType()->isCUDADeviceBuiltinTextureType()) ||
2235 Var->hasAttr<HIPManagedAttr>())
2239 if (
const auto *FD = dyn_cast<const FunctionDecl>(BaseVD)) {
2250 if (Info.getLangOpts().CPlusPlus && !isForManglingOnly(
Kind) &&
2251 FD->hasAttr<DLLImportAttr>())
2255 }
else if (
const auto *MTE =
2256 dyn_cast_or_null<MaterializeTemporaryExpr>(BaseE)) {
2257 if (CheckedTemps.insert(MTE).second) {
2260 Info.FFDiag(MTE->getExprLoc(),
2261 diag::note_constexpr_unsupported_temporary_nontrivial_dtor)
2266 APValue *
V = MTE->getOrCreateValue(
false);
2267 assert(
V &&
"evasluation result refers to uninitialised temporary");
2269 Info, MTE->getExprLoc(), TempType, *
V,
2277 if (!IsReferenceType)
2289 Info.FFDiag(Loc, diag::note_constexpr_past_end, 1)
2290 << !
Designator.Entries.empty() << !!BaseVD << BaseVD;
2305 const auto *FD = dyn_cast_or_null<CXXMethodDecl>(
Member);
2308 if (FD->isConsteval()) {
2309 Info.FFDiag(Loc, diag::note_consteval_address_accessible) << 0;
2310 Info.Note(FD->getLocation(), diag::note_declared_at);
2313 return isForManglingOnly(
Kind) || FD->isVirtual() ||
2314 !FD->hasAttr<DLLImportAttr>();
2320 const LValue *
This =
nullptr) {
2337 if (
This && Info.EvaluatingDecl ==
This->getLValueBase())
2341 if (Info.getLangOpts().CPlusPlus11)
2342 Info.FFDiag(E, diag::note_constexpr_nonliteral)
2345 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2355 if (!
Value.hasValue()) {
2356 Info.FFDiag(DiagLoc, diag::note_constexpr_uninitialized)
2359 Info.Note(SubobjectLoc, diag::note_constexpr_subobject_declared_here);
2366 Type = AT->getValueType();
2371 if (
Value.isArray()) {
2373 for (
unsigned I = 0, N =
Value.getArrayInitializedElts(); I != N; ++I) {
2376 SubobjectLoc, CheckedTemps))
2379 if (!
Value.hasArrayFiller())
2382 Value.getArrayFiller(),
Kind, SubobjectLoc,
2385 if (
Value.isUnion() &&
Value.getUnionField()) {
2387 CERK, Info, DiagLoc,
Value.getUnionField()->getType(),
2388 Value.getUnionValue(),
Kind,
Value.getUnionField()->getLocation(),
2391 if (
Value.isStruct()) {
2393 if (
const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
2394 unsigned BaseIndex = 0;
2398 BS.getBeginLoc(), CheckedTemps))
2403 for (
const auto *I : RD->
fields()) {
2404 if (I->isUnnamedBitfield())
2408 Value.getStructField(I->getFieldIndex()),
2409 Kind, I->getLocation(), CheckedTemps))
2414 if (
Value.isLValue() &&
2417 LVal.setFrom(Info.Ctx,
Value);
2422 if (
Value.isMemberPointer() &&
2459 if (!Info.HeapAllocs.empty()) {
2463 Info.CCEDiag(Info.HeapAllocs.begin()->second.AllocExpr,
2464 diag::note_constexpr_memory_leak)
2465 <<
unsigned(Info.HeapAllocs.size() - 1);
2473 if (!
Value.getLValueBase()) {
2475 Result = !
Value.getLValueOffset().isZero();
2490 case APValue::Indeterminate:
2493 Result = Val.
getInt().getBoolValue();
2495 case APValue::FixedPoint:
2501 case APValue::ComplexInt:
2505 case APValue::ComplexFloat:
2509 case APValue::LValue:
2511 case APValue::MemberPointer:
2517 case APValue::Vector:
2518 case APValue::Array:
2519 case APValue::Struct:
2520 case APValue::Union:
2521 case APValue::AddrLabelDiff:
2525 llvm_unreachable(
"unknown APValue kind");
2531 assert(E->
isPRValue() &&
"missing lvalue-to-rvalue conv in bool condition");
2538 template<
typename T>
2540 const T &SrcValue,
QualType DestType) {
2541 Info.CCEDiag(E, diag::note_constexpr_overflow)
2542 << SrcValue << DestType;
2543 return Info.noteUndefinedBehavior();
2549 unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
2553 Result =
APSInt(DestWidth, !DestSigned);
2555 if (
Value.convertToInteger(Result, llvm::APFloat::rmTowardZero, &ignored)
2556 & APFloat::opInvalidOp)
2567 llvm::RoundingMode RM =
2569 if (RM == llvm::RoundingMode::Dynamic)
2570 RM = llvm::RoundingMode::NearestTiesToEven;
2576 APFloat::opStatus St) {
2579 if (Info.InConstantContext)
2583 if ((St & APFloat::opInexact) &&
2587 Info.FFDiag(E, diag::note_constexpr_dynamic_rounding);
2591 if ((St != APFloat::opOK) &&
2594 FPO.getAllowFEnvAccess())) {
2595 Info.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
2599 if ((St & APFloat::opStatus::opInvalidOp) &&
2618 assert(isa<CastExpr>(E) || isa<CompoundAssignOperator>(E));
2620 APFloat::opStatus St;
2623 St = Result.convert(Info.Ctx.getFloatTypeSemantics(DestType), RM, &ignored);
2630 unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
2636 Result =
Value.getBoolValue();
2644 Result =
APFloat(Info.Ctx.getFloatTypeSemantics(DestType), 1);
2646 APFloat::opStatus St = Result.convertFromAPInt(
Value,
Value.isSigned(), RM);
2652 assert(FD->
isBitField() &&
"truncateBitfieldValue on non-bitfield");
2654 if (!
Value.isInt()) {
2658 assert(
Value.isLValue() &&
"integral value neither int nor lvalue?");
2664 unsigned OldBitWidth = Int.getBitWidth();
2666 if (NewBitWidth < OldBitWidth)
2667 Int = Int.trunc(NewBitWidth).extend(OldBitWidth);
2681 Res = SVal.
getFloat().bitcastToAPInt();
2686 unsigned VecSize = Info.Ctx.getTypeSize(VecTy);
2688 unsigned EltSize = Info.Ctx.getTypeSize(EltTy);
2689 bool BigEndian = Info.Ctx.getTargetInfo().isBigEndian();
2690 Res = llvm::APInt::getZero(VecSize);
2697 EltAsInt = Elt.
getFloat().bitcastToAPInt();
2701 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2704 unsigned BaseEltSize = EltAsInt.getBitWidth();
2706 Res |= EltAsInt.zextOrTrunc(VecSize).rotr(i*EltSize+BaseEltSize);
2708 Res |= EltAsInt.zextOrTrunc(VecSize).rotl(i*EltSize);
2714 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2721 template<
typename Operation>
2724 unsigned BitWidth, Operation Op,
2726 if (LHS.isUnsigned()) {
2727 Result = Op(LHS, RHS);
2731 APSInt Value(Op(LHS.extend(BitWidth), RHS.extend(BitWidth)),
false);
2732 Result =
Value.trunc(LHS.getBitWidth());
2733 if (Result.extend(BitWidth) !=
Value) {
2734 if (Info.checkingForUndefinedBehavior())
2735 Info.Ctx.getDiagnostics().Report(E->
getExprLoc(),
2736 diag::warn_integer_constant_overflow)
2747 bool HandleOverflowResult =
true;
2754 std::multiplies<APSInt>(), Result);
2757 std::plus<APSInt>(), Result);
2760 std::minus<APSInt>(), Result);
2761 case BO_And: Result = LHS & RHS;
return true;
2762 case BO_Xor: Result = LHS ^ RHS;
return true;
2763 case BO_Or: Result = LHS | RHS;
return true;
2767 Info.FFDiag(E, diag::note_expr_divide_by_zero);
2772 if (RHS.isNegative() && RHS.isAllOnes() && LHS.isSigned() &&
2773 LHS.isMinSignedValue())
2775 Info, E, -LHS.extend(LHS.getBitWidth() + 1), E->
getType());
2776 Result = (
Opcode == BO_Rem ? LHS % RHS : LHS / RHS);
2777 return HandleOverflowResult;
2779 if (Info.getLangOpts().OpenCL)
2782 static_cast<uint64_t>(LHS.getBitWidth() - 1)),
2784 else if (RHS.isSigned() && RHS.isNegative()) {
2787 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2794 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2796 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2797 << RHS << E->
getType() << LHS.getBitWidth();
2798 }
else if (LHS.isSigned() && !Info.getLangOpts().CPlusPlus20) {
2803 if (LHS.isNegative())
2804 Info.CCEDiag(E, diag::note_constexpr_lshift_of_negative) << LHS;
2805 else if (LHS.countLeadingZeros() < SA)
2806 Info.CCEDiag(E, diag::note_constexpr_lshift_discards);
2812 if (Info.getLangOpts().OpenCL)
2815 static_cast<uint64_t>(LHS.getBitWidth() - 1)),
2817 else if (RHS.isSigned() && RHS.isNegative()) {
2820 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2827 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2829 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2830 << RHS << E->
getType() << LHS.getBitWidth();
2835 case BO_LT: Result = LHS < RHS;
return true;
2836 case BO_GT: Result = LHS > RHS;
return true;
2837 case BO_LE: Result = LHS <= RHS;
return true;
2838 case BO_GE: Result = LHS >= RHS;
return true;
2839 case BO_EQ: Result = LHS == RHS;
return true;
2840 case BO_NE: Result = LHS != RHS;
return true;
2842 llvm_unreachable(
"BO_Cmp should be handled elsewhere");
2851 APFloat::opStatus St;
2857 St = LHS.multiply(RHS, RM);
2860 St = LHS.add(RHS, RM);
2863 St = LHS.subtract(RHS, RM);
2869 Info.CCEDiag(E, diag::note_expr_divide_by_zero);
2870 St = LHS.divide(RHS, RM);
2879 Info.CCEDiag(E, diag::note_constexpr_float_arithmetic) << LHS.isNaN();
2880 return Info.noteUndefinedBehavior();
2889 bool LHS = (LHSValue != 0);
2890 bool RHS = (RHSValue != 0);
2893 Result = LHS && RHS;
2895 Result = LHS || RHS;
2901 bool LHS = !LHSValue.isZero();
2902 bool RHS = !RHSValue.isZero();
2905 Result = LHS && RHS;
2907 Result = LHS || RHS;
2915 if (LHSValue.
getKind() == APValue::Int)
2917 RHSValue.
getInt(), Result);
2923 template <
typename APTy>
2926 const APTy &RHSValue,
APInt &Result) {
2929 llvm_unreachable(
"unsupported binary operator");
2931 Result = (LHSValue == RHSValue);
2934 Result = (LHSValue != RHSValue);
2937 Result = (LHSValue < RHSValue);
2940 Result = (LHSValue > RHSValue);
2943 Result = (LHSValue <= RHSValue);
2946 Result = (LHSValue >= RHSValue);
2962 if (LHSValue.
getKind() == APValue::Int)
2964 RHSValue.
getInt(), Result);
2976 "Operation not supported on vector types");
2980 QualType EltTy = VT->getElementType();
2987 "A vector result that isn't a vector OR uncalculated LValue");
2993 RHSValue.
getVectorLength() == NumElements &&
"Different vector sizes");
2997 for (
unsigned EltNum = 0; EltNum < NumElements; ++EltNum) {
3002 APSInt EltResult{Info.Ctx.getIntWidth(EltTy),
3004 bool Success =
true;
3006 if (BinaryOperator::isLogicalOp(
Opcode))
3008 else if (BinaryOperator::isComparisonOp(
Opcode))
3012 RHSElt.
getInt(), EltResult);
3018 ResultElements.emplace_back(EltResult);
3023 "Mismatched LHS/RHS/Result Type");
3032 ResultElements.emplace_back(LHSFloat);
3036 LHSValue =
APValue(ResultElements.data(), ResultElements.size());
3044 unsigned TruncatedElements) {
3045 SubobjectDesignator &D = Result.Designator;
3048 if (TruncatedElements == D.Entries.size())
3050 assert(TruncatedElements >= D.MostDerivedPathLength &&
3051 "not casting to a derived class");
3057 for (
unsigned I = TruncatedElements, N = D.Entries.size(); I != N; ++I) {
3061 if (isVirtualBaseClass(D.Entries[I]))
3067 D.Entries.resize(TruncatedElements);
3077 RL = &Info.Ctx.getASTRecordLayout(Derived);
3080 Obj.getLValueOffset() += RL->getBaseClassOffset(
Base);
3081 Obj.addDecl(Info, E,
Base,
false);
3090 if (!
Base->isVirtual())
3093 SubobjectDesignator &D = Obj.Designator;
3098 DerivedDecl = D.MostDerivedType->getAsCXXRecordDecl();
3104 const ASTRecordLayout &Layout = Info.Ctx.getASTRecordLayout(DerivedDecl);
3106 Obj.addDecl(Info, E, BaseDecl,
true);
3112 for (CastExpr::path_const_iterator PathI = E->
path_begin(),
3114 PathI != PathE; ++PathI) {
3118 Type = (*PathI)->getType();
3130 llvm_unreachable(
"Class must be derived from the passed in base class!");
3145 RL = &Info.Ctx.getASTRecordLayout(FD->
getParent());
3149 LVal.adjustOffset(Info.Ctx.toCharUnitsFromBits(RL->getFieldOffset(I)));
3150 LVal.addDecl(Info, E, FD);
3158 for (
const auto *C : IFD->
chain())
3170 Size = CharUnits::One();
3186 Size = Info.Ctx.getTypeSizeInChars(
Type);
3203 LVal.adjustOffsetAndIndex(Info, E, Adjustment, SizeOfPointee);
3211 APSInt::get(Adjustment));
3226 LVal.Offset += SizeOfComponent;
3228 LVal.addComplex(Info, E, EltTy, Imag);
3242 const VarDecl *VD, CallStackFrame *Frame,
3243 unsigned Version,
APValue *&Result) {
3248 Result = Frame->getTemporary(VD, Version);
3252 if (!isa<ParmVarDecl>(VD)) {
3259 "missing value for local variable");
3260 if (Info.checkingPotentialConstantExpression())
3265 diag::note_unimplemented_constexpr_lambda_feature_ast)
3266 <<
"captures not currently allowed";
3273 if (Info.EvaluatingDecl ==
Base) {
3274 Result = Info.EvaluatingDeclValue;
3278 if (isa<ParmVarDecl>(VD)) {
3281 if (!Info.checkingPotentialConstantExpression() ||
3282 !Info.CurrentCall->Callee ||
3284 if (Info.getLangOpts().CPlusPlus11) {
3285 Info.FFDiag(E, diag::note_constexpr_function_param_value_unknown)
3302 if (!Info.checkingPotentialConstantExpression()) {
3303 Info.FFDiag(E, diag::note_constexpr_var_init_unknown, 1)
3310 if (Init->isValueDependent()) {
3317 if (!Info.checkingPotentialConstantExpression()) {
3318 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
3319 ? diag::note_constexpr_ltor_non_constexpr
3320 : diag::note_constexpr_ltor_non_integral, 1)
3330 Info.FFDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3346 ((Info.getLangOpts().CPlusPlus || Info.getLangOpts().OpenCL) &&
3348 Info.CCEDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3355 Info.FFDiag(E, diag::note_constexpr_var_init_weak) << VD;
3371 E = Derived->
bases_end(); I != E; ++I, ++Index) {
3372 if (I->getType()->getAsCXXRecordDecl()->getCanonicalDecl() ==
Base)
3376 llvm_unreachable(
"base class missing from derived class's bases list");
3382 assert(!isa<SourceLocExpr>(Lit) &&
3383 "SourceLocExpr should have already been converted to a StringLiteral");
3386 if (
const auto *ObjCEnc = dyn_cast<ObjCEncodeExpr>(Lit)) {
3388 Info.Ctx.getObjCEncodingForType(ObjCEnc->getEncodedType(), Str);
3389 assert(Index <= Str.size() &&
"Index too large");
3390 return APSInt::getUnsigned(Str.c_str()[Index]);
3393 if (
auto PE = dyn_cast<PredefinedExpr>(Lit))
3394 Lit = PE->getFunctionName();
3397 Info.Ctx.getAsConstantArrayType(S->getType());
3398 assert(CAT &&
"string literal isn't an array");
3400 assert(CharType->
isIntegerType() &&
"unexpected character type");
3402 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3404 if (Index < S->getLength())
3405 Value = S->getCodeUnit(Index);
3417 AllocType.isNull() ? S->getType() : AllocType);
3418 assert(CAT &&
"string literal isn't an array");
3420 assert(CharType->
isIntegerType() &&
"unexpected character type");
3422 unsigned Elts = CAT->
getSize().getZExtValue();
3424 std::min(S->getLength(), Elts), Elts);
3425 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3427 if (Result.hasArrayFiller())
3429 for (
unsigned I = 0, N = Result.getArrayInitializedElts(); I != N; ++I) {
3430 Value = S->getCodeUnit(I);
3437 unsigned Size = Array.getArraySize();
3438 assert(Index < Size);
3441 unsigned OldElts = Array.getArrayInitializedElts();
3442 unsigned NewElts =
std::max(Index+1, OldElts * 2);
3447 for (
unsigned I = 0; I != OldElts; ++I)
3449 for (
unsigned I = OldElts; I != NewElts; ++I)
3453 Array.swap(NewValue);
3474 for (
auto *Field : RD->
fields())
3475 if (!Field->isUnnamedBitfield() &&
3479 for (
auto &BaseSpec : RD->
bases())
3497 for (
auto *Field : RD->
fields()) {
3502 if (Field->isMutable() &&
3504 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1) << AK << Field;
3505 Info.Note(Field->getLocation(), diag::note_declared_at);
3513 for (
auto &BaseSpec : RD->
bases())
3523 bool MutableSubobject =
false) {
3528 switch (Info.IsEvaluatingDecl) {
3529 case EvalInfo::EvaluatingDeclKind::None:
3532 case EvalInfo::EvaluatingDeclKind::Ctor:
3534 if (Info.EvaluatingDecl ==
Base)
3539 if (
auto *BaseE =
Base.dyn_cast<
const Expr *>())
3540 if (
auto *BaseMTE = dyn_cast<MaterializeTemporaryExpr>(BaseE))
3541 return Info.EvaluatingDecl == BaseMTE->getExtendingDecl();
3544 case EvalInfo::EvaluatingDeclKind::Dtor:
3549 if (MutableSubobject ||
Base != Info.EvaluatingDecl)
3558 llvm_unreachable(
"unknown evaluating decl kind");
3564 struct CompleteObject {
3572 CompleteObject() :
Value(nullptr) {}
3576 bool mayAccessMutableMembers(EvalInfo &Info,
AccessKinds AK)
const {
3587 if (!Info.getLangOpts().CPlusPlus14)
3592 explicit operator bool()
const {
return !
Type.isNull(); }
3597 bool IsMutable =
false) {
3611 template<
typename Sub
objectHandler>
3612 typename SubobjectHandler::result_type
3614 const SubobjectDesignator &Sub, SubobjectHandler &handler) {
3617 return handler.failed();
3618 if (Sub.isOnePastTheEnd() || Sub.isMostDerivedAnUnsizedArray()) {
3619 if (Info.getLangOpts().CPlusPlus11)
3620 Info.FFDiag(E, Sub.isOnePastTheEnd()
3621 ? diag::note_constexpr_access_past_end
3622 : diag::note_constexpr_access_unsized_array)
3623 << handler.AccessKind;
3626 return handler.failed();
3632 const FieldDecl *VolatileField =
nullptr;
3635 for (
unsigned I = 0, N = Sub.Entries.size(); ; ++I) {
3640 if (!Info.checkingPotentialConstantExpression())
3641 Info.FFDiag(E, diag::note_constexpr_access_uninit)
3643 return handler.failed();
3651 Info.isEvaluatingCtorDtor(
3654 ConstructionPhase::None) {
3655 ObjType = Info.Ctx.getCanonicalType(ObjType);
3664 if (Info.getLangOpts().CPlusPlus) {
3668 if (VolatileField) {
3671 Decl = VolatileField;
3672 }
else if (
auto *VD = Obj.Base.dyn_cast<
const ValueDecl*>()) {
3674 Loc = VD->getLocation();
3678 if (
auto *E = Obj.Base.dyn_cast<
const Expr *>())
3681 Info.FFDiag(E, diag::note_constexpr_access_volatile_obj, 1)
3682 << handler.AccessKind << DiagKind <<
Decl;
3683 Info.Note(Loc, diag::note_constexpr_volatile_here) << DiagKind;
3685 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
3687 return handler.failed();
3695 !Obj.mayAccessMutableMembers(Info, handler.AccessKind) &&
3697 return handler.failed();
3701 if (!handler.found(*O, ObjType))
3713 LastField =
nullptr;
3717 assert(CAT &&
"vla in literal type?");
3718 uint64_t Index = Sub.Entries[I].getAsArrayIndex();
3719 if (CAT->
getSize().ule(Index)) {
3722 if (Info.getLangOpts().CPlusPlus11)
3723 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3724 << handler.AccessKind;
3727 return handler.failed();
3734 else if (!
isRead(handler.AccessKind)) {
3741 uint64_t Index = Sub.Entries[I].getAsArrayIndex();
3743 if (Info.getLangOpts().CPlusPlus11)
3744 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3745 << handler.AccessKind;
3748 return handler.failed();
3754 assert(I == N - 1 &&
"extracting subobject of scalar?");
3763 }
else if (
const FieldDecl *Field = getAsField(Sub.Entries[I])) {
3764 if (Field->isMutable() &&
3765 !Obj.mayAccessMutableMembers(Info, handler.AccessKind)) {
3766 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1)
3767 << handler.AccessKind << Field;
3768 Info.Note(Field->getLocation(), diag::note_declared_at);
3769 return handler.failed();
3778 if (I == N - 1 && handler.AccessKind ==
AK_Construct) {
3786 Info.FFDiag(E, diag::note_constexpr_access_inactive_union_member)
3787 << handler.AccessKind << Field << !UnionField << UnionField;
3788 return handler.failed();
3797 if (Field->getType().isVolatileQualified())
3798 VolatileField = Field;
3811 struct ExtractSubobjectHandler {
3817 typedef bool result_type;
3818 bool failed() {
return false; }
3838 const CompleteObject &Obj,
3839 const SubobjectDesignator &Sub,
APValue &Result,
3842 ExtractSubobjectHandler Handler = {Info, E, Result, AK};
3847 struct ModifySubobjectHandler {
3852 typedef bool result_type;
3858 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
3864 bool failed() {
return false; }
3866 if (!checkConst(SubobjType))
3869 Subobj.
swap(NewVal);
3873 if (!checkConst(SubobjType))
3875 if (!NewVal.
isInt()) {
3884 if (!checkConst(SubobjType))
3896 const CompleteObject &Obj,
3897 const SubobjectDesignator &Sub,
3899 ModifySubobjectHandler Handler = { Info, NewVal, E };
3906 const SubobjectDesignator &A,
3907 const SubobjectDesignator &B,
3908 bool &WasArrayIndex) {
3909 unsigned I = 0, N =
std::min(A.Entries.size(), B.Entries.size());
3910 for (; I != N; ++I) {
3914 if (A.Entries[I].getAsArrayIndex() != B.Entries[I].getAsArrayIndex()) {
3915 WasArrayIndex =
true;
3923 if (A.Entries[I].getAsBaseOrMember() !=
3924 B.Entries[I].getAsBaseOrMember()) {
3925 WasArrayIndex =
false;
3928 if (
const FieldDecl *FD = getAsField(A.Entries[I]))
3930 ObjType = FD->getType();
3936 WasArrayIndex =
false;
3943 const SubobjectDesignator &A,
3944 const SubobjectDesignator &B) {
3945 if (A.Entries.size() != B.Entries.size())
3948 bool IsArray = A.MostDerivedIsArrayElement;
3949 if (IsArray && A.MostDerivedPathLength != A.Entries.size())
3958 return CommonLength >= A.Entries.size() - IsArray;
3965 if (LVal.InvalidBase) {
3967 return CompleteObject();
3971 Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
3972 return CompleteObject();
3975 CallStackFrame *Frame =
nullptr;
3977 if (LVal.getLValueCallIndex()) {
3978 std::tie(Frame,
Depth) =
3979 Info.getCallFrameAndDepth(LVal.getLValueCallIndex());
3981 Info.FFDiag(E, diag::note_constexpr_lifetime_ended, 1)
3982 << AK << LVal.Base.is<
const ValueDecl*>();
3984 return CompleteObject();
3995 if (Info.getLangOpts().CPlusPlus)
3996 Info.FFDiag(E, diag::note_constexpr_access_volatile_type)
4000 return CompleteObject();
4005 QualType BaseType = getType(LVal.Base);
4007 if (Info.getLangOpts().CPlusPlus14 && LVal.Base == Info.EvaluatingDecl &&
4011 BaseVal = Info.EvaluatingDeclValue;
4014 if (
auto *GD = dyn_cast<MSGuidDecl>(D)) {
4017 Info.FFDiag(E, diag::note_constexpr_modify_global);
4018 return CompleteObject();
4022 Info.FFDiag(E, diag::note_constexpr_unsupported_layout)
4024 return CompleteObject();
4026 return CompleteObject(LVal.Base, &
V, GD->getType());
4030 if (
auto *GCD = dyn_cast<UnnamedGlobalConstantDecl>(D)) {
4032 Info.FFDiag(E, diag::note_constexpr_modify_global);
4033 return CompleteObject();
4035 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&GCD->getValue()),
4040 if (
auto *TPO = dyn_cast<TemplateParamObjectDecl>(D)) {
4042 Info.FFDiag(E, diag::note_constexpr_modify_global);
4043 return CompleteObject();
4045 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&TPO->getValue()),
4056 const VarDecl *VD = dyn_cast<VarDecl>(D);
4063 return CompleteObject();
4066 bool IsConstant = BaseType.
isConstant(Info.Ctx);
4071 if (IsAccess && isa<ParmVarDecl>(VD)) {
4075 }
else if (Info.getLangOpts().CPlusPlus14 &&
4082 Info.FFDiag(E, diag::note_constexpr_modify_global);
4083 return CompleteObject();
4089 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4090 if (Info.getLangOpts().CPlusPlus) {
4091 Info.FFDiag(E, diag::note_constexpr_ltor_non_const_int, 1) << VD;
4092 Info.Note(VD->
getLocation(), diag::note_declared_at);
4096 return CompleteObject();
4098 }
else if (!IsAccess) {
4099 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4100 }
else if (IsConstant && Info.checkingPotentialConstantExpression() &&
4103 }
else if (IsConstant) {
4107 if (Info.getLangOpts().CPlusPlus) {
4108 Info.CCEDiag(E, Info.getLangOpts().CPlusPlus11
4109 ? diag::note_constexpr_ltor_non_constexpr
4110 : diag::note_constexpr_ltor_non_integral, 1)
4112 Info.Note(VD->
getLocation(), diag::note_declared_at);
4118 if (Info.getLangOpts().CPlusPlus) {
4119 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
4120 ? diag::note_constexpr_ltor_non_constexpr
4121 : diag::note_constexpr_ltor_non_integral, 1)
4123 Info.Note(VD->
getLocation(), diag::note_declared_at);
4127 return CompleteObject();
4132 return CompleteObject();
4134 std::optional<DynAlloc *> Alloc = Info.lookupDynamicAlloc(DA);
4136 Info.FFDiag(E, diag::note_constexpr_access_deleted_object) << AK;
4137 return CompleteObject();
4139 return CompleteObject(LVal.Base, &(*Alloc)->Value,
4140 LVal.Base.getDynamicAllocType());
4146 dyn_cast_or_null<MaterializeTemporaryExpr>(
Base)) {
4147 assert(MTE->getStorageDuration() ==
SD_Static &&
4148 "should have a frame for a non-global materialized temporary");
4175 if (!MTE->isUsableInConstantExpressions(Info.Ctx) &&
4178 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4179 Info.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
4180 Info.Note(MTE->getExprLoc(), diag::note_constexpr_temporary_here);
4181 return CompleteObject();
4184 BaseVal = MTE->getOrCreateValue(
false);
4185 assert(BaseVal &&
"got reference to unevaluated temporary");
4188 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4191 Info.FFDiag(E, diag::note_constexpr_access_unreadable_object)
4194 Info.Ctx.getLValueReferenceType(LValType));
4196 return CompleteObject();
4199 BaseVal = Frame->getTemporary(
Base, LVal.Base.getVersion());
4200 assert(BaseVal &&
"missing value for temporary");
4211 unsigned VisibleDepth =
Depth;
4212 if (llvm::isa_and_nonnull<ParmVarDecl>(
4213 LVal.Base.dyn_cast<
const ValueDecl *>()))
4215 if ((Frame && Info.getLangOpts().CPlusPlus14 &&
4216 Info.EvalStatus.HasSideEffects) ||
4217 (
isModification(AK) && VisibleDepth < Info.SpeculativeEvaluationDepth))
4218 return CompleteObject();
4220 return CompleteObject(LVal.getLValueBase(), BaseVal, BaseType);
4239 const LValue &LVal,
APValue &RVal,
4240 bool WantObjectRepresentation =
false) {
4241 if (LVal.Designator.Invalid)
4250 if (
Base && !LVal.getLValueCallIndex() && !
Type.isVolatileQualified()) {
4255 if (
Type.isVolatileQualified()) {
4261 if (!
Evaluate(Lit, Info, CLE->getInitializer()))
4281 Info.Note(CLE->getExprLoc(), diag::note_declared_at);
4286 CompleteObject LitObj(LVal.Base, &Lit,
Base->getType());
4288 }
else if (isa<StringLiteral>(
Base) || isa<PredefinedExpr>(
Base)) {
4291 assert(LVal.Designator.Entries.size() <= 1 &&
4292 "Can only read characters from string literals");
4293 if (LVal.Designator.Entries.empty()) {
4300 if (LVal.Designator.isOnePastTheEnd()) {
4301 if (Info.getLangOpts().CPlusPlus11)
4302 Info.FFDiag(Conv, diag::note_constexpr_access_past_end) << AK;
4307 uint64_t CharIndex = LVal.Designator.Entries[0].getAsArrayIndex();
4314 return Obj &&
extractSubobject(Info, Conv, Obj, LVal.Designator, RVal, AK);
4320 if (LVal.Designator.Invalid)
4323 if (!Info.getLangOpts().CPlusPlus14) {
4333 struct CompoundAssignSubobjectHandler {
4342 typedef bool result_type;
4347 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4353 bool failed() {
return false; }
4357 return found(Subobj.
getInt(), SubobjType);
4359 return found(Subobj.
getFloat(), SubobjType);
4360 case APValue::ComplexInt:
4361 case APValue::ComplexFloat:
4365 case APValue::LValue:
4366 return foundPointer(Subobj, SubobjType);
4367 case APValue::Vector:
4368 return foundVector(Subobj, SubobjType);
4377 if (!checkConst(SubobjType))
4388 if (!checkConst(SubobjType))
4407 Info.Ctx.getLangOpts());
4410 PromotedLHSType, FValue) &&
4420 return checkConst(SubobjType) &&
4427 if (!checkConst(SubobjType))
4445 LVal.setFrom(Info.Ctx, Subobj);
4448 LVal.moveInto(Subobj);
4459 const LValue &LVal,
QualType LValType,
4463 if (LVal.Designator.Invalid)
4466 if (!Info.getLangOpts().CPlusPlus14) {
4472 CompoundAssignSubobjectHandler Handler = { Info, E, PromotedLValType,
Opcode,
4474 return Obj &&
findSubobject(Info, E, Obj, LVal.Designator, Handler);
4478 struct IncDecSubobjectHandler {
4484 typedef bool result_type;
4489 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4495 bool failed() {
return false; }
4506 return found(Subobj.
getInt(), SubobjType);
4508 return found(Subobj.
getFloat(), SubobjType);
4509 case APValue::ComplexInt:
4513 case APValue::ComplexFloat:
4517 case APValue::LValue:
4518 return foundPointer(Subobj, SubobjType);
4526 if (!checkConst(SubobjType))
4548 bool WasNegative =
Value.isNegative();
4560 unsigned BitWidth =
Value.getBitWidth();
4561 APSInt ActualValue(
Value.sext(BitWidth + 1),
false);
4562 ActualValue.setBit(BitWidth);
4569 if (!checkConst(SubobjType))
4576 Value.add(One, APFloat::rmNearestTiesToEven);
4578 Value.subtract(One, APFloat::rmNearestTiesToEven);
4582 if (!checkConst(SubobjType))
4594 LVal.setFrom(Info.Ctx, Subobj);
4598 LVal.moveInto(Subobj);
4607 if (LVal.Designator.Invalid)
4610 if (!Info.getLangOpts().CPlusPlus14) {
4617 IncDecSubobjectHandler Handler = {Info, cast<UnaryOperator>(E), AK, Old};
4618 return Obj &&
findSubobject(Info, E, Obj, LVal.Designator, Handler);
4624 if (Object->getType()->isPointerType() && Object->isPRValue())
4627 if (Object->isGLValue())
4630 if (Object->getType()->isLiteralType(Info.Ctx))
4633 Info.FFDiag(Object, diag::note_constexpr_nonliteral) << Object->getType();
4652 bool IncludeMember =
true) {
4659 if (!MemPtr.getDecl()) {
4665 if (MemPtr.isDerivedMember()) {
4669 if (LV.Designator.MostDerivedPathLength + MemPtr.Path.size() >
4670 LV.Designator.Entries.size()) {
4674 unsigned PathLengthToMember =
4675 LV.Designator.Entries.size() - MemPtr.Path.size();
4676 for (
unsigned I = 0, N = MemPtr.Path.size(); I != N; ++I) {
4678 LV.Designator.Entries[PathLengthToMember + I]);
4688 PathLengthToMember))
4690 }
else if (!MemPtr.Path.empty()) {
4692 LV.Designator.Entries.reserve(LV.Designator.Entries.size() +
4693 MemPtr.Path.size() + IncludeMember);
4699 assert(RD &&
"member pointer access on non-class-type expression");
4701 for (
unsigned I = 1, N = MemPtr.Path.size(); I != N; ++I) {
4709 MemPtr.getContainingRecord()))
4714 if (IncludeMember) {
4715 if (
const FieldDecl *FD = dyn_cast<FieldDecl>(MemPtr.getDecl())) {
4719 dyn_cast<IndirectFieldDecl>(MemPtr.getDecl())) {
4723 llvm_unreachable(
"can't construct reference to bound member function");
4727 return MemPtr.getDecl();
4733 bool IncludeMember =
true) {
4737 if (Info.noteFailure()) {
4745 BO->
getRHS(), IncludeMember);
4752 SubobjectDesignator &D = Result.Designator;
4753 if (D.Invalid || !Result.checkNullPointer(Info, E,
CSK_Derived))
4761 if (D.MostDerivedPathLength + E->
path_size() > D.Entries.size()) {
4762 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4763 << D.MostDerivedType << TargetQT;
4769 unsigned NewEntriesSize = D.Entries.size() - E->
path_size();
4772 if (NewEntriesSize == D.MostDerivedPathLength)
4773 FinalType = D.MostDerivedType->getAsCXXRecordDecl();
4775 FinalType = getAsBaseClass(D.Entries[NewEntriesSize - 1]);
4777 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4778 << D.MostDerivedType << TargetQT;
4789 bool Success =
true;
4791 if (RD->isInvalidDecl()) {
4795 if (RD->isUnion()) {
4804 End = RD->bases_end();
4805 I !=
End; ++I, ++Index)
4808 for (
const auto *I : RD->fields()) {
4809 if (I->isUnnamedBitfield())
4812 Result.getStructField(I->getFieldIndex()));
4820 if (Result.hasArrayFiller())
4827 Result = APValue::IndeterminateValue();
4832 enum EvalStmtResult {
4856 APValue &Val = Info.CurrentCall->createTemporary(VD, VD->
getType(),
4857 ScopeKind::Block, Result);
4862 return Info.noteSideEffect();
4881 if (
const VarDecl *VD = dyn_cast<VarDecl>(D))
4885 for (
auto *BD : DD->bindings())
4886 if (
auto *VD = BD->getHoldingVar())
4894 if (Info.noteSideEffect())
4896 assert(E->
containsErrors() &&
"valid value-dependent expression should never "
4897 "reach invalid code path.");
4903 const Expr *Cond,
bool &Result) {
4906 FullExpressionRAII
Scope(Info);
4911 return Scope.destroy();
4924 struct TempVersionRAII {
4925 CallStackFrame &Frame;
4927 TempVersionRAII(CallStackFrame &Frame) : Frame(Frame) {
4928 Frame.pushTempVersion();
4931 ~TempVersionRAII() {
4932 Frame.popTempVersion();
4946 BlockScopeRAII
Scope(Info);
4948 EvalStmtResult ESR =
EvaluateStmt(Result, Info, Body, Case);
4949 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !
Scope.destroy())
4954 return ESR_Succeeded;
4957 return ESR_Continue;
4960 case ESR_CaseNotFound:
4963 llvm_unreachable(
"Invalid EvalStmtResult!");
4969 BlockScopeRAII
Scope(Info);
4976 if (ESR != ESR_Succeeded) {
4977 if (ESR != ESR_Failed && !
Scope.destroy())
4983 FullExpressionRAII CondScope(Info);
4994 if (!CondScope.destroy())
5003 if (isa<DefaultStmt>(SC)) {
5008 const CaseStmt *CS = cast<CaseStmt>(SC);
5019 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5023 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !
Scope.destroy())
5028 return ESR_Succeeded;
5034 case ESR_CaseNotFound:
5038 diag::note_constexpr_stmt_expr_unsupported);
5041 llvm_unreachable(
"Invalid EvalStmtResult!");
5051 Info.CCEDiag(VD->
getLocation(), diag::note_constexpr_static_local)
// NOTE(review): mangled extraction of the main statement evaluator — embedded
// original line numbers, split statements, and many missing interior lines
// (numbering jumps throughout). Keep byte layout; restore from upstream before
// any functional change.
//
// Visible structure: a step-limit check, then a first switch used while
// hunting for a pending switch `Case` target inside compound structures, then
// a second switch performing normal per-statement-class evaluation.
5061 if (!Info.nextStep(S))
// --- Case-hunting switch: only statement kinds that can contain the target
// case label are descended into; most others yield ESR_CaseNotFound below.
5067 switch (S->getStmtClass()) {
5068 case Stmt::CompoundStmtClass:
5072 case Stmt::LabelStmtClass:
5073 case Stmt::AttributedStmtClass:
5074 case Stmt::DoStmtClass:
5077 case Stmt::CaseStmtClass:
5078 case Stmt::DefaultStmtClass:
5083 case Stmt::IfStmtClass: {
5086 const IfStmt *IS = cast<IfStmt>(S);
// The if's init-statement/branches get their own block scope while hunting.
5090 BlockScopeRAII
Scope(Info);
5095 EvalStmtResult ESR =
EvaluateStmt(Result, Info, Init, Case);
5096 if (ESR != ESR_CaseNotFound) {
5097 assert(ESR != ESR_Succeeded);
5108 if (ESR == ESR_Failed)
5110 if (ESR != ESR_CaseNotFound)
5111 return Scope.destroy() ? ESR : ESR_Failed;
5113 return ESR_CaseNotFound;
5116 if (ESR == ESR_Failed)
5118 if (ESR != ESR_CaseNotFound)
5119 return Scope.destroy() ? ESR : ESR_Failed;
5120 return ESR_CaseNotFound;
5123 case Stmt::WhileStmtClass: {
5124 EvalStmtResult ESR =
5126 if (ESR != ESR_Continue)
5131 case Stmt::ForStmtClass: {
5132 const ForStmt *FS = cast<ForStmt>(S);
5133 BlockScopeRAII
Scope(Info);
5137 if (
const Stmt *Init = FS->getInit()) {
5138 EvalStmtResult ESR =
EvaluateStmt(Result, Info, Init, Case);
5139 if (ESR != ESR_CaseNotFound) {
5140 assert(ESR != ESR_Succeeded);
5145 EvalStmtResult ESR =
5147 if (ESR != ESR_Continue)
// The for-increment is a full-expression; value-dependent increments are
// handled specially (branch body among missing lines).
5149 if (
const auto *Inc = FS->getInc()) {
5150 if (Inc->isValueDependent()) {
5154 FullExpressionRAII IncScope(Info);
5162 case Stmt::DeclStmtClass: {
5165 const DeclStmt *DS = cast<DeclStmt>(S);
5166 for (
const auto *D : DS->
decls()) {
5167 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
// Uninitialized local-storage variables are of interest here — presumably
// they need no evaluation while stepping; TODO confirm against upstream.
5170 if (VD->hasLocalStorage() && !VD->getInit())
5178 return ESR_CaseNotFound;
5182 return ESR_CaseNotFound;
// --- Normal evaluation switch over the statement class.
5186 switch (S->getStmtClass()) {
// Expression statements: evaluated as full-expressions for side effects.
5188 if (
const Expr *E = dyn_cast<Expr>(S)) {
5197 FullExpressionRAII
Scope(Info);
5201 return ESR_Succeeded;
5204 Info.FFDiag(S->getBeginLoc());
5207 case Stmt::NullStmtClass:
5208 return ESR_Succeeded;
// Declarations: each declared variable is evaluated in its own
// full-expression scope.
5210 case Stmt::DeclStmtClass: {
5211 const DeclStmt *DS = cast<DeclStmt>(S);
5212 for (
const auto *D : DS->
decls()) {
5213 const VarDecl *VD = dyn_cast_or_null<VarDecl>(D);
5217 FullExpressionRAII
Scope(Info);
5220 if (!
Scope.destroy())
5223 return ESR_Succeeded;
// Return: evaluate the operand (if any) into Result.Value, then report
// ESR_Returned so callers unwind.
5226 case Stmt::ReturnStmtClass: {
5227 const Expr *RetExpr = cast<ReturnStmt>(S)->getRetValue();
5228 FullExpressionRAII
Scope(Info);
5237 :
Evaluate(Result.Value, Info, RetExpr)))
5239 return Scope.destroy() ? ESR_Returned : ESR_Failed;
// Compound statement: evaluate children in order inside one block scope;
// anything other than plain success propagates out.
5242 case Stmt::CompoundStmtClass: {
5243 BlockScopeRAII
Scope(Info);
5246 for (
const auto *BI : CS->
body()) {
5247 EvalStmtResult ESR =
EvaluateStmt(Result, Info, BI, Case);
5248 if (ESR == ESR_Succeeded)
5250 else if (ESR != ESR_CaseNotFound) {
5251 if (ESR != ESR_Failed && !
Scope.destroy())
5257 return ESR_CaseNotFound;
5258 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
// If: init-statement + condition, then the selected branch.
5261 case Stmt::IfStmtClass: {
5262 const IfStmt *IS = cast<IfStmt>(S);
5265 BlockScopeRAII
Scope(Info);
5268 if (ESR != ESR_Succeeded) {
5269 if (ESR != ESR_Failed && !
Scope.destroy())
// Some behavior is gated on whether we are in a constant context —
// surrounding lines missing; TODO confirm which (likely `if consteval`).
5279 if (!Info.InConstantContext)
5286 EvalStmtResult ESR =
EvaluateStmt(Result, Info, SubStmt);
5287 if (ESR != ESR_Succeeded) {
5288 if (ESR != ESR_Failed && !
Scope.destroy())
5293 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
// While: repeatedly evaluate condition + body until not Continue.
5296 case Stmt::WhileStmtClass: {
5297 const WhileStmt *WS = cast<WhileStmt>(S);
5299 BlockScopeRAII
Scope(Info);
5308 if (ESR != ESR_Continue) {
5309 if (ESR != ESR_Failed && !
Scope.destroy())
5313 if (!
Scope.destroy())
5316 return ESR_Succeeded;
// Do-while: body first, then the condition in its own full-expression scope.
5319 case Stmt::DoStmtClass: {
5320 const DoStmt *DS = cast<DoStmt>(S);
5324 if (ESR != ESR_Continue)
5333 FullExpressionRAII CondScope(Info);
5335 !CondScope.destroy())
5338 return ESR_Succeeded;
// For: outer scope for the init-statement, a fresh IterScope per iteration;
// failures must still destroy both scopes.
5341 case Stmt::ForStmtClass: {
5342 const ForStmt *FS = cast<ForStmt>(S);
5343 BlockScopeRAII ForScope(Info);
5344 if (FS->getInit()) {
5345 EvalStmtResult ESR =
EvaluateStmt(Result, Info, FS->getInit());
5346 if (ESR != ESR_Succeeded) {
5347 if (ESR != ESR_Failed && !ForScope.destroy())
5353 BlockScopeRAII IterScope(Info);
5354 bool Continue =
true;
5355 if (FS->getCond() && !
EvaluateCond(Info, FS->getConditionVariable(),
5356 FS->getCond(), Continue))
5362 if (ESR != ESR_Continue) {
5363 if (ESR != ESR_Failed && (!IterScope.destroy() || !ForScope.destroy()))
5368 if (
const auto *Inc = FS->getInc()) {
5369 if (Inc->isValueDependent()) {
5373 FullExpressionRAII IncScope(Info);
5379 if (!IterScope.destroy())
5382 return ForScope.destroy() ? ESR_Succeeded : ESR_Failed;
// Range-for: init, then range/begin/end statements, then per-iteration
// loop-variable binding + body inside InnerScope.
5385 case Stmt::CXXForRangeStmtClass: {
5387 BlockScopeRAII
Scope(Info);
5390 if (FS->getInit()) {
5391 EvalStmtResult ESR =
EvaluateStmt(Result, Info, FS->getInit());
5392 if (ESR != ESR_Succeeded) {
5393 if (ESR != ESR_Failed && !
Scope.destroy())
5400 EvalStmtResult ESR =
EvaluateStmt(Result, Info, FS->getRangeStmt());
5401 if (ESR != ESR_Succeeded) {
5402 if (ESR != ESR_Failed && !
Scope.destroy())
// A dependent range-for may lack synthesized begin/end/cond.
5409 if (!FS->getBeginStmt() || !FS->getEndStmt() || !FS->getCond())
5414 if (ESR != ESR_Succeeded) {
5415 if (ESR != ESR_Failed && !
Scope.destroy())
5420 if (ESR != ESR_Succeeded) {
5421 if (ESR != ESR_Failed && !
Scope.destroy())
5429 if (FS->getCond()->isValueDependent()) {
5434 bool Continue =
true;
5435 FullExpressionRAII CondExpr(Info);
5443 BlockScopeRAII InnerScope(Info);
5444 ESR =
EvaluateStmt(Result, Info, FS->getLoopVarStmt());
5445 if (ESR != ESR_Succeeded) {
5446 if (ESR != ESR_Failed && (!InnerScope.destroy() || !
Scope.destroy()))
5453 if (ESR != ESR_Continue) {
5454 if (ESR != ESR_Failed && (!InnerScope.destroy() || !
Scope.destroy()))
5458 if (FS->getInc()->isValueDependent()) {
5467 if (!InnerScope.destroy())
5471 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5474 case Stmt::SwitchStmtClass:
5477 case Stmt::ContinueStmtClass:
5478 return ESR_Continue;
5480 case Stmt::BreakStmtClass:
// Label/attributed/case/default/try statements all delegate to their
// sub-statement (try-block for CXXTryStmt), preserving the pending Case.
5483 case Stmt::LabelStmtClass:
5484 return EvaluateStmt(Result, Info, cast<LabelStmt>(S)->getSubStmt(), Case);
5486 case Stmt::AttributedStmtClass:
5489 return EvaluateStmt(Result, Info, cast<AttributedStmt>(S)->getSubStmt(),
5492 case Stmt::CaseStmtClass:
5493 case Stmt::DefaultStmtClass:
5494 return EvaluateStmt(Result, Info, cast<SwitchCase>(S)->getSubStmt(), Case);
5495 case Stmt::CXXTryStmtClass:
5497 return EvaluateStmt(Result, Info, cast<CXXTryStmt>(S)->getTryBlock(), Case);
// Fragment: diagnoses invoking a non-constexpr constructor in constant
// evaluation, unless this is value-initialization (signature truncated —
// only the trailing `IsValueInitialization` parameter is visible).
// In C++11-or-later mode the precise "invalid function" note is used;
// otherwise the generic invalid-subexpression note (line 5522).
5507 bool IsValueInitialization) {
5514 if (!CD->
isConstexpr() && !IsValueInitialization) {
5515 if (Info.getLangOpts().CPlusPlus11) {
5518 Info.CCEDiag(Loc, diag::note_constexpr_invalid_function, 1)
5520 Info.Note(CD->
getLocation(), diag::note_declared_at);
5522 Info.CCEDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
// NOTE(review): mangled extraction — interior lines missing. Fragment of the
// "may this function be called in a constant expression?" check. Visible
// checks, in order: (1) checking-potential-constant-expression with a missing
// definition of a constexpr declaration; (2) invalid declaration; (3) virtual
// calls diagnosed pre-C++20; (4) invalid definition; (5) success when the
// definition is constexpr and has a body; otherwise diagnose.
5536 if (Info.checkingPotentialConstantExpression() && !Definition &&
5537 Declaration->isConstexpr())
5543 if (Declaration->isInvalidDecl()) {
5544 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
// Virtual calls were not permitted in constant expressions before C++20.
5551 if (!Info.Ctx.getLangOpts().CPlusPlus20 && isa<CXXMethodDecl>(Declaration) &&
5552 cast<CXXMethodDecl>(Declaration)->isVirtual())
5553 Info.CCEDiag(CallLoc, diag::note_constexpr_virtual_call);
5555 if (Definition && Definition->isInvalidDecl()) {
5556 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
// Happy path: constexpr definition with a body.
5561 if (Definition && Definition->isConstexpr() && Body)
5564 if (Info.getLangOpts().CPlusPlus11) {
5565 const FunctionDecl *DiagDecl = Definition ? Definition : Declaration;
// Inheriting constructors: blame the inherited (base) constructor when it is
// the non-constexpr one, and use the dedicated inhctor diagnostic.
5569 auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
5570 if (CD && CD->isInheritingConstructor()) {
5571 auto *Inherited = CD->getInheritedConstructor().getConstructor();
5572 if (!Inherited->isConstexpr())
5573 DiagDecl = CD = Inherited;
5579 if (CD && CD->isInheritingConstructor())
5580 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_inhctor, 1)
5581 << CD->getInheritedConstructor().getConstructor()->getParent();
5583 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_function, 1)
5585 Info.Note(DiagDecl->
getLocation(), diag::note_declared_at);
5587 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
// Fragment of a subobject-access handler used only to validate that an
// lvalue's dynamic type may be inspected; it performs no actual access
// (failed() unconditionally returns false). Remaining members and the
// closing brace are among the missing lines.
5593 struct CheckDynamicTypeHandler {
5595 typedef bool result_type;
5596 bool failed() {
return false; }
// NOTE(review): mangled extraction. Fragment validating that `This` designates
// an object whose dynamic type can be used: rejects invalid designators,
// one-past-the-end pointers, and unsized-array subobjects, and diagnoses
// polymorphic objects whose dynamic type is unknowable here.
5607 if (
This.Designator.Invalid)
5619 if (
This.Designator.isOnePastTheEnd() ||
5620 This.Designator.isMostDerivedAnUnsizedArray()) {
// Pick the past-the-end vs. unsized-array note to match the failure mode.
5621 Info.FFDiag(E,
This.Designator.isOnePastTheEnd()
5622 ? diag::note_constexpr_access_past_end
5623 : diag::note_constexpr_access_unsized_array)
5626 }
else if (Polymorphic) {
// Diagnostic wants the reference type of the designated object.
5632 Info.Ctx.getLValueReferenceType(
This.Designator.getType(Info.Ctx));
5633 Info.FFDiag(E, diag::note_constexpr_polymorphic_unknown_dynamic_type)
// Drive the validation through the no-op handler declared above (AK is the
// access kind being checked).
5640 CheckDynamicTypeHandler Handler{AK};
// Fragment of a helper mapping a designator path length to the class at that
// depth: the full path length yields the most-derived type's class; a shorter
// length yields the base class named by the previous path entry. The function
// signature line is among the missing lines.
5663 unsigned PathLength) {
5664 assert(PathLength >=
Designator.MostDerivedPathLength && PathLength <=
5665 Designator.Entries.size() &&
"invalid path length");
5666 return (PathLength ==
Designator.MostDerivedPathLength)
5667 ?
Designator.MostDerivedType->getAsCXXRecordDecl()
5668 : getAsBaseClass(
Designator.Entries[PathLength - 1]);
// NOTE(review): mangled extraction. Fragment computing an object's dynamic
// type during constant evaluation; std::nullopt signals "unknown/unusable".
// Classes with virtual bases are rejected outright, then the designator path
// is walked to see whether some enclosing subobject is mid-construction or
// mid-destruction (in which case the dynamic type differs).
5680 return std::nullopt;
5689 This.Designator.MostDerivedType->getAsCXXRecordDecl();
// Virtual bases are not handled — bail out (line 5692).
5690 if (!Class || Class->getNumVBases()) {
5692 return std::nullopt;
5700 for (
unsigned PathLength =
This.Designator.MostDerivedPathLength;
5701 PathLength <= Path.size(); ++PathLength) {
// Ask the evaluator which construction/destruction phase this subobject
// (identified by base + path prefix) is currently in.
5702 switch (Info.isEvaluatingCtorDtor(
This.getLValueBase(),
5703 Path.slice(0, PathLength))) {
5704 case ConstructionPhase::Bases:
5705 case ConstructionPhase::DestroyingBases:
5710 case ConstructionPhase::None:
5711 case ConstructionPhase::AfterBases:
5712 case ConstructionPhase::AfterFields:
5713 case ConstructionPhase::Destroying:
5725 return std::nullopt;
// NOTE(review): mangled extraction. Fragment of constexpr virtual dispatch:
// starting from the computed dynamic type's path length, locate the callee's
// final overrider, reject pure-virtual calls, and when the overrider's return
// type differs from the statically named callee's, record the chain of
// covariant return types to adjust the result through afterwards.
5743 unsigned PathLength = DynType->PathLength;
5744 for (; PathLength <=
This.Designator.Entries.size(); ++PathLength) {
// Calling a pure virtual function is ill-formed in constant evaluation.
5757 if (Callee->isPure()) {
5758 Info.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << Callee;
5759 Info.Note(Callee->getLocation(), diag::note_declared_at);
// Covariant-return handling: collect each distinct return type along the
// derived-to-base path so the result pointer can be adjusted step by step.
5765 if (!Info.Ctx.hasSameUnqualifiedType(Callee->getReturnType(),
5767 CovariantAdjustmentPath.push_back(Callee->getReturnType());
5768 for (
unsigned CovariantPathLength = PathLength + 1;
5769 CovariantPathLength !=
This.Designator.Entries.size();
5770 ++CovariantPathLength) {
5775 if (Next && !Info.Ctx.hasSameUnqualifiedType(
5776 Next->getReturnType(), CovariantAdjustmentPath.back()))
5777 CovariantAdjustmentPath.push_back(Next->getReturnType());
5779 if (!Info.Ctx.hasSameUnqualifiedType(Found->
getReturnType(),
5780 CovariantAdjustmentPath.back()))
// NOTE(review): mangled extraction. Fragment applying a covariant return
// adjustment: the returned pointer/reference (an LValue APValue) is cast
// step-by-step from the overrider's class to each successive class named in
// Path. Null pointers need no adjustment (line 5799).
5797 assert(Result.isLValue() &&
5798 "unexpected kind of APValue for covariant return");
5799 if (Result.isNullPointer())
// Work on an LValue view of the APValue, then write it back at the end.
5803 LVal.setFrom(Info.Ctx, Result);
5805 const CXXRecordDecl *OldClass = Path[0]->getPointeeCXXRecordDecl();
5806 for (
unsigned I = 1; I != Path.size(); ++I) {
5807 const CXXRecordDecl *NewClass = Path[I]->getPointeeCXXRecordDecl();
5808 assert(OldClass && NewClass &&
"unexpected kind of covariant return");
// Only distinct classes require an actual derived-to-base adjustment
// (the cast call itself is among the missing lines — TODO confirm).
5809 if (OldClass != NewClass &&
5812 OldClass = NewClass;
5815 LVal.moveInto(Result);
5824 auto *BaseClass = BaseSpec.getType()->getAsCXXRecordDecl();
5826 return BaseSpec.getAccessSpecifier() ==
AS_public;
5828 llvm_unreachable(
"Base is not a direct base of Derived");