54 #include "llvm/ADT/APFixedPoint.h"
55 #include "llvm/ADT/Optional.h"
56 #include "llvm/ADT/SmallBitVector.h"
57 #include "llvm/Support/Debug.h"
58 #include "llvm/Support/SaveAndRestore.h"
59 #include "llvm/Support/raw_ostream.h"
63 #define DEBUG_TYPE "exprconstant"
65 using namespace clang;
66 using llvm::APFixedPoint;
70 using llvm::FixedPointSemantics;
using SourceLocExprScopeGuard =
    CurrentSourceLocExprScope::SourceLocExprScopeGuard;
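/// Find the alloc_size attribute, if any, for the callee of a call
/// expression, checking both the direct callee and the callee declaration.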
static const AllocSizeAttr *getAllocSizeAttr(const CallExpr *CE) {
  if (const FunctionDecl *DirectCallee = CE->getDirectCallee())
    return DirectCallee->getAttr<AllocSizeAttr>();
  if (const Decl *IndirectCallee = CE->getCalleeDecl())
    return IndirectCallee->getAttr<AllocSizeAttr>();
  return nullptr;
}
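/// Attempt to unwrap an expression down to a CallExpr whose callee carries an
/// alloc_size attribute, looking through full-expressions and casts.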
static const CallExpr *tryUnwrapAllocSizeCall(const Expr *E) {
  if (const auto *FE = dyn_cast<FullExpr>(E))
    E = FE->getSubExpr()->IgnoreParens();

  if (const auto *Cast = dyn_cast<CastExpr>(E))
    E = Cast->getSubExpr()->IgnoreParens();

  if (const auto *CE = dyn_cast<CallExpr>(E))
    return getAllocSizeAttr(CE) ? CE : nullptr;
  return nullptr;
}
  const auto *E = Base.dyn_cast<const Expr *>();
  llvm_unreachable("unknown ConstantExprKind");

  llvm_unreachable("unknown ConstantExprKind");
static const uint64_t AssumedSizeForUnsizedArray =
    std::numeric_limits<uint64_t>::max() / 2;

static unsigned
findMostDerivedSubobject(ASTContext &Ctx, APValue::LValueBase Base,
                         ArrayRef<APValue::LValuePathEntry> Path,
                         uint64_t &ArraySize, QualType &Type, bool &IsArray,
                         bool &FirstEntryIsUnsizedArray) {
  assert(!isBaseAnAllocSizeCall(Base) &&
         "Unsized arrays shouldn't appear here");
  unsigned MostDerivedLength = 0;
  Type = getType(Base);

  for (unsigned I = 0, N = Path.size(); I != N; ++I) {
    if (Type->isArrayType()) {
      const ArrayType *AT = Ctx.getAsArrayType(Type);
      Type = AT->getElementType();
      MostDerivedLength = I + 1;
      IsArray = true;

      if (auto *CAT = dyn_cast<ConstantArrayType>(AT)) {
        ArraySize = CAT->getSize().getZExtValue();
      } else {
        assert(I == 0 && "unexpected unsized array designator");
        FirstEntryIsUnsizedArray = true;
        ArraySize = AssumedSizeForUnsizedArray;
      }
    } else if (Type->isAnyComplexType()) {
      const ComplexType *CT = Type->castAs<ComplexType>();
      Type = CT->getElementType();
      ArraySize = 2;
      MostDerivedLength = I + 1;
      IsArray = true;
    } else if (const FieldDecl *FD = getAsField(Path[I])) {
      Type = FD->getType();
      ArraySize = 0;
      MostDerivedLength = I + 1;
      IsArray = false;
    } else {
      // Path[I] describes a base class.
      ArraySize = 0;
      IsArray = false;
    }
  }
  return MostDerivedLength;
}
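/// A path from a glvalue to a subobject of that glvalue, represented as a
/// sequence of array indices, base classes, and field designators.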
struct SubobjectDesignator {
  unsigned Invalid : 1;
  unsigned IsOnePastTheEnd : 1;
  unsigned FirstEntryIsAnUnsizedArray : 1;
  unsigned MostDerivedIsArrayElement : 1;
  unsigned MostDerivedPathLength : 28;
  uint64_t MostDerivedArraySize;
  SubobjectDesignator() : Invalid(true) {}

  explicit SubobjectDesignator(QualType T)
      : Invalid(false), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0),
        MostDerivedType(T) {}
  SubobjectDesignator(ASTContext &Ctx, const APValue &V)
      : Invalid(!V.isLValue() || !V.hasLValuePath()), IsOnePastTheEnd(false),
        FirstEntryIsAnUnsizedArray(false), MostDerivedIsArrayElement(false),
        MostDerivedPathLength(0), MostDerivedArraySize(0) {
    assert(V.isLValue() && "Non-LValue used to make an LValue designator?");
    if (!Invalid) {
      IsOnePastTheEnd = V.isLValueOnePastTheEnd();
      ArrayRef<PathEntry> VEntries = V.getLValuePath();
      Entries.insert(Entries.end(), VEntries.begin(), VEntries.end());
      if (V.getLValueBase()) {
        bool IsArray = false;
        bool FirstIsUnsizedArray = false;
        MostDerivedPathLength = findMostDerivedSubobject(
            Ctx, V.getLValueBase(), V.getLValuePath(), MostDerivedArraySize,
            MostDerivedType, IsArray, FirstIsUnsizedArray);
        MostDerivedIsArrayElement = IsArray;
        FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
      }
    }
  }
                    unsigned NewLength) {
    assert(Base && "cannot truncate path for null pointer");
    assert(NewLength <= Entries.size() && "not a truncation");

    if (NewLength == Entries.size())
      return;
    Entries.resize(NewLength);

    bool IsArray = false;
    bool FirstIsUnsizedArray = false;
    MostDerivedPathLength = findMostDerivedSubobject(
        Ctx, Base, Entries, MostDerivedArraySize, MostDerivedType, IsArray,
        FirstIsUnsizedArray);
    MostDerivedIsArrayElement = IsArray;
    FirstEntryIsAnUnsizedArray = FirstIsUnsizedArray;
  }
  /// Determine whether the most derived subobject is an array without a
  /// known bound.
  bool isMostDerivedAnUnsizedArray() const {
    assert(!Invalid && "Calling this makes no sense on invalid designators");
    return Entries.size() == 1 && FirstEntryIsAnUnsizedArray;
  }

  uint64_t getMostDerivedArraySize() const {
    assert(!isMostDerivedAnUnsizedArray() && "Unsized array has no size");
    return MostDerivedArraySize;
  }

  bool isOnePastTheEnd() const {
    assert(!Invalid);
    if (IsOnePastTheEnd)
      return true;
    if (!isMostDerivedAnUnsizedArray() && MostDerivedIsArrayElement &&
        Entries[MostDerivedPathLength - 1].getAsArrayIndex() ==
            MostDerivedArraySize)
      return true;
    return false;
  }

  std::pair<uint64_t, uint64_t> validIndexAdjustments() {
    if (Invalid || isMostDerivedAnUnsizedArray())
      return {0, 0};

    bool IsArray = MostDerivedPathLength == Entries.size() &&
                   MostDerivedIsArrayElement;
    uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
                                  : (uint64_t)IsOnePastTheEnd;
    uint64_t ArraySize =
        IsArray ? getMostDerivedArraySize() : (uint64_t)1;
    return {ArrayIndex, ArraySize - ArrayIndex};
  }

  /// Check that this refers to a valid subobject.
  bool isValidSubobject() const {
    if (Invalid)
      return false;
    return !isOnePastTheEnd();
  }
    assert(!Invalid && "invalid designator has no subobject type");
    return MostDerivedPathLength == Entries.size()
               ? MostDerivedType
               : Ctx.getRecordType(getAsBaseClass(Entries.back()));
  }

  /// Update this designator to refer to the first element within this array.
  void addArrayUnchecked(const ConstantArrayType *CAT) {
    Entries.push_back(PathEntry::ArrayIndex(0));

    MostDerivedType = CAT->getElementType();
    MostDerivedIsArrayElement = true;
    MostDerivedArraySize = CAT->getSize().getZExtValue();
    MostDerivedPathLength = Entries.size();
  }
  void addUnsizedArrayUnchecked(QualType ElemTy) {
    Entries.push_back(PathEntry::ArrayIndex(0));

    MostDerivedType = ElemTy;
    MostDerivedIsArrayElement = true;
    MostDerivedArraySize = AssumedSizeForUnsizedArray;
    MostDerivedPathLength = Entries.size();
  }
  void addDeclUnchecked(const Decl *D, bool Virtual = false) {
    Entries.push_back(APValue::BaseOrMemberType(D, Virtual));

    // If this isn't a base class, it's a new most-derived object.
    if (const FieldDecl *FD = dyn_cast<FieldDecl>(D)) {
      MostDerivedType = FD->getType();
      MostDerivedIsArrayElement = false;
      MostDerivedArraySize = 0;
      MostDerivedPathLength = Entries.size();
    }
  }
  void addComplexUnchecked(QualType EltTy, bool Imag) {
    Entries.push_back(PathEntry::ArrayIndex(Imag));

    MostDerivedType = EltTy;
    MostDerivedIsArrayElement = true;
    MostDerivedArraySize = 2;
    MostDerivedPathLength = Entries.size();
  }
  void diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info, const Expr *E);
  void diagnosePointerArithmetic(EvalInfo &Info, const Expr *E,
                                 const APSInt &N);

  /// Add N to the address of this subobject.
  void adjustIndex(EvalInfo &Info, const Expr *E, APSInt N) {
    if (Invalid || !N)
      return;
    uint64_t TruncatedN = N.extOrTrunc(64).getZExtValue();
    if (isMostDerivedAnUnsizedArray()) {
      diagnoseUnsizedArrayPointerArithmetic(Info, E);
      // Can't verify the index against an unknown bound; trust the caller to
      // catch any misbehavior.
      Entries.back() = PathEntry::ArrayIndex(
          Entries.back().getAsArrayIndex() + TruncatedN);
      return;
    }
    bool IsArray = MostDerivedPathLength == Entries.size() &&
                   MostDerivedIsArrayElement;
    uint64_t ArrayIndex = IsArray ? Entries.back().getAsArrayIndex()
                                  : (uint64_t)IsOnePastTheEnd;
    uint64_t ArraySize =
        IsArray ? getMostDerivedArraySize() : (uint64_t)1;

    if (N < -(int64_t)ArrayIndex || N > ArraySize - ArrayIndex) {
      // Calculate the actual index in a wide enough type, so we can include
      // it in the note.
      N = N.extend(std::max<unsigned>(N.getBitWidth() + 1, 65));
      (llvm::APInt &)N += ArrayIndex;
      assert(N.ugt(ArraySize) && "bounds check failed for in-bounds index");
      diagnosePointerArithmetic(Info, E, N);
      setInvalid();
      return;
    }

    ArrayIndex += TruncatedN;
    assert(ArrayIndex <= ArraySize &&
           "bounds check succeeded for out-of-bounds index");

    if (IsArray)
      Entries.back() = PathEntry::ArrayIndex(ArrayIndex);
    else
      IsOnePastTheEnd = (ArrayIndex != 0);
  }
enum class ScopeKind {
  Block,
  FullExpression,
  Call
};

/// A reference to a particular call and its arguments.
struct CallRef {
  CallRef() : OrigCallee(), CallIndex(0), Version() {}
  CallRef(const FunctionDecl *Callee, unsigned CallIndex, unsigned Version)
      : OrigCallee(Callee), CallIndex(CallIndex), Version(Version) {}

  explicit operator bool() const { return OrigCallee; }
  CallStackFrame *Caller;

  typedef std::pair<const void *, unsigned> MapKeyTy;
  typedef std::map<MapKeyTy, APValue> MapTy;

  unsigned CurTempVersion = TempVersionStack.back();

  unsigned getTempVersion() const { return TempVersionStack.back(); }

  void pushTempVersion() {
    TempVersionStack.push_back(++CurTempVersion);
  }

  void popTempVersion() {
    TempVersionStack.pop_back();
  }

    return {Callee, Index, ++CurTempVersion};
  llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields;

  APValue *getTemporary(const void *Key, unsigned Version) {
    MapKeyTy KV(Key, Version);
    auto LB = Temporaries.lower_bound(KV);
    if (LB != Temporaries.end() && LB->first == KV)
      return &LB->second;
    assert((LB == Temporaries.end() || LB->first.first != Key) &&
           (LB == Temporaries.begin() || std::prev(LB)->first.first != Key) &&
           "Element with key 'Key' found in map");
    return nullptr;
  }

  APValue *getCurrentTemporary(const void *Key) {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return &std::prev(UB)->second;
    return nullptr;
  }

  unsigned getCurrentTemporaryVersion(const void *Key) const {
    auto UB = Temporaries.upper_bound(MapKeyTy(Key, UINT_MAX));
    if (UB != Temporaries.begin() && std::prev(UB)->first.first == Key)
      return std::prev(UB)->first.second;
    return 0;
  }
  template<typename KeyT>
  APValue &createTemporary(const KeyT *Key, QualType T,
                           ScopeKind Scope, LValue &LV);

  void describe(llvm::raw_ostream &OS) override;

  Frame *getCaller() const override { return Caller; }
  SourceLocation getCallLocation() const override { return CallLoc; }
  const FunctionDecl *getCallee() const override { return Callee; }

  bool isStdFunction() const {
    for (const DeclContext *DC = Callee; DC; DC = DC->getParent())
      if (DC->isStdNamespace())
        return true;
    return false;
  }
  class ThisOverrideRAII {
  public:
    ThisOverrideRAII(CallStackFrame &Frame, const LValue *NewThis, bool Enable)
        : Frame(Frame), OldThis(Frame.This) {
      if (Enable)
        Frame.This = NewThis;
    }
    ~ThisOverrideRAII() {
      Frame.This = OldThis;
    }

  private:
    CallStackFrame &Frame;
    const LValue *OldThis;
  };
  llvm::PointerIntPair<APValue*, 2, ScopeKind> Value;

  bool isDestroyedAtEndOf(ScopeKind K) const {
    return (int)Value.getInt() >= (int)K;
  }

  bool endLifetime(EvalInfo &Info, bool RunDestructors) {
    if (RunDestructors) {
      SourceLocation Loc;
      if (const ValueDecl *VD = Base.dyn_cast<const ValueDecl *>())
        Loc = VD->getLocation();
      else if (const Expr *E = Base.dyn_cast<const Expr *>())
        Loc = E->getExprLoc();

  bool hasSideEffect() {
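/// A pair of an object's lvalue base and the path to the subobject currently
/// being constructed or destroyed, used to key the map of construction and
/// destruction phases.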
struct ObjectUnderConstruction {
  APValue::LValueBase Base;
  ArrayRef<APValue::LValuePathEntry> Path;

  friend bool operator==(const ObjectUnderConstruction &LHS,
                         const ObjectUnderConstruction &RHS) {
    return LHS.Base == RHS.Base && LHS.Path == RHS.Path;
  }
  friend llvm::hash_code hash_value(const ObjectUnderConstruction &Obj) {
    return llvm::hash_combine(Obj.Base, Obj.Path);
  }
};

enum class ConstructionPhase {
template<> struct DenseMapInfo<ObjectUnderConstruction> {
  using Base = DenseMapInfo<APValue::LValueBase>;
  static ObjectUnderConstruction getEmptyKey() {
    return {Base::getEmptyKey(), {}};
  }
  static ObjectUnderConstruction getTombstoneKey() {
    return {Base::getTombstoneKey(), {}};
  }
  static bool isEqual(const ObjectUnderConstruction &LHS,
                      const ObjectUnderConstruction &RHS) {
    return LHS == RHS;
  }
};
754 const Expr *AllocExpr =
nullptr;
765 if (
auto *
NE = dyn_cast<CXXNewExpr>(AllocExpr))
766 return NE->isArray() ? ArrayNew : New;
767 assert(isa<CallExpr>(AllocExpr));
772 struct DynAllocOrder {
800 CallStackFrame *CurrentCall;
803 unsigned CallStackDepth;
806 unsigned NextCallIndex;
815 bool EnableNewConstInterp;
819 CallStackFrame BottomFrame;
829 enum class EvaluatingDeclKind {
836 EvaluatingDeclKind IsEvaluatingDecl = EvaluatingDeclKind::None;
843 llvm::DenseMap<ObjectUnderConstruction, ConstructionPhase>
844 ObjectsUnderConstruction;
849 std::map<DynamicAllocLValue, DynAlloc, DynAllocOrder> HeapAllocs;
852 unsigned NumHeapAllocs = 0;
  struct EvaluatingConstructorRAII {
    EvalInfo &EI;
    ObjectUnderConstruction Object;
    bool DidInsert;
    EvaluatingConstructorRAII(EvalInfo &EI, ObjectUnderConstruction Object,
                              bool HasBases)
        : EI(EI), Object(Object) {
      DidInsert =
          EI.ObjectsUnderConstruction
              .insert({Object, HasBases ? ConstructionPhase::Bases
                                        : ConstructionPhase::AfterBases})
              .second;
    }
    void finishedConstructingBases() {
      EI.ObjectsUnderConstruction[Object] = ConstructionPhase::AfterBases;
    }
    void finishedConstructingFields() {
      EI.ObjectsUnderConstruction[Object] = ConstructionPhase::AfterFields;
    }
    ~EvaluatingConstructorRAII() {
      if (DidInsert) EI.ObjectsUnderConstruction.erase(Object);
    }
  };
878 struct EvaluatingDestructorRAII {
880 ObjectUnderConstruction
Object;
882 EvaluatingDestructorRAII(EvalInfo &EI, ObjectUnderConstruction Object)
884 DidInsert = EI.ObjectsUnderConstruction
885 .insert({
Object, ConstructionPhase::Destroying})
888 void startedDestroyingBases() {
889 EI.ObjectsUnderConstruction[
Object] =
890 ConstructionPhase::DestroyingBases;
892 ~EvaluatingDestructorRAII() {
894 EI.ObjectsUnderConstruction.erase(Object);
901 return ObjectsUnderConstruction.lookup({
Base, Path});
  unsigned SpeculativeEvaluationDepth = 0;

  uint64_t ArrayInitIndex = -1;

  bool HasActiveDiagnostic;

  bool HasFoldFailureDiagnostic;

  bool InConstantContext;

  bool CheckingPotentialConstantExpression = false;

  bool CheckingForUndefinedBehavior = false;

  enum EvaluationMode {
    EM_ConstantExpression,
    EM_ConstantExpressionUnevaluated,
    EM_ConstantFold,
    EM_IgnoreSideEffects,
  } EvalMode;

  bool checkingPotentialConstantExpression() const override {
    return CheckingPotentialConstantExpression;
  }

  bool checkingForUndefinedBehavior() const override {
    return CheckingForUndefinedBehavior;
  }
  EvalInfo(const ASTContext &C, Expr::EvalStatus &S, EvaluationMode Mode)
      : Ctx(const_cast<ASTContext &>(C)), EvalStatus(S), CurrentCall(nullptr),
        CallStackDepth(0), NextCallIndex(1),
        StepsLeft(C.getLangOpts().ConstexprStepLimit),
        EnableNewConstInterp(C.getLangOpts().EnableNewConstInterp),
        BottomFrame(*this, SourceLocation(), nullptr, nullptr, CallRef()),
        EvaluatingDecl((const ValueDecl *)nullptr),
        EvaluatingDeclValue(nullptr), HasActiveDiagnostic(false),
        HasFoldFailureDiagnostic(false), InConstantContext(false),
        EvalMode(Mode) {}

  ASTContext &getCtx() const override { return Ctx; }
  void setEvaluatingDecl(APValue::LValueBase Base, APValue &Value,
                         EvaluatingDeclKind EDK = EvaluatingDeclKind::Ctor) {
    EvaluatingDecl = Base;
    IsEvaluatingDecl = EDK;
    EvaluatingDeclValue = &Value;
  }

  bool CheckCallLimit(SourceLocation Loc) {
    // Don't perform any constexpr calls (other than the call we're checking)
    // when checking a potential constant expression.
    if (checkingPotentialConstantExpression() && CallStackDepth > 1)
      return false;
    if (NextCallIndex == 0) {
      // NextCallIndex has wrapped around.
      FFDiag(Loc, diag::note_constexpr_call_limit_exceeded);
      return false;
    }
    if (CallStackDepth <= getLangOpts().ConstexprCallDepth)
      return true;
    FFDiag(Loc, diag::note_constexpr_depth_limit_exceeded)
        << getLangOpts().ConstexprCallDepth;
    return false;
  }
  std::pair<CallStackFrame *, unsigned>
  getCallFrameAndDepth(unsigned CallIndex) {
    assert(CallIndex && "no call index in getCallFrameAndDepth");
    // We will eventually hit BottomFrame, which has Index 1, so Frame can't
    // be null in this loop.
    unsigned Depth = CallStackDepth;
    CallStackFrame *Frame = CurrentCall;
    while (Frame->Index > CallIndex) {
      Frame = Frame->Caller;
      --Depth;
    }
    if (Frame->Index == CallIndex)
      return {Frame, Depth};
    return {nullptr, 0};
  }
1028 bool nextStep(
const Stmt *S) {
1030 FFDiag(S->getBeginLoc(), diag::note_constexpr_step_limit_exceeded);
1041 auto It = HeapAllocs.find(DA);
1042 if (It != HeapAllocs.end())
1043 Result = &It->second;
1049 CallStackFrame *Frame = getCallFrameAndDepth(
Call.CallIndex).first;
1050 return Frame ? Frame->getTemporary(
Call.getOrigParam(PVD),
Call.Version)
1055 struct StdAllocatorCaller {
1056 unsigned FrameIndex;
1058 explicit operator bool()
const {
return FrameIndex != 0; };
1061 StdAllocatorCaller getStdAllocatorCaller(StringRef FnName)
const {
1062 for (
const CallStackFrame *Call = CurrentCall;
Call != &BottomFrame;
1064 const auto *MD = dyn_cast_or_null<CXXMethodDecl>(
Call->Callee);
1068 if (!FnII || !FnII->
isStr(FnName))
1072 dyn_cast<ClassTemplateSpecializationDecl>(MD->getParent());
1078 if (CTSD->isInStdNamespace() && ClassII &&
1079 ClassII->
isStr(
"allocator") && TAL.
size() >= 1 &&
1081 return {
Call->Index, TAL[0].getAsType()};
  void performLifetimeExtension() {
    // Disable the cleanups for lifetime-extended temporaries.
    llvm::erase_if(CleanupStack, [](Cleanup &C) {
      return !C.isDestroyedAtEndOf(ScopeKind::FullExpression);
    });
  }

  bool discardCleanups() {
    for (Cleanup &C : CleanupStack) {
      if (C.hasSideEffect() && !noteSideEffect()) {
        CleanupStack.clear();
        return false;
      }
    }
    CleanupStack.clear();
    return true;
  }
  interp::Frame *getCurrentFrame() override { return CurrentCall; }
  const interp::Frame *getBottomFrame() const override { return &BottomFrame; }

  bool hasActiveDiagnostic() override { return HasActiveDiagnostic; }
  void setActiveDiagnostic(bool Flag) override { HasActiveDiagnostic = Flag; }

  void setFoldFailureDiagnostic(bool Flag) override {
    HasFoldFailureDiagnostic = Flag;
  }

  bool hasPriorDiagnostic() override {
    if (!EvalStatus.Diag->empty()) {
      switch (EvalMode) {
      case EM_ConstantFold:
      case EM_IgnoreSideEffects:
        if (!HasFoldFailureDiagnostic)
          break;
        // We've already failed to fold something. Keep that diagnostic.
        LLVM_FALLTHROUGH;
      case EM_ConstantExpression:
      case EM_ConstantExpressionUnevaluated:
        setActiveDiagnostic(false);
        return true;
      }
    }
    return false;
  }

  unsigned getCallStackDepth() override { return CallStackDepth; }
  bool keepEvaluatingAfterSideEffect() {
    switch (EvalMode) {
    case EM_IgnoreSideEffects:
      return true;

    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
    case EM_ConstantFold:
      // By default, assume any side effect might be valid in some other
      // evaluation of this expression from a different context.
      return checkingPotentialConstantExpression() ||
             checkingForUndefinedBehavior();
    }
    llvm_unreachable("Missed EvalMode case");
  }

  /// Note that we have had a side-effect, and determine whether we should
  /// keep evaluating.
  bool noteSideEffect() {
    EvalStatus.HasSideEffects = true;
    return keepEvaluatingAfterSideEffect();
  }

  bool keepEvaluatingAfterUndefinedBehavior() {
    switch (EvalMode) {
    case EM_IgnoreSideEffects:
    case EM_ConstantFold:
      return true;

    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
      return checkingForUndefinedBehavior();
    }
    llvm_unreachable("Missed EvalMode case");
  }

  bool noteUndefinedBehavior() override {
    EvalStatus.HasUndefinedBehavior = true;
    return keepEvaluatingAfterUndefinedBehavior();
  }
  bool keepEvaluatingAfterFailure() const override {
    if (!StepsLeft)
      return false;

    switch (EvalMode) {
    case EM_ConstantExpression:
    case EM_ConstantExpressionUnevaluated:
    case EM_ConstantFold:
    case EM_IgnoreSideEffects:
      return checkingPotentialConstantExpression() ||
             checkingForUndefinedBehavior();
    }
    llvm_unreachable("Missed EvalMode case");
  }

  LLVM_NODISCARD bool noteFailure() {
    // Every evaluation failure that bubbles up from a subexpression implies
    // that a side-effect has potentially happened.
    bool KeepGoing = keepEvaluatingAfterFailure();
    EvalStatus.HasSideEffects |= KeepGoing;
    return KeepGoing;
  }
  class ArrayInitLoopIndex {
    EvalInfo &Info;
    uint64_t OuterIndex;

  public:
    ArrayInitLoopIndex(EvalInfo &Info)
        : Info(Info), OuterIndex(Info.ArrayInitIndex) {
      Info.ArrayInitIndex = 0;
    }
    ~ArrayInitLoopIndex() { Info.ArrayInitIndex = OuterIndex; }

    operator uint64_t&() { return Info.ArrayInitIndex; }
  };
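/// RAII helper that switches the evaluator into EM_ConstantFold mode and, on
/// destruction, drops diagnostics produced during the fold when none existed
/// beforehand (unless keepDiagnostics() was called).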
struct FoldConstant {
  EvalInfo &Info;
  bool Enabled;
  bool HadNoPriorDiags;
  EvalInfo::EvaluationMode OldMode;

  explicit FoldConstant(EvalInfo &Info, bool Enabled)
      : Info(Info), Enabled(Enabled),
        HadNoPriorDiags(Info.EvalStatus.Diag &&
                        Info.EvalStatus.Diag->empty() &&
                        !Info.EvalStatus.HasSideEffects),
        OldMode(Info.EvalMode) {
    if (Enabled)
      Info.EvalMode = EvalInfo::EM_ConstantFold;
  }
  void keepDiagnostics() { Enabled = false; }
  ~FoldConstant() {
    if (Enabled && HadNoPriorDiags && !Info.EvalStatus.Diag->empty() &&
        !Info.EvalStatus.HasSideEffects)
      Info.EvalStatus.Diag->clear();
    Info.EvalMode = OldMode;
  }
};
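/// RAII helper that temporarily evaluates in EM_IgnoreSideEffects mode and
/// restores the previous evaluation mode on destruction.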
struct IgnoreSideEffectsRAII {
  EvalInfo &Info;
  EvalInfo::EvaluationMode OldMode;
  explicit IgnoreSideEffectsRAII(EvalInfo &Info)
      : Info(Info), OldMode(Info.EvalMode) {
    Info.EvalMode = EvalInfo::EM_IgnoreSideEffects;
  }

  ~IgnoreSideEffectsRAII() { Info.EvalMode = OldMode; }
};
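/// RAII object used to redirect diagnostics during a speculative evaluation,
/// restoring the prior evaluation status and speculation depth when it goes
/// out of scope.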
class SpeculativeEvaluationRAII {
  EvalInfo *Info = nullptr;
  Expr::EvalStatus OldStatus;
  unsigned OldSpeculativeEvaluationDepth;

  void moveFromAndCancel(SpeculativeEvaluationRAII &&Other) {
    Info = Other.Info;
    OldStatus = Other.OldStatus;
    OldSpeculativeEvaluationDepth = Other.OldSpeculativeEvaluationDepth;
    Other.Info = nullptr;
  }

  void maybeRestoreState() {
    if (!Info)
      return;

    Info->EvalStatus = OldStatus;
    Info->SpeculativeEvaluationDepth = OldSpeculativeEvaluationDepth;
  }

public:
  SpeculativeEvaluationRAII() = default;

  SpeculativeEvaluationRAII(
      EvalInfo &Info, SmallVectorImpl<PartialDiagnosticAt> *NewDiag = nullptr)
      : Info(&Info), OldStatus(Info.EvalStatus),
        OldSpeculativeEvaluationDepth(Info.SpeculativeEvaluationDepth) {
    Info.EvalStatus.Diag = NewDiag;
    Info.SpeculativeEvaluationDepth = Info.CallStackDepth + 1;
  }

  SpeculativeEvaluationRAII(const SpeculativeEvaluationRAII &Other) = delete;
  SpeculativeEvaluationRAII(SpeculativeEvaluationRAII &&Other) {
    moveFromAndCancel(std::move(Other));
  }

  SpeculativeEvaluationRAII &operator=(SpeculativeEvaluationRAII &&Other) {
    maybeRestoreState();
    moveFromAndCancel(std::move(Other));
    return *this;
  }

  ~SpeculativeEvaluationRAII() { maybeRestoreState(); }
};
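/// RAII object for a block, full-expression, or call scope: records the
/// cleanup-stack depth on entry and runs the cleanups registered for this
/// scope kind when the scope is destroyed.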
1338 template<ScopeKind Kind>
1341 unsigned OldStackSize;
1343 ScopeRAII(EvalInfo &Info)
1344 : Info(Info), OldStackSize(Info.CleanupStack.size()) {
1347 Info.CurrentCall->pushTempVersion();
1349 bool destroy(
bool RunDestructors =
true) {
1350 bool OK =
cleanup(Info, RunDestructors, OldStackSize);
1355 if (OldStackSize != -1
U)
1359 Info.CurrentCall->popTempVersion();
1362 static bool cleanup(EvalInfo &Info,
bool RunDestructors,
1363 unsigned OldStackSize) {
1364 assert(OldStackSize <= Info.CleanupStack.size() &&
1365 "running cleanups out of order?");
1370 for (
unsigned I = Info.CleanupStack.size(); I > OldStackSize; --I) {
1371 if (Info.CleanupStack[I - 1].isDestroyedAtEndOf(
Kind)) {
1372 if (!Info.CleanupStack[I - 1].endLifetime(Info, RunDestructors)) {
1380 auto NewEnd = Info.CleanupStack.begin() + OldStackSize;
1381 if (
Kind != ScopeKind::Block)
1383 std::remove_if(NewEnd, Info.CleanupStack.end(), [](Cleanup &C) {
1384 return C.isDestroyedAtEndOf(Kind);
1386 Info.CleanupStack.erase(NewEnd, Info.CleanupStack.end());
1390 typedef ScopeRAII<ScopeKind::Block> BlockScopeRAII;
1391 typedef ScopeRAII<ScopeKind::FullExpression> FullExpressionRAII;
1392 typedef ScopeRAII<ScopeKind::Call> CallScopeRAII;
1395 bool SubobjectDesignator::checkSubobject(EvalInfo &Info,
const Expr *E,
1399 if (isOnePastTheEnd()) {
1400 Info.CCEDiag(E, diag::note_constexpr_past_end_subobject)
1411 void SubobjectDesignator::diagnoseUnsizedArrayPointerArithmetic(EvalInfo &Info,
1413 Info.CCEDiag(E, diag::note_constexpr_unsized_array_indexed);
1418 void SubobjectDesignator::diagnosePointerArithmetic(EvalInfo &Info,
1423 if (MostDerivedPathLength == Entries.size() && MostDerivedIsArrayElement)
1424 Info.CCEDiag(E, diag::note_constexpr_array_index)
1426 <<
static_cast<unsigned>(getMostDerivedArraySize());
1428 Info.CCEDiag(E, diag::note_constexpr_array_index)
CallStackFrame::CallStackFrame(EvalInfo &Info, SourceLocation CallLoc,
                               const FunctionDecl *Callee, const LValue *This,
                               CallRef Call)
    : Info(Info), Caller(Info.CurrentCall), Callee(Callee), This(This),
      Arguments(Call), CallLoc(CallLoc), Index(Info.NextCallIndex++) {
  Info.CurrentCall = this;
  ++Info.CallStackDepth;
}

CallStackFrame::~CallStackFrame() {
  assert(Info.CurrentCall == this && "calls retired out of order");
  --Info.CallStackDepth;
  Info.CurrentCall = Caller;
}
  llvm_unreachable("unknown access kind");

  llvm_unreachable("unknown access kind");
struct ComplexValue {
private:
  bool IsInt;

public:
  APSInt IntReal, IntImag;
  APFloat FloatReal, FloatImag;

  ComplexValue() : FloatReal(APFloat::Bogus()), FloatImag(APFloat::Bogus()) {}

  void makeComplexFloat() { IsInt = false; }
  bool isComplexFloat() const { return !IsInt; }
  APFloat &getComplexFloatReal() { return FloatReal; }
  APFloat &getComplexFloatImag() { return FloatImag; }

  void makeComplexInt() { IsInt = true; }
  bool isComplexInt() const { return IsInt; }
  APSInt &getComplexIntReal() { return IntReal; }
  APSInt &getComplexIntImag() { return IntImag; }

  void moveInto(APValue &v) const {
    if (isComplexFloat())
      v = APValue(FloatReal, FloatImag);
    else
      v = APValue(IntReal, IntImag);
  }
  void setFrom(const APValue &v) {
    assert(v.isComplexFloat() || v.isComplexInt());
    if (v.isComplexFloat()) {
      makeComplexFloat();
      FloatReal = v.getComplexFloatReal();
      FloatImag = v.getComplexFloatImag();
    } else {
      makeComplexInt();
      IntReal = v.getComplexIntReal();
      IntImag = v.getComplexIntImag();
    }
  }
};
  bool InvalidBase : 1;

  SubobjectDesignator &getLValueDesignator() { return Designator; }
  const SubobjectDesignator &getLValueDesignator() const { return Designator; }
  bool isNullPointer() const { return IsNullPtr; }

  unsigned getLValueCallIndex() const { return Base.getCallIndex(); }
  unsigned getLValueVersion() const { return Base.getVersion(); }
1566 assert(!InvalidBase &&
"APValues can't handle invalid LValue bases");
1572 assert(
V.isLValue() &&
"Setting LValue from a non-LValue?");
1573 Base =
V.getLValueBase();
1575 InvalidBase =
false;
1577 IsNullPtr =
V.isNullPointer();
1584 const auto *E = B.
get<
const Expr *>();
1585 assert((isa<MemberExpr>(E) || tryUnwrapAllocSizeCall(E)) &&
1586 "Unexpected type of invalid base");
1591 Offset = CharUnits::fromQuantity(0);
1592 InvalidBase = BInvalid;
1593 Designator = SubobjectDesignator(getType(B));
1601 InvalidBase =
false;
1612 moveInto(Printable);
  template <typename GenDiagType>
  bool checkNullPointerDiagnosingWith(const GenDiagType &GenDiag) {

  bool checkNullPointer(EvalInfo &Info, const Expr *E,
                        CheckSubobjectKind CSK) {
    return checkNullPointerDiagnosingWith([&Info, E, CSK] {
      Info.CCEDiag(E, diag::note_constexpr_null_subobject) << CSK;
    });
  }

  bool checkNullPointerForFoldAccess(EvalInfo &Info, const Expr *E,
                                     AccessKinds AK) {
    return checkNullPointerDiagnosingWith([&Info, E, AK] {
      Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
    });
  }
1653 void addDecl(EvalInfo &Info,
const Expr *E,
1654 const Decl *D,
bool Virtual =
false) {
1658 void addUnsizedArray(EvalInfo &Info,
const Expr *E,
QualType ElemTy) {
1660 Info.CCEDiag(E, diag::note_constexpr_unsupported_unsized_array);
1665 assert(getType(
Base)->isPointerType() || getType(
Base)->isArrayType());
1666 Designator.FirstEntryIsAnUnsizedArray =
true;
1674 void addComplex(EvalInfo &Info,
const Expr *E,
QualType EltTy,
bool Imag) {
1678 void clearIsNullPointer() {
1681 void adjustOffsetAndIndex(EvalInfo &Info,
const Expr *E,
1691 uint64_t Offset64 =
Offset.getQuantity();
1693 uint64_t Index64 = Index.extOrTrunc(64).getZExtValue();
1694 Offset = CharUnits::fromQuantity(Offset64 + ElemSize64 * Index64);
1698 clearIsNullPointer();
1703 clearIsNullPointer();
      : DeclAndIsDerivedMember(Decl, false) {}

  const ValueDecl *getDecl() const {
    return DeclAndIsDerivedMember.getPointer();
  }
  bool isDerivedMember() const {
    return DeclAndIsDerivedMember.getInt();
  }
  const CXXRecordDecl *getContainingRecord() const {
    return cast<CXXRecordDecl>(
        DeclAndIsDerivedMember.getPointer()->getDeclContext());
  }

  void moveInto(APValue &V) const {
    V = APValue(getDecl(), isDerivedMember(), Path);
  }
  void setFrom(const APValue &V) {
    assert(V.isMemberPointer());
    DeclAndIsDerivedMember.setPointer(V.getMemberPointerDecl());
    DeclAndIsDerivedMember.setInt(V.isMemberPointerToDerivedMember());
    Path.clear();
    ArrayRef<const CXXRecordDecl *> P = V.getMemberPointerPath();
    Path.insert(Path.end(), P.begin(), P.end());
  }

  llvm::PointerIntPair<const ValueDecl*, 1, bool> DeclAndIsDerivedMember;
1750 assert(!Path.empty());
1752 if (Path.size() >= 2)
1756 if (
Expected->getCanonicalDecl() !=
Class->getCanonicalDecl()) {
1772 if (!isDerivedMember()) {
1773 Path.push_back(Derived);
1776 if (!castBack(Derived))
1779 DeclAndIsDerivedMember.setInt(
false);
1787 DeclAndIsDerivedMember.setInt(
true);
1788 if (isDerivedMember()) {
1789 Path.push_back(
Base);
1792 return castBack(
Base);
static bool operator==(const MemberPtr &LHS, const MemberPtr &RHS) {
  if (!LHS.getDecl() || !RHS.getDecl())
    return !LHS.getDecl() && !RHS.getDecl();
  if (LHS.getDecl()->getCanonicalDecl() != RHS.getDecl()->getCanonicalDecl())
    return false;
  return LHS.Path == RHS.Path;
}
1809 bool AllowNonLiteralTypes =
false);
1811 bool InvalidBaseOK =
false);
1813 bool InvalidBaseOK =
false);
  if (Int.isUnsigned() || Int.isMinSignedValue()) {
    Int = Int.extend(Int.getBitWidth() + 1);
    Int.setIsSigned(true);
  }
1850 template<
typename KeyT>
1852 ScopeKind
Scope, LValue &LV) {
1853 unsigned Version = getTempVersion();
1856 return createLocal(
Base, Key, T,
Scope);
1862 assert(Args.CallIndex == Index &&
"creating parameter in wrong frame");
1868 return createLocal(
Base, PVD, PVD->
getType(), ScopeKind::Call);
1873 assert(
Base.getCallIndex() == Index &&
"lvalue for wrong frame");
1874 unsigned Version =
Base.getVersion();
1875 APValue &Result = Temporaries[MapKeyTy(Key, Version)];
1876 assert(Result.isAbsent() &&
"local created multiple times");
1882 if (Index <= Info.SpeculativeEvaluationDepth) {
1884 Info.noteSideEffect();
1886 Info.CleanupStack.push_back(Cleanup(&Result,
Base, T,
Scope));
    if (NumHeapAllocs > DynamicAllocLValue::getMaxIndex()) {
      FFDiag(E, diag::note_constexpr_heap_alloc_limit_exceeded);
      return nullptr;
    }

    DynamicAllocLValue DA(NumHeapAllocs++);
    LV.set(APValue::LValueBase::getDynamicAlloc(DA, T));
    auto Result = HeapAllocs.emplace(std::piecewise_construct,
                                     std::forward_as_tuple(DA), std::tuple<>());
    assert(Result.second && "reused a heap alloc index?");
    Result.first->second.AllocExpr = E;
    return &Result.first->second.Value;
  }
1908 unsigned ArgIndex = 0;
1909 bool IsMemberCall = isa<CXXMethodDecl>(Callee) &&
1910 !isa<CXXConstructorDecl>(Callee) &&
1911 cast<CXXMethodDecl>(Callee)->isInstance();
1916 if (
This && IsMemberCall) {
1918 This->moveInto(Val);
1920 This->Designator.MostDerivedType);
1922 Out <<
"->" << *
Callee <<
'(';
1923 IsMemberCall =
false;
1927 E =
Callee->param_end(); I != E; ++I, ++ArgIndex) {
1928 if (ArgIndex > (
unsigned)IsMemberCall)
1932 APValue *
V = Info.getParamSlot(Arguments, Param);
1934 V->printPretty(Out, Info.Ctx, Param->
getType());
1938 if (ArgIndex == 0 && IsMemberCall)
1939 Out <<
"->" << *
Callee <<
'(';
1953 return Info.noteSideEffect();
  return (Builtin == Builtin::BI__builtin___CFStringMakeConstantString ||
          Builtin == Builtin::BI__builtin___NSStringMakeConstantString ||
          Builtin == Builtin::BI__builtin_function_start);

  if (!B)
    return true;

  if (const VarDecl *VD = dyn_cast<VarDecl>(D))
    return VD->hasGlobalStorage();
  if (isa<TemplateParamObjectDecl>(D))
    return true;

  return isa<FunctionDecl, MSGuidDecl, UnnamedGlobalConstantDecl>(D);
1992 case Expr::CompoundLiteralExprClass: {
1996 case Expr::MaterializeTemporaryExprClass:
1999 return cast<MaterializeTemporaryExpr>(E)->getStorageDuration() ==
SD_Static;
2001 case Expr::StringLiteralClass:
2002 case Expr::PredefinedExprClass:
2003 case Expr::ObjCStringLiteralClass:
2004 case Expr::ObjCEncodeExprClass:
2006 case Expr::ObjCBoxedExprClass:
2007 return cast<ObjCBoxedExpr>(E)->isExpressibleAsConstantInitializer();
2008 case Expr::CallExprClass:
2011 case Expr::AddrLabelExprClass:
2015 case Expr::BlockExprClass:
2016 return !cast<BlockExpr>(E)->getBlockDecl()->hasCaptures();
2019 case Expr::SourceLocExprClass:
2021 case Expr::ImplicitValueInitExprClass:
  return LVal.Base.dyn_cast<const ValueDecl *>();

  if (Value.getLValueCallIndex())
    return false;
  return E && !isa<MaterializeTemporaryExpr>(E);
  if (!A.getLValueBase())
    return !B.getLValueBase();
  if (!B.getLValueBase())
    return false;

  if (A.getLValueBase().getOpaqueValue() !=
      B.getLValueBase().getOpaqueValue())
    return false;

  return A.getLValueCallIndex() == B.getLValueCallIndex() &&
         A.getLValueVersion() == B.getLValueVersion();
  assert(Base && "no location for a null lvalue");

  // For a parameter, find the corresponding call stack frame (if it still
  // exists), and point at the parameter of the function definition we
  // actually invoked.
  if (auto *PVD = dyn_cast_or_null<ParmVarDecl>(VD)) {
    unsigned Idx = PVD->getFunctionScopeIndex();
    for (CallStackFrame *F = Info.CurrentCall; F; F = F->Caller) {
      if (F->Arguments.CallIndex == Base.getCallIndex() &&
          F->Arguments.Version == Base.getVersion() && F->Callee &&
          Idx < F->Callee->getNumParams()) {
        VD = F->Callee->getParamDecl(Idx);
        break;
      }
    }
  }

  Info.Note(VD->getLocation(), diag::note_declared_at);

  Info.Note(E->getExprLoc(), diag::note_constexpr_temporary_here);

  Info.Note((*Alloc)->AllocExpr->getExprLoc(),
            diag::note_constexpr_dynamic_alloc_here);
2132 const SubobjectDesignator &
Designator = LVal.getLValueDesignator();
2140 if (isTemplateArgument(
Kind)) {
2141 int InvalidBaseKind = -1;
2144 InvalidBaseKind = 0;
2145 else if (isa_and_nonnull<StringLiteral>(BaseE))
2146 InvalidBaseKind = 1;
2147 else if (isa_and_nonnull<MaterializeTemporaryExpr>(BaseE) ||
2148 isa_and_nonnull<LifetimeExtendedTemporaryDecl>(BaseVD))
2149 InvalidBaseKind = 2;
2150 else if (
auto *PE = dyn_cast_or_null<PredefinedExpr>(BaseE)) {
2151 InvalidBaseKind = 3;
2152 Ident = PE->getIdentKindName();
2155 if (InvalidBaseKind != -1) {
2156 Info.FFDiag(Loc, diag::note_constexpr_invalid_template_arg)
2157 << IsReferenceType << !
Designator.Entries.empty() << InvalidBaseKind
  if (auto *FD = dyn_cast_or_null<FunctionDecl>(BaseVD)) {
    if (FD->isConsteval()) {
      Info.FFDiag(Loc, diag::note_consteval_address_accessible)
          << !Type->isAnyPointerType();
      Info.Note(FD->getLocation(), diag::note_declared_at);
      return false;
    }
  }
  if (Info.getLangOpts().CPlusPlus11) {
    // Evaluating an expression that refers to a local variable or temporary
    // is not a constant expression.
    Info.FFDiag(Loc, diag::note_constexpr_non_global, 1)
        << IsReferenceType << !Designator.Entries.empty()
        << !!BaseVD << BaseVD;

    auto *VarD = dyn_cast_or_null<VarDecl>(BaseVD);
    if (VarD && VarD->isConstexpr()) {
      // Suggest 'static' for a non-static local constexpr variable whose
      // address is being taken.
      Info.Note(VarD->getLocation(), diag::note_constexpr_not_static)
          << VarD
          << FixItHint::CreateInsertion(VarD->getBeginLoc(), "static ");
    }
  assert((Info.checkingPotentialConstantExpression() ||
          LVal.getLValueCallIndex() == 0) &&
         "have call index for global lvalue");

    Info.FFDiag(Loc, diag::note_constexpr_dynamic_alloc)
        << IsReferenceType << !Designator.Entries.empty();
  if (const VarDecl *Var = dyn_cast<const VarDecl>(BaseVD)) {
    // A thread_local variable is not usable in a constant expression.
    if (Var->getTLSKind())
      return false;

    // A dllimport variable never acts like a constant, unless we're
    // evaluating a value for use only in name mangling.
    if (!isForManglingOnly(Kind) && Var->hasAttr<DLLImportAttr>())
      return false;

    // In CUDA/HIP device compilation, only device-side variables have
    // constant addresses.
    if (Info.getCtx().getLangOpts().CUDA &&
        Info.getCtx().getLangOpts().CUDAIsDevice &&
        Info.getCtx().CUDAConstantEvalCtx.NoWrongSidedVars) {
      if ((!Var->hasAttr<CUDADeviceAttr>() &&
           !Var->hasAttr<CUDAConstantAttr>() &&
           !Var->getType()->isCUDADeviceBuiltinSurfaceType() &&
           !Var->getType()->isCUDADeviceBuiltinTextureType()) ||
          Var->hasAttr<HIPManagedAttr>())
        return false;
    }
  }
  if (const auto *FD = dyn_cast<const FunctionDecl>(BaseVD)) {
    if (Info.getLangOpts().CPlusPlus && !isForManglingOnly(Kind) &&
        FD->hasAttr<DLLImportAttr>())
      return false;
  } else if (const auto *MTE =
                 dyn_cast_or_null<MaterializeTemporaryExpr>(BaseE)) {
    if (CheckedTemps.insert(MTE).second) {
      QualType TempType = getType(Base);
      if (TempType.isDestructedType()) {
        Info.FFDiag(MTE->getExprLoc(),
                    diag::note_constexpr_unsupported_temporary_nontrivial_dtor)
            << TempType;
        return false;
      }

      APValue *V = MTE->getOrCreateValue(false);
      assert(V && "evaluation result refers to uninitialised temporary");
      if (!CheckEvaluationResult(CheckEvaluationResultKind::ConstantExpression,
                                 Info, MTE->getExprLoc(), TempType, *V,
                                 Kind, SourceLocation(), CheckedTemps))
        return false;
    }

  // Allow address constant expressions to be past-the-end pointers. This is
  // an extension: the standard requires them to point to an object.
  if (!IsReferenceType)
    return true;

    Info.FFDiag(Loc, diag::note_constexpr_past_end, 1)
        << !Designator.Entries.empty() << !!BaseVD << BaseVD;
  const auto *FD = dyn_cast_or_null<CXXMethodDecl>(Member);
  if (!FD)
    return true;
  if (FD->isConsteval()) {
    Info.FFDiag(Loc, diag::note_consteval_address_accessible) << 0;
    Info.Note(FD->getLocation(), diag::note_declared_at);
    return false;
  }
  return isForManglingOnly(Kind) || FD->isVirtual() ||
         !FD->hasAttr<DLLImportAttr>();

static bool CheckLiteralType(EvalInfo &Info, const Expr *E,
                             const LValue *This = nullptr) {
  if (This && Info.EvaluatingDecl == This->getLValueBase())
    return true;

  // Prvalue constant expressions must be of literal types.
  if (Info.getLangOpts().CPlusPlus11)
    Info.FFDiag(E, diag::note_constexpr_nonliteral)
        << E->getType();
  else
    Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
  return false;

  if (!Value.hasValue()) {
    Info.FFDiag(DiagLoc, diag::note_constexpr_uninitialized)
        << true << Type;
    if (SubobjectLoc.isValid())
      Info.Note(SubobjectLoc, diag::note_constexpr_subobject_declared_here);
    return false;
  }
2365 Type = AT->getValueType();
2370 if (
Value.isArray()) {
2372 for (
unsigned I = 0, N =
Value.getArrayInitializedElts(); I != N; ++I) {
2375 SubobjectLoc, CheckedTemps))
2378 if (!
Value.hasArrayFiller())
2381 Value.getArrayFiller(),
Kind, SubobjectLoc,
2384 if (
Value.isUnion() &&
Value.getUnionField()) {
2386 CERK, Info, DiagLoc,
Value.getUnionField()->getType(),
2387 Value.getUnionValue(),
Kind,
Value.getUnionField()->getLocation(),
2390 if (
Value.isStruct()) {
2392 if (
const CXXRecordDecl *CD = dyn_cast<CXXRecordDecl>(RD)) {
2393 unsigned BaseIndex = 0;
2397 BS.getBeginLoc(), CheckedTemps))
2402 for (
const auto *I : RD->
fields()) {
2403 if (I->isUnnamedBitfield())
2407 Value.getStructField(I->getFieldIndex()),
2408 Kind, I->getLocation(), CheckedTemps))
2413 if (
Value.isLValue() &&
2416 LVal.setFrom(Info.Ctx,
Value);
2421 if (
Value.isMemberPointer() &&
  if (!Info.HeapAllocs.empty()) {
    Info.CCEDiag(Info.HeapAllocs.begin()->second.AllocExpr,
                 diag::note_constexpr_memory_leak)
        << unsigned(Info.HeapAllocs.size() - 1);
  }

  if (!Value.getLValueBase()) {
    // A null base expression indicates a null pointer: false unless the
    // offset is nonzero.
    Result = !Value.getLValueOffset().isZero();
    return true;
  }
  case APValue::Indeterminate:
    return false;
  case APValue::Int:
    Result = Val.getInt().getBoolValue();
    return true;
  case APValue::FixedPoint:
    Result = Val.getFixedPoint().getBoolValue();
    return true;
  case APValue::ComplexInt:
    Result = Val.getComplexIntReal().getBoolValue() ||
             Val.getComplexIntImag().getBoolValue();
    return true;
  case APValue::ComplexFloat:
    Result = !Val.getComplexFloatReal().isZero() ||
             !Val.getComplexFloatImag().isZero();
    return true;
  case APValue::LValue:
    return EvalPointerValueAsBool(Val, Result);
  case APValue::MemberPointer:
    Result = Val.getMemberPointerDecl();
    return true;
  case APValue::Vector:
  case APValue::Array:
  case APValue::Struct:
  case APValue::Union:
  case APValue::AddrLabelDiff:
    return false;
  }

  llvm_unreachable("unknown APValue kind");
  assert(E->isPRValue() && "missing lvalue-to-rvalue conv in bool condition");
template<typename T>
static bool HandleOverflow(EvalInfo &Info, const Expr *E,
                           const T &SrcValue, QualType DestType) {
  Info.CCEDiag(E, diag::note_constexpr_overflow)
      << SrcValue << DestType;
  return Info.noteUndefinedBehavior();
}
  unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
  bool DestSigned = DestType->isSignedIntegerOrEnumerationType();

  Result = APSInt(DestWidth, !DestSigned);
  bool ignored;
  if (Value.convertToInteger(Result, llvm::APFloat::rmTowardZero, &ignored)
      & APFloat::opInvalidOp)
    return HandleOverflow(Info, E, Value, DestType);

  llvm::RoundingMode RM =
      E->getFPFeaturesInEffect(Info.Ctx.getLangOpts()).getRoundingMode();
  DynamicRM = (RM == llvm::RoundingMode::Dynamic);
  if (RM == llvm::RoundingMode::Dynamic)
    RM = llvm::RoundingMode::NearestTiesToEven;
static bool checkFloatingPointResult(EvalInfo &Info, const Expr *E,
                                     APFloat::opStatus St) {
  // In a constant context, assume the default floating-point environment.
  if (Info.InConstantContext)
    return true;

  FPOptions FPO = E->getFPFeaturesInEffect(Info.Ctx.getLangOpts());
  if ((St & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // An inexact result depends on the rounding mode; with a dynamic mode the
    // evaluation cannot be done at compile time.
    Info.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((St != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getFPExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    Info.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((St & APFloat::opStatus::opInvalidOp) &&
      FPO.getFPExceptionMode() != LangOptions::FPE_Ignore) {
2615 assert(isa<CastExpr>(E) || isa<CompoundAssignOperator>(E));
2618 APFloat::opStatus St;
2619 APFloat
Value = Result;
2621 St = Result.convert(Info.Ctx.getFloatTypeSemantics(DestType), RM, &ignored);
2628 unsigned DestWidth = Info.Ctx.getIntWidth(DestType);
2634 Result =
Value.getBoolValue();
2641 QualType DestType, APFloat &Result) {
2642 Result = APFloat(Info.Ctx.getFloatTypeSemantics(DestType), 1);
2643 APFloat::opStatus St = Result.convertFromAPInt(
Value,
Value.isSigned(),
2644 APFloat::rmNearestTiesToEven);
2645 if (!Info.InConstantContext && St != llvm::APFloatBase::opOK &&
2647 Info.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
2655 assert(FD->
isBitField() &&
"truncateBitfieldValue on non-bitfield");
2657 if (!
Value.isInt()) {
2661 assert(
Value.isLValue() &&
"integral value neither int nor lvalue?");
2667 unsigned OldBitWidth = Int.getBitWidth();
2669 if (NewBitWidth < OldBitWidth)
2670 Int = Int.trunc(NewBitWidth).extend(OldBitWidth);
2684 Res = SVal.
getFloat().bitcastToAPInt();
2689 unsigned VecSize = Info.Ctx.getTypeSize(VecTy);
2691 unsigned EltSize = Info.Ctx.getTypeSize(EltTy);
2692 bool BigEndian = Info.Ctx.getTargetInfo().isBigEndian();
2693 Res = llvm::APInt::getZero(VecSize);
2700 EltAsInt = Elt.
getFloat().bitcastToAPInt();
2704 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
2707 unsigned BaseEltSize = EltAsInt.getBitWidth();
2709 Res |= EltAsInt.zextOrTrunc(VecSize).rotr(i*EltSize+BaseEltSize);
2711 Res |= EltAsInt.zextOrTrunc(VecSize).rotl(i*EltSize);
2717 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
template <typename Operation>
static bool CheckedIntArithmetic(EvalInfo &Info, const Expr *E,
                                 const APSInt &LHS, const APSInt &RHS,
                                 unsigned BitWidth, Operation Op,
                                 APSInt &Result) {
  if (LHS.isUnsigned()) {
    Result = Op(LHS, RHS);
    return true;
  }

  APSInt Value(Op(LHS.extend(BitWidth), RHS.extend(BitWidth)), false);
  Result = Value.trunc(LHS.getBitWidth());
  if (Result.extend(BitWidth) != Value) {
    if (Info.checkingForUndefinedBehavior())
      Info.Ctx.getDiagnostics().Report(E->getExprLoc(),
                                       diag::warn_integer_constant_overflow)
  case BO_Mul:
    return CheckedIntArithmetic(Info, E, LHS, RHS, LHS.getBitWidth() * 2,
                                std::multiplies<APSInt>(), Result);
  case BO_Add:
    return CheckedIntArithmetic(Info, E, LHS, RHS, LHS.getBitWidth() + 1,
                                std::plus<APSInt>(), Result);
  case BO_Sub:
    return CheckedIntArithmetic(Info, E, LHS, RHS, LHS.getBitWidth() + 1,
                                std::minus<APSInt>(), Result);
  case BO_And: Result = LHS & RHS; return true;
  case BO_Xor: Result = LHS ^ RHS; return true;
  case BO_Or:  Result = LHS | RHS; return true;
  case BO_Div:
  case BO_Rem:
    if (RHS == 0) {
      Info.FFDiag(E, diag::note_expr_divide_by_zero);
      return false;
    }
    Result = (Opcode == BO_Rem ? LHS % RHS : LHS / RHS);
    // Check for overflow: INT_MIN / -1 and INT_MIN % -1.
    if (RHS.isNegative() && RHS.isAllOnes() && LHS.isSigned() &&
        LHS.isMinSignedValue())
      return HandleOverflow(Info, E, -LHS.extend(LHS.getBitWidth() + 1),
                            E->getType());
    return true;
2781 if (Info.getLangOpts().OpenCL)
2784 static_cast<uint64_t
>(LHS.getBitWidth() - 1)),
2786 else if (RHS.isSigned() && RHS.isNegative()) {
2789 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2796 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2798 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2799 << RHS << E->
getType() << LHS.getBitWidth();
2800 }
else if (LHS.isSigned() && !Info.getLangOpts().CPlusPlus20) {
2805 if (LHS.isNegative())
2806 Info.CCEDiag(E, diag::note_constexpr_lshift_of_negative) << LHS;
2807 else if (LHS.countLeadingZeros() < SA)
2808 Info.CCEDiag(E, diag::note_constexpr_lshift_discards);
2814 if (Info.getLangOpts().OpenCL)
2817 static_cast<uint64_t
>(LHS.getBitWidth() - 1)),
2819 else if (RHS.isSigned() && RHS.isNegative()) {
2822 Info.CCEDiag(E, diag::note_constexpr_negative_shift) << RHS;
2829 unsigned SA = (
unsigned) RHS.getLimitedValue(LHS.getBitWidth()-1);
2831 Info.CCEDiag(E, diag::note_constexpr_large_shift)
2832 << RHS << E->
getType() << LHS.getBitWidth();
  case BO_LT: Result = LHS < RHS; return true;
  case BO_GT: Result = LHS > RHS; return true;
  case BO_LE: Result = LHS <= RHS; return true;
  case BO_GE: Result = LHS >= RHS; return true;
  case BO_EQ: Result = LHS == RHS; return true;
  case BO_NE: Result = LHS != RHS; return true;
  case BO_Cmp:
    llvm_unreachable("BO_Cmp should be handled elsewhere");
2851 const APFloat &RHS) {
2854 APFloat::opStatus St;
2860 St = LHS.multiply(RHS, RM);
2863 St = LHS.add(RHS, RM);
2866 St = LHS.subtract(RHS, RM);
2872 Info.CCEDiag(E, diag::note_expr_divide_by_zero);
2873 St = LHS.divide(RHS, RM);
2882 Info.CCEDiag(E, diag::note_constexpr_float_arithmetic) << LHS.isNaN();
2883 return Info.noteUndefinedBehavior();
2892 bool LHS = (LHSValue != 0);
2893 bool RHS = (RHSValue != 0);
2896 Result = LHS && RHS;
2898 Result = LHS || RHS;
2903 const APFloat &RHSValue,
APInt &Result) {
2904 bool LHS = !LHSValue.isZero();
2905 bool RHS = !RHSValue.isZero();
2908 Result = LHS && RHS;
2910 Result = LHS || RHS;
2918 if (LHSValue.
getKind() == APValue::Int)
2920 RHSValue.
getInt(), Result);
2926 template <
typename APTy>
2929 const APTy &RHSValue,
APInt &Result) {
2932 llvm_unreachable(
"unsupported binary operator");
2934 Result = (LHSValue == RHSValue);
2937 Result = (LHSValue != RHSValue);
2940 Result = (LHSValue < RHSValue);
2943 Result = (LHSValue > RHSValue);
2946 Result = (LHSValue <= RHSValue);
2949 Result = (LHSValue >= RHSValue);
2965 if (LHSValue.
getKind() == APValue::Int)
2967 RHSValue.
getInt(), Result);
2979 "Operation not supported on vector types");
2983 QualType EltTy = VT->getElementType();
2990 "A vector result that isn't a vector OR uncalculated LValue");
2996 RHSValue.
getVectorLength() == NumElements &&
"Different vector sizes");
3000 for (
unsigned EltNum = 0; EltNum < NumElements; ++EltNum) {
3005 APSInt EltResult{Info.Ctx.getIntWidth(EltTy),
3007 bool Success =
true;
3009 if (BinaryOperator::isLogicalOp(
Opcode))
3011 else if (BinaryOperator::isComparisonOp(
Opcode))
3015 RHSElt.
getInt(), EltResult);
3021 ResultElements.emplace_back(EltResult);
3026 "Mismatched LHS/RHS/Result Type");
3027 APFloat LHSFloat = LHSElt.
getFloat();
3035 ResultElements.emplace_back(LHSFloat);
3039 LHSValue =
APValue(ResultElements.data(), ResultElements.size());
                               unsigned TruncatedElements) {
  SubobjectDesignator &D = Result.Designator;

  // Check we actually point to a derived class object.
  if (TruncatedElements == D.Entries.size())
    return true;
  assert(TruncatedElements >= D.MostDerivedPathLength &&
         "not casting to a derived class");

  for (unsigned I = TruncatedElements, N = D.Entries.size(); I != N; ++I) {
    if (isVirtualBaseClass(D.Entries[I]))

  D.Entries.resize(TruncatedElements);
3080 RL = &Info.Ctx.getASTRecordLayout(Derived);
3083 Obj.getLValueOffset() += RL->getBaseClassOffset(
Base);
3084 Obj.addDecl(Info, E,
Base,
false);
3093 if (!
Base->isVirtual())
3096 SubobjectDesignator &D = Obj.Designator;
3101 DerivedDecl = D.MostDerivedType->getAsCXXRecordDecl();
3107 const ASTRecordLayout &Layout = Info.Ctx.getASTRecordLayout(DerivedDecl);
3109 Obj.addDecl(Info, E, BaseDecl,
true);
3115 for (CastExpr::path_const_iterator PathI = E->
path_begin(),
3117 PathI != PathE; ++PathI) {
3121 Type = (*PathI)->getType();
3133 llvm_unreachable(
"Class must be derived from the passed in base class!");
3148 RL = &Info.Ctx.getASTRecordLayout(FD->
getParent());
3152 LVal.adjustOffset(Info.Ctx.toCharUnitsFromBits(RL->getFieldOffset(I)));
3153 LVal.addDecl(Info, E, FD);
3161 for (
const auto *C : IFD->
chain())
3173 Size = CharUnits::One();
3189 Size = Info.Ctx.getTypeSizeInChars(
Type);
3206 LVal.adjustOffsetAndIndex(Info, E, Adjustment, SizeOfPointee);
3212 int64_t Adjustment) {
3214 APSInt::get(Adjustment));
3229 LVal.Offset += SizeOfComponent;
3231 LVal.addComplex(Info, E, EltTy, Imag);
3245 const VarDecl *VD, CallStackFrame *Frame,
3246 unsigned Version,
APValue *&Result) {
3251 Result = Frame->getTemporary(VD, Version);
3255 if (!isa<ParmVarDecl>(VD)) {
3262 "missing value for local variable");
3263 if (Info.checkingPotentialConstantExpression())
3268 diag::note_unimplemented_constexpr_lambda_feature_ast)
3269 <<
"captures not currently allowed";
3276 if (Info.EvaluatingDecl ==
Base) {
3277 Result = Info.EvaluatingDeclValue;
3281 if (isa<ParmVarDecl>(VD)) {
3284 if (!Info.checkingPotentialConstantExpression() ||
3285 !Info.CurrentCall->Callee ||
3287 if (Info.getLangOpts().CPlusPlus11) {
3288 Info.FFDiag(E, diag::note_constexpr_function_param_value_unknown)
3305 if (!Info.checkingPotentialConstantExpression()) {
3306 Info.FFDiag(E, diag::note_constexpr_var_init_unknown, 1)
3313 if (Init->isValueDependent()) {
3320 if (!Info.checkingPotentialConstantExpression()) {
3321 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
3322 ? diag::note_constexpr_ltor_non_constexpr
3323 : diag::note_constexpr_ltor_non_integral, 1)
3333 Info.FFDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3349 ((Info.getLangOpts().CPlusPlus || Info.getLangOpts().OpenCL) &&
3351 Info.CCEDiag(E, diag::note_constexpr_var_init_non_constant, 1) << VD;
3358 Info.FFDiag(E, diag::note_constexpr_var_init_weak) << VD;
3374 E = Derived->
bases_end(); I != E; ++I, ++Index) {
3375 if (I->getType()->getAsCXXRecordDecl()->getCanonicalDecl() ==
Base)
3379 llvm_unreachable(
"base class missing from derived class's bases list");
3385 assert(!isa<SourceLocExpr>(Lit) &&
3386 "SourceLocExpr should have already been converted to a StringLiteral");
3389 if (
const auto *ObjCEnc = dyn_cast<ObjCEncodeExpr>(Lit)) {
3391 Info.Ctx.getObjCEncodingForType(ObjCEnc->getEncodedType(), Str);
3392 assert(Index <= Str.size() &&
"Index too large");
3393 return APSInt::getUnsigned(Str.c_str()[Index]);
3396 if (
auto PE = dyn_cast<PredefinedExpr>(Lit))
3397 Lit = PE->getFunctionName();
3400 Info.Ctx.getAsConstantArrayType(S->getType());
3401 assert(CAT &&
"string literal isn't an array");
3403 assert(CharType->
isIntegerType() &&
"unexpected character type");
3405 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3407 if (Index < S->getLength())
3408 Value = S->getCodeUnit(Index);
3420 AllocType.isNull() ? S->getType() : AllocType);
3421 assert(CAT &&
"string literal isn't an array");
3423 assert(CharType->
isIntegerType() &&
"unexpected character type");
3425 unsigned Elts = CAT->
getSize().getZExtValue();
3427 std::min(S->getLength(), Elts), Elts);
3428 APSInt Value(S->getCharByteWidth() * Info.Ctx.getCharWidth(),
3430 if (Result.hasArrayFiller())
3432 for (
unsigned I = 0, N = Result.getArrayInitializedElts(); I != N; ++I) {
3433 Value = S->getCodeUnit(I);
3440 unsigned Size = Array.getArraySize();
3441 assert(Index < Size);
3444 unsigned OldElts = Array.getArrayInitializedElts();
3445 unsigned NewElts =
std::max(Index+1, OldElts * 2);
3450 for (
unsigned I = 0; I != OldElts; ++I)
3452 for (
unsigned I = OldElts; I != NewElts; ++I)
3456 Array.swap(NewValue);
3477 for (
auto *Field : RD->
fields())
3478 if (!Field->isUnnamedBitfield() &&
3482 for (
auto &BaseSpec : RD->
bases())
3500 for (
auto *Field : RD->
fields()) {
3505 if (Field->isMutable() &&
3507 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1) << AK << Field;
3508 Info.Note(Field->getLocation(), diag::note_declared_at);
3516 for (
auto &BaseSpec : RD->
bases())
3526 bool MutableSubobject =
false) {
3531 switch (Info.IsEvaluatingDecl) {
3532 case EvalInfo::EvaluatingDeclKind::None:
3535 case EvalInfo::EvaluatingDeclKind::Ctor:
3537 if (Info.EvaluatingDecl ==
Base)
3542 if (
auto *BaseE =
Base.dyn_cast<
const Expr *>())
3543 if (
auto *BaseMTE = dyn_cast<MaterializeTemporaryExpr>(BaseE))
3544 return Info.EvaluatingDecl == BaseMTE->getExtendingDecl();
3547 case EvalInfo::EvaluatingDeclKind::Dtor:
3552 if (MutableSubobject ||
Base != Info.EvaluatingDecl)
3561 llvm_unreachable(
"unknown evaluating decl kind");
3567 struct CompleteObject {
3575 CompleteObject() :
Value(nullptr) {}
3579 bool mayAccessMutableMembers(EvalInfo &Info,
AccessKinds AK)
const {
3590 if (!Info.getLangOpts().CPlusPlus14)
3595 explicit operator bool()
const {
return !
Type.isNull(); }
3600 bool IsMutable =
false) {
3614 template<
typename Sub
objectHandler>
3615 typename SubobjectHandler::result_type
3617 const SubobjectDesignator &
Sub, SubobjectHandler &handler) {
3620 return handler.failed();
3621 if (
Sub.isOnePastTheEnd() ||
Sub.isMostDerivedAnUnsizedArray()) {
3622 if (Info.getLangOpts().CPlusPlus11)
3623 Info.FFDiag(E,
Sub.isOnePastTheEnd()
3624 ? diag::note_constexpr_access_past_end
3625 : diag::note_constexpr_access_unsized_array)
3626 << handler.AccessKind;
3629 return handler.failed();
3635 const FieldDecl *VolatileField =
nullptr;
3638 for (
unsigned I = 0, N =
Sub.Entries.size(); ; ++I) {
3643 if (!Info.checkingPotentialConstantExpression())
3644 Info.FFDiag(E, diag::note_constexpr_access_uninit)
3646 return handler.failed();
3654 Info.isEvaluatingCtorDtor(
3655 Obj.Base, llvm::makeArrayRef(
Sub.Entries.begin(),
3656 Sub.Entries.begin() + I)) !=
3657 ConstructionPhase::None) {
3658 ObjType = Info.Ctx.getCanonicalType(ObjType);
3667 if (Info.getLangOpts().CPlusPlus) {
3671 if (VolatileField) {
3674 Decl = VolatileField;
3675 }
else if (
auto *VD = Obj.Base.dyn_cast<
const ValueDecl*>()) {
3677 Loc = VD->getLocation();
3681 if (
auto *E = Obj.Base.dyn_cast<
const Expr *>())
3684 Info.FFDiag(E, diag::note_constexpr_access_volatile_obj, 1)
3685 << handler.AccessKind << DiagKind <<
Decl;
3686 Info.Note(Loc, diag::note_constexpr_volatile_here) << DiagKind;
3688 Info.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
3690 return handler.failed();
3698 !Obj.mayAccessMutableMembers(Info, handler.AccessKind) &&
3700 return handler.failed();
3704 if (!handler.found(*O, ObjType))
3716 LastField =
nullptr;
3720 assert(CAT &&
"vla in literal type?");
3721 uint64_t Index =
Sub.Entries[I].getAsArrayIndex();
3722 if (CAT->
getSize().ule(Index)) {
3725 if (Info.getLangOpts().CPlusPlus11)
3726 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3727 << handler.AccessKind;
3730 return handler.failed();
3737 else if (!
isRead(handler.AccessKind)) {
3744 uint64_t Index =
Sub.Entries[I].getAsArrayIndex();
3746 if (Info.getLangOpts().CPlusPlus11)
3747 Info.FFDiag(E, diag::note_constexpr_access_past_end)
3748 << handler.AccessKind;
3751 return handler.failed();
3757 assert(I == N - 1 &&
"extracting subobject of scalar?");
3766 }
else if (
const FieldDecl *Field = getAsField(
Sub.Entries[I])) {
3767 if (Field->isMutable() &&
3768 !Obj.mayAccessMutableMembers(Info, handler.AccessKind)) {
3769 Info.FFDiag(E, diag::note_constexpr_access_mutable, 1)
3770 << handler.AccessKind << Field;
3771 Info.Note(Field->getLocation(), diag::note_declared_at);
3772 return handler.failed();
3781 if (I == N - 1 && handler.AccessKind ==
AK_Construct) {
3789 Info.FFDiag(E, diag::note_constexpr_access_inactive_union_member)
3790 << handler.AccessKind << Field << !UnionField << UnionField;
3791 return handler.failed();
3800 if (Field->getType().isVolatileQualified())
3801 VolatileField = Field;
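/// Handler for findSubobject that extracts (reads) the value of the
/// designated subobject.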
3814 struct ExtractSubobjectHandler {
3820 typedef bool result_type;
3821 bool failed() {
return false; }
3841 const CompleteObject &Obj,
3842 const SubobjectDesignator &
Sub,
APValue &Result,
3845 ExtractSubobjectHandler Handler = {Info, E, Result, AK};
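/// Handler for findSubobject that replaces the designated subobject's value
/// with a new value, refusing to modify const-qualified subobjects.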
3850 struct ModifySubobjectHandler {
3855 typedef bool result_type;
3861 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
3867 bool failed() {
return false; }
3869 if (!checkConst(SubobjType))
3872 Subobj.
swap(NewVal);
3876 if (!checkConst(SubobjType))
3878 if (!NewVal.
isInt()) {
3887 if (!checkConst(SubobjType))
3899 const CompleteObject &Obj,
3900 const SubobjectDesignator &
Sub,
3902 ModifySubobjectHandler Handler = { Info, NewVal, E };
3909 const SubobjectDesignator &A,
3910 const SubobjectDesignator &B,
3911 bool &WasArrayIndex) {
3912 unsigned I = 0, N =
std::min(A.Entries.size(), B.Entries.size());
3913 for (; I != N; ++I) {
3917 if (A.Entries[I].getAsArrayIndex() != B.Entries[I].getAsArrayIndex()) {
3918 WasArrayIndex =
true;
3926 if (A.Entries[I].getAsBaseOrMember() !=
3927 B.Entries[I].getAsBaseOrMember()) {
3928 WasArrayIndex =
false;
3931 if (
const FieldDecl *FD = getAsField(A.Entries[I]))
3933 ObjType = FD->getType();
3939 WasArrayIndex =
false;
3946 const SubobjectDesignator &A,
3947 const SubobjectDesignator &B) {
3948 if (A.Entries.size() != B.Entries.size())
3951 bool IsArray = A.MostDerivedIsArrayElement;
3952 if (IsArray && A.MostDerivedPathLength != A.Entries.size())
3961 return CommonLength >= A.Entries.size() - IsArray;
3968 if (LVal.InvalidBase) {
3970 return CompleteObject();
3974 Info.FFDiag(E, diag::note_constexpr_access_null) << AK;
3975 return CompleteObject();
3978 CallStackFrame *Frame =
nullptr;
3980 if (LVal.getLValueCallIndex()) {
3981 std::tie(Frame,
Depth) =
3982 Info.getCallFrameAndDepth(LVal.getLValueCallIndex());
3984 Info.FFDiag(E, diag::note_constexpr_lifetime_ended, 1)
3985 << AK << LVal.Base.is<
const ValueDecl*>();
3987 return CompleteObject();
3998 if (Info.getLangOpts().CPlusPlus)
3999 Info.FFDiag(E, diag::note_constexpr_access_volatile_type)
4003 return CompleteObject();
4008 QualType BaseType = getType(LVal.Base);
4010 if (Info.getLangOpts().CPlusPlus14 && LVal.Base == Info.EvaluatingDecl &&
4014 BaseVal = Info.EvaluatingDeclValue;
4017 if (
auto *GD = dyn_cast<MSGuidDecl>(D)) {
4020 Info.FFDiag(E, diag::note_constexpr_modify_global);
4021 return CompleteObject();
4025 Info.FFDiag(E, diag::note_constexpr_unsupported_layout)
4027 return CompleteObject();
4029 return CompleteObject(LVal.Base, &
V, GD->getType());
4033 if (
auto *GCD = dyn_cast<UnnamedGlobalConstantDecl>(D)) {
4035 Info.FFDiag(E, diag::note_constexpr_modify_global);
4036 return CompleteObject();
4038 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&GCD->getValue()),
4043 if (
auto *TPO = dyn_cast<TemplateParamObjectDecl>(D)) {
4045 Info.FFDiag(E, diag::note_constexpr_modify_global);
4046 return CompleteObject();
4048 return CompleteObject(LVal.Base,
const_cast<APValue *
>(&TPO->getValue()),
4059 const VarDecl *VD = dyn_cast<VarDecl>(D);
4066 return CompleteObject();
4069 bool IsConstant = BaseType.
isConstant(Info.Ctx);
4074 if (IsAccess && isa<ParmVarDecl>(VD)) {
4078 }
else if (Info.getLangOpts().CPlusPlus14 &&
4085 Info.FFDiag(E, diag::note_constexpr_modify_global);
4086 return CompleteObject();
4092 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4093 if (Info.getLangOpts().CPlusPlus) {
4094 Info.FFDiag(E, diag::note_constexpr_ltor_non_const_int, 1) << VD;
4095 Info.Note(VD->
getLocation(), diag::note_declared_at);
4099 return CompleteObject();
4101 }
else if (!IsAccess) {
4102 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4103 }
else if (IsConstant && Info.checkingPotentialConstantExpression() &&
4106 }
else if (IsConstant) {
4110 if (Info.getLangOpts().CPlusPlus) {
4111 Info.CCEDiag(E, Info.getLangOpts().CPlusPlus11
4112 ? diag::note_constexpr_ltor_non_constexpr
4113 : diag::note_constexpr_ltor_non_integral, 1)
4115 Info.Note(VD->
getLocation(), diag::note_declared_at);
4121 if (Info.getLangOpts().CPlusPlus) {
4122 Info.FFDiag(E, Info.getLangOpts().CPlusPlus11
4123 ? diag::note_constexpr_ltor_non_constexpr
4124 : diag::note_constexpr_ltor_non_integral, 1)
4126 Info.Note(VD->
getLocation(), diag::note_declared_at);
4130 return CompleteObject();
4135 return CompleteObject();
4139 Info.FFDiag(E, diag::note_constexpr_access_deleted_object) << AK;
4140 return CompleteObject();
4142 return CompleteObject(LVal.Base, &(*Alloc)->Value,
4143 LVal.Base.getDynamicAllocType());
4149 dyn_cast_or_null<MaterializeTemporaryExpr>(
Base)) {
4150 assert(MTE->getStorageDuration() ==
SD_Static &&
4151 "should have a frame for a non-global materialized temporary");
4178 if (!MTE->isUsableInConstantExpressions(Info.Ctx) &&
4181 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4182 Info.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
4183 Info.Note(MTE->getExprLoc(), diag::note_constexpr_temporary_here);
4184 return CompleteObject();
4187 BaseVal = MTE->getOrCreateValue(
false);
4188 assert(BaseVal &&
"got reference to unevaluated temporary");
4191 return CompleteObject(LVal.getLValueBase(),
nullptr, BaseType);
4194 Info.FFDiag(E, diag::note_constexpr_access_unreadable_object)
4197 Info.Ctx.getLValueReferenceType(LValType));
4199 return CompleteObject();
4202 BaseVal = Frame->getTemporary(Base, LVal.Base.getVersion());
4203 assert(BaseVal && "missing value for temporary");
4214 unsigned VisibleDepth = Depth;
4215 if (llvm::isa_and_nonnull<ParmVarDecl>(
4216 LVal.Base.dyn_cast<const ValueDecl *>()))
4218 if ((Frame && Info.getLangOpts().CPlusPlus14 &&
4219 Info.EvalStatus.HasSideEffects) ||
4220 (isModification(AK) && VisibleDepth < Info.SpeculativeEvaluationDepth))
4221 return CompleteObject();
4223 return CompleteObject(LVal.getLValueBase(), BaseVal, BaseType);
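// What follows is the read path for an lvalue-to-rvalue conversion: once the
// complete object has been located, the requested value is pulled out of the
// designated subobject (extractSubobject below). Compound literals, string
// literals and predefined expressions need dedicated handling because their
// values live in the initializer expression rather than in a call frame.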
4242 const LValue &LVal, APValue &RVal,
4243 bool WantObjectRepresentation = false) {
4244 if (LVal.Designator.Invalid)
4253 if (Base && !LVal.getLValueCallIndex() && !Type.isVolatileQualified()) {
4258 if (Type.isVolatileQualified()) {
4264 if (!Evaluate(Lit, Info, CLE->getInitializer()))
4284 Info.Note(CLE->getExprLoc(), diag::note_declared_at);
4289 CompleteObject LitObj(LVal.Base, &Lit, Base->getType());
4291 } else if (isa<StringLiteral>(Base) || isa<PredefinedExpr>(Base)) {
4294 assert(LVal.Designator.Entries.size() <= 1 &&
4295 "Can only read characters from string literals");
4296 if (LVal.Designator.Entries.empty()) {
4303 if (LVal.Designator.isOnePastTheEnd()) {
4304 if (Info.getLangOpts().CPlusPlus11)
4305 Info.FFDiag(Conv, diag::note_constexpr_access_past_end) << AK;
4310 uint64_t CharIndex = LVal.Designator.Entries[0].getAsArrayIndex();
4317 return Obj && extractSubobject(Info, Conv, Obj, LVal.Designator, RVal, AK);
4323 if (LVal.Designator.Invalid)
4326 if (!Info.getLangOpts().CPlusPlus14) {
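// CompoundAssignSubobjectHandler applies `LHS op= RHS` in place to the
// subobject located by findSubobject, dispatching on the APValue kind of the
// left-hand side (integer, float, pointer, vector) and rejecting writes to
// const-qualified subobjects via checkConst; e.g. `x += 1` on an integer
// subobject goes through the getInt() path below.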
4336 struct CompoundAssignSubobjectHandler {
4345 typedef bool result_type;
4350 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4356 bool failed() { return false; }
4360 return found(Subobj.getInt(), SubobjType);
4362 return found(Subobj.getFloat(), SubobjType);
4363 case APValue::ComplexInt:
4364 case APValue::ComplexFloat:
4368 case APValue::LValue:
4369 return foundPointer(Subobj, SubobjType);
4370 case APValue::Vector:
4371 return foundVector(Subobj, SubobjType);
4380 if (!checkConst(SubobjType))
4391 if (!checkConst(SubobjType))
4410 Info.Ctx.getLangOpts());
4411 APFloat FValue(0.0);
4413 PromotedLHSType, FValue) &&
4423 return checkConst(SubobjType) &&
4430 if (!checkConst(SubobjType))
4448 LVal.setFrom(Info.Ctx, Subobj);
4451 LVal.moveInto(Subobj);
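// Driver for compound assignment: the designator must be valid and the
// language mode must be C++14 or later (assignments were not permitted in
// constant expressions before then); the subobject is then located and handed
// to the handler above.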
4462 const LValue &LVal, QualType LValType,
4466 if (LVal.Designator.Invalid)
4469 if (!Info.getLangOpts().CPlusPlus14) {
4475 CompoundAssignSubobjectHandler Handler = { Info, E, PromotedLValType, Opcode,
4477 return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
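// IncDecSubobjectHandler performs the same job for ++ and --: it dispatches
// on the APValue kind of the designated subobject and refuses to modify
// const-qualified subobjects.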
4481 struct IncDecSubobjectHandler {
4487 typedef bool result_type;
4492 Info.FFDiag(E, diag::note_constexpr_modify_const_type) << QT;
4498 bool failed() { return false; }
4509 return found(Subobj.getInt(), SubobjType);
4511 return found(Subobj.getFloat(), SubobjType);
4512 case APValue::ComplexInt:
4516 case APValue::ComplexFloat:
4520 case APValue::LValue:
4521 return foundPointer(Subobj, SubobjType);
4529 if (!checkConst(SubobjType))
4551 bool WasNegative = Value.isNegative();
4563 unsigned BitWidth = Value.getBitWidth();
4564 APSInt ActualValue(Value.sext(BitWidth + 1), false);
4565 ActualValue.setBit(BitWidth);
4572 if (!checkConst(SubobjType))
4577 APFloat One(Value.getSemantics(), 1);
4579 Value.add(One, APFloat::rmNearestTiesToEven);
4581 Value.subtract(One, APFloat::rmNearestTiesToEven);
4585 if (!checkConst(SubobjType))
4597 LVal.setFrom(Info.Ctx, Subobj);
4601 LVal.moveInto(Subobj);
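// Driver for pre/post increment and decrement, likewise only allowed in
// C++14-and-later constant evaluation. Note how the handler above widens the
// integer to BitWidth + 1 bits (ActualValue), presumably so that a signed
// overflow such as `++i` at INT_MAX can be reported with its true value
// rather than a wrapped one.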
4610 if (LVal.Designator.Invalid)
4613 if (!Info.getLangOpts().CPlusPlus14) {
4620 IncDecSubobjectHandler Handler = {Info, cast<UnaryOperator>(E), AK, Old};
4621 return Obj && findSubobject(Info, E, Obj, LVal.Designator, Handler);
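// The checks below appear to vet the implicit object argument of a member
// call: a prvalue pointer is dereferenced, a glvalue is used directly, and an
// object of non-literal type is rejected with note_constexpr_nonliteral.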
4627 if (Object->getType()->isPointerType() && Object->isPRValue())
4630 if (Object->isGLValue())
4633 if (Object->getType()->isLiteralType(Info.Ctx))
4636 Info.FFDiag(Object, diag::note_constexpr_nonliteral) << Object->getType();
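// Member pointer access (`.*` / `->*`): the lvalue's designator is adjusted
// along the member pointer's inheritance path. For a pointer to a member of a
// derived class the designator appears to be truncated back to that class;
// otherwise the recorded base classes are appended, and if IncludeMember is
// set the member itself (field or indirect field) becomes the final entry.
// Bound member functions cannot be turned into a reference, hence the
// llvm_unreachable below.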
4655 bool IncludeMember = true) {
4662 if (!MemPtr.getDecl()) {
4668 if (MemPtr.isDerivedMember()) {
4672 if (LV.Designator.MostDerivedPathLength + MemPtr.Path.size() >
4673 LV.Designator.Entries.size()) {
4677 unsigned PathLengthToMember =
4678 LV.Designator.Entries.size() - MemPtr.Path.size();
4679 for (unsigned I = 0, N = MemPtr.Path.size(); I != N; ++I) {
4681 LV.Designator.Entries[PathLengthToMember + I]);
4691 PathLengthToMember))
4693 } else if (!MemPtr.Path.empty()) {
4695 LV.Designator.Entries.reserve(LV.Designator.Entries.size() +
4696 MemPtr.Path.size() + IncludeMember);
4702 assert(RD && "member pointer access on non-class-type expression");
4704 for (unsigned I = 1, N = MemPtr.Path.size(); I != N; ++I) {
4712 MemPtr.getContainingRecord()))
4717 if (IncludeMember) {
4718 if (const FieldDecl *FD = dyn_cast<FieldDecl>(MemPtr.getDecl())) {
4722 dyn_cast<IndirectFieldDecl>(MemPtr.getDecl())) {
4726 llvm_unreachable("can't construct reference to bound member function");
4730 return MemPtr.getDecl();
4736 bool IncludeMember = true) {
4740 if (Info.noteFailure()) {
4748 BO->getRHS(), IncludeMember);
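// Base-to-derived cast of an lvalue: the existing designator must already end
// in enough base-class entries to strip off path_size() of them; otherwise
// the pointee is not actually an object of the target type and
// note_constexpr_invalid_downcast is emitted.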
4755 SubobjectDesignator &D = Result.Designator;
4756 if (D.Invalid || !Result.checkNullPointer(Info, E, CSK_Derived))
4764 if (D.MostDerivedPathLength + E->path_size() > D.Entries.size()) {
4765 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4766 << D.MostDerivedType << TargetQT;
4772 unsigned NewEntriesSize = D.Entries.size() - E->path_size();
4775 if (NewEntriesSize == D.MostDerivedPathLength)
4776 FinalType = D.MostDerivedType->getAsCXXRecordDecl();
4778 FinalType = getAsBaseClass(D.Entries[NewEntriesSize - 1]);
4780 Info.CCEDiag(E, diag::note_constexpr_invalid_downcast)
4781 << D.MostDerivedType << TargetQT;
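// Appears to build the value produced by default-initialization of a class
// type: invalid declarations fail, unions are handled separately, and for
// other classes a value is built for each base and each named (non-unnamed-
// bit-field) field; scalar leaves end up as APValue::IndeterminateValue().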
4792 bool Success = true;
4794 if (RD->isInvalidDecl()) {
4798 if (RD->isUnion()) {
4807 End = RD->bases_end();
4808 I != End; ++I, ++Index)
4811 for (const auto *I : RD->fields()) {
4812 if (I->isUnnamedBitfield())
4815 Result.getStructField(I->getFieldIndex()));
4823 if (Result.hasArrayFiller())
4830 Result = APValue::IndeterminateValue();
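// EvalStmtResult describes how evaluation of a statement ended: normal
// completion, a break/continue/return that must be propagated to the
// enclosing construct, a hard failure, or (while searching for a switch
// label) ESR_CaseNotFound.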
4835 enum EvalStmtResult {
4857 APValue &Val = Info.CurrentCall->createTemporary(VD, VD->getType(),
4858 ScopeKind::Block, Result);
4863 return Info.noteSideEffect();
4882 if (const VarDecl *VD = dyn_cast<VarDecl>(D))
4886 for (auto *BD : DD->bindings())
4887 if (auto *VD = BD->getHoldingVar())
4895 if (Info.noteSideEffect())
4897 assert(E->containsErrors() && "valid value-dependent expression should never "
4898 "reach invalid code path.");
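// Helper for evaluating a control-flow condition: the condition variable (if
// any) and the condition itself are evaluated inside their own
// full-expression scope, which must be destroyed successfully for the result
// to be usable (see the EvaluateCond calls in the loop handling below).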
4904 const Expr *Cond, bool &Result) {
4907 FullExpressionRAII Scope(Info);
4912 return Scope.destroy();
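// TempVersionRAII bumps the current frame's temporary version on entry and
// pops it on exit, presumably so that temporaries created by repeated
// evaluation of the same expression (e.g. across loop iterations) are kept
// distinct from one another.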
4925 struct TempVersionRAII {
4926 CallStackFrame &Frame;
4928 TempVersionRAII(CallStackFrame &Frame) : Frame(Frame) {
4929 Frame.pushTempVersion();
4932 ~TempVersionRAII() {
4933 Frame.popTempVersion();
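// Evaluation of one execution of a loop body in its own block scope; the
// scope must be destroyed unless evaluation already failed. The translated
// result lets the caller distinguish "stop the loop" (e.g. after a break)
// from "run another iteration".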
4947 BlockScopeRAII Scope(Info);
4949 EvalStmtResult ESR = EvaluateStmt(Result, Info, Body, Case);
4950 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())
4955 return ESR_Succeeded;
4958 return ESR_Continue;
4961 case ESR_CaseNotFound:
4964 llvm_unreachable("Invalid EvalStmtResult!");
4970 BlockScopeRAII Scope(Info);
4977 if (ESR != ESR_Succeeded) {
4978 if (ESR != ESR_Failed && !Scope.destroy())
4984 FullExpressionRAII CondScope(Info);
4995 if (!CondScope.destroy())
5004 if (isa<DefaultStmt>(SC)) {
5009 const CaseStmt *CS = cast<CaseStmt>(SC);
5020 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5024 if (ESR != ESR_Failed && ESR != ESR_CaseNotFound && !Scope.destroy())
5029 return ESR_Succeeded;
5035 case ESR_CaseNotFound:
5039 diag::note_constexpr_stmt_expr_unsupported);
5042 llvm_unreachable("Invalid EvalStmtResult!");
5050 Info.CCEDiag(VD->getLocation(), diag::note_constexpr_static_local)
5060 if (!Info.nextStep(S))
5066 switch (S->getStmtClass()) {
5067 case Stmt::CompoundStmtClass:
5071 case Stmt::LabelStmtClass:
5072 case Stmt::AttributedStmtClass:
5073 case Stmt::DoStmtClass:
5076 case Stmt::CaseStmtClass:
5077 case Stmt::DefaultStmtClass:
5082 case Stmt::IfStmtClass: {
5085 const IfStmt *IS = cast<IfStmt>(S);
5089 BlockScopeRAII Scope(Info);
5094 EvalStmtResult ESR = EvaluateStmt(Result, Info, Init, Case);
5095 if (ESR != ESR_CaseNotFound) {
5096 assert(ESR != ESR_Succeeded);
5107 if (ESR == ESR_Failed)
5109 if (ESR != ESR_CaseNotFound)
5110 return Scope.destroy() ? ESR : ESR_Failed;
5112 return ESR_CaseNotFound;
5115 if (ESR == ESR_Failed)
5117 if (ESR != ESR_CaseNotFound)
5118 return Scope.destroy() ? ESR : ESR_Failed;
5119 return ESR_CaseNotFound;
5122 case Stmt::WhileStmtClass: {
5123 EvalStmtResult ESR =
5125 if (ESR != ESR_Continue)
5130 case Stmt::ForStmtClass: {
5131 const ForStmt *FS = cast<ForStmt>(S);
5132 BlockScopeRAII Scope(Info);
5136 if (const Stmt *Init = FS->getInit()) {
5137 EvalStmtResult ESR = EvaluateStmt(Result, Info, Init, Case);
5138 if (ESR != ESR_CaseNotFound) {
5139 assert(ESR != ESR_Succeeded);
5144 EvalStmtResult ESR =
5146 if (ESR != ESR_Continue)
5148 if (const auto *Inc = FS->getInc()) {
5149 if (Inc->isValueDependent()) {
5153 FullExpressionRAII IncScope(Info);
5161 case Stmt::DeclStmtClass: {
5164 const DeclStmt *DS = cast<DeclStmt>(S);
5165 for (const auto *D : DS->decls()) {
5166 if (const auto *VD = dyn_cast<VarDecl>(D)) {
5169 if (VD->hasLocalStorage() && !VD->getInit())
5177 return ESR_CaseNotFound;
5181 return ESR_CaseNotFound;
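// Main statement-evaluation switch. When a switch label is being searched
// for, the case-hunting paths above return ESR_CaseNotFound for statements
// that cannot contain it; otherwise each statement kind is evaluated with the
// usual scoping (BlockScopeRAII for blocks and loops, FullExpressionRAII for
// individual full-expressions) and its EvalStmtResult is propagated outward.
// As an illustration, a constexpr function such as
//   constexpr int sum(int n) {
//     int r = 0;
//     for (int i = 1; i <= n; ++i) r += i;
//     return r;
//   }
// exercises the DeclStmt, ForStmt, CompoundStmt and ReturnStmt cases below.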
5185 switch (S->getStmtClass()) {
5187 if (const Expr *E = dyn_cast<Expr>(S)) {
5196 FullExpressionRAII Scope(Info);
5200 return ESR_Succeeded;
5203 Info.FFDiag(S->getBeginLoc());
5206 case Stmt::NullStmtClass:
5207 return ESR_Succeeded;
5209 case Stmt::DeclStmtClass: {
5210 const DeclStmt *DS = cast<DeclStmt>(S);
5211 for (const auto *D : DS->decls()) {
5212 const VarDecl *VD = dyn_cast_or_null<VarDecl>(D);
5216 FullExpressionRAII Scope(Info);
5219 if (!Scope.destroy())
5222 return ESR_Succeeded;
5225 case Stmt::ReturnStmtClass: {
5226 const Expr *RetExpr = cast<ReturnStmt>(S)->getRetValue();
5227 FullExpressionRAII Scope(Info);
5236 : Evaluate(Result.Value, Info, RetExpr)))
5238 return Scope.destroy() ? ESR_Returned : ESR_Failed;
5241 case Stmt::CompoundStmtClass: {
5242 BlockScopeRAII Scope(Info);
5245 for (const auto *BI : CS->body()) {
5246 EvalStmtResult ESR = EvaluateStmt(Result, Info, BI, Case);
5247 if (ESR == ESR_Succeeded)
5249 else if (ESR != ESR_CaseNotFound) {
5250 if (ESR != ESR_Failed && !Scope.destroy())
5256 return ESR_CaseNotFound;
5257 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5260 case Stmt::IfStmtClass: {
5261 const IfStmt *IS = cast<IfStmt>(S);
5264 BlockScopeRAII Scope(Info);
5267 if (ESR != ESR_Succeeded) {
5268 if (ESR != ESR_Failed && !Scope.destroy())
5281 EvalStmtResult ESR = EvaluateStmt(Result, Info, SubStmt);
5282 if (ESR != ESR_Succeeded) {
5283 if (ESR != ESR_Failed && !Scope.destroy())
5288 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5291 case Stmt::WhileStmtClass: {
5292 const WhileStmt *WS = cast<WhileStmt>(S);
5294 BlockScopeRAII Scope(Info);
5303 if (ESR != ESR_Continue) {
5304 if (ESR != ESR_Failed && !Scope.destroy())
5308 if (!Scope.destroy())
5311 return ESR_Succeeded;
5314 case Stmt::DoStmtClass: {
5315 const DoStmt *DS = cast<DoStmt>(S);
5319 if (ESR != ESR_Continue)
5328 FullExpressionRAII CondScope(Info);
5330 !CondScope.destroy())
5333 return ESR_Succeeded;
5336 case Stmt::ForStmtClass: {
5337 const ForStmt *FS = cast<ForStmt>(S);
5338 BlockScopeRAII ForScope(Info);
5339 if (FS->getInit()) {
5340 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getInit());
5341 if (ESR != ESR_Succeeded) {
5342 if (ESR != ESR_Failed && !ForScope.destroy())
5348 BlockScopeRAII IterScope(Info);
5349 bool Continue = true;
5350 if (FS->getCond() && !EvaluateCond(Info, FS->getConditionVariable(),
5351 FS->getCond(), Continue))
5357 if (ESR != ESR_Continue) {
5358 if (ESR != ESR_Failed && (!IterScope.destroy() || !ForScope.destroy()))
5363 if (const auto *Inc = FS->getInc()) {
5364 if (Inc->isValueDependent()) {
5368 FullExpressionRAII IncScope(Info);
5374 if (!IterScope.destroy())
5377 return ForScope.destroy() ? ESR_Succeeded : ESR_Failed;
5380 case Stmt::CXXForRangeStmtClass: {
5382 BlockScopeRAII Scope(Info);
5385 if (FS->getInit()) {
5386 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getInit());
5387 if (ESR != ESR_Succeeded) {
5388 if (ESR != ESR_Failed && !Scope.destroy())
5395 EvalStmtResult ESR = EvaluateStmt(Result, Info, FS->getRangeStmt());
5396 if (ESR != ESR_Succeeded) {
5397 if (ESR != ESR_Failed && !Scope.destroy())
5404 if (!FS->getBeginStmt() || !FS->getEndStmt() || !FS->getCond())
5409 if (ESR != ESR_Succeeded) {
5410 if (ESR != ESR_Failed && !Scope.destroy())
5415 if (ESR != ESR_Succeeded) {
5416 if (ESR != ESR_Failed && !Scope.destroy())
5424 if (FS->getCond()->isValueDependent()) {
5429 bool Continue = true;
5430 FullExpressionRAII CondExpr(Info);
5438 BlockScopeRAII InnerScope(Info);
5439 ESR = EvaluateStmt(Result, Info, FS->getLoopVarStmt());
5440 if (ESR != ESR_Succeeded) {
5441 if (ESR != ESR_Failed && (!InnerScope.destroy() || !Scope.destroy()))
5448 if (ESR != ESR_Continue) {
5449 if (ESR != ESR_Failed && (!InnerScope.destroy() || !Scope.destroy()))
5453 if (FS->getInc()->isValueDependent()) {
5462 if (!InnerScope.destroy())
5466 return Scope.destroy() ? ESR_Succeeded : ESR_Failed;
5469 case Stmt::SwitchStmtClass:
5472 case Stmt::ContinueStmtClass:
5473 return ESR_Continue;
5475 case Stmt::BreakStmtClass:
5478 case Stmt::LabelStmtClass:
5479 return EvaluateStmt(Result, Info, cast<LabelStmt>(S)->getSubStmt(), Case);
5481 case Stmt::AttributedStmtClass:
5484 return EvaluateStmt(Result, Info, cast<AttributedStmt>(S)->getSubStmt(),
5487 case Stmt::CaseStmtClass:
5488 case Stmt::DefaultStmtClass:
5489 return EvaluateStmt(Result, Info, cast<SwitchCase>(S)->getSubStmt(), Case);
5490 case Stmt::CXXTryStmtClass:
5492 return EvaluateStmt(Result, Info, cast<CXXTryStmt>(S)->getTryBlock(), Case);
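// Checks that a function may be called in a constant expression: neither the
// declaration nor the definition may be invalid, it must be declared
// constexpr (for an inheriting constructor the inherited constructor must be
// constexpr), and a usable definition must be available; virtual calls are
// additionally diagnosed before C++20. Failures are reported with
// note_constexpr_invalid_function / note_constexpr_invalid_inhctor plus a
// note pointing at the declaration.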
5502 bool IsValueInitialization) {
5509 if (!CD->isConstexpr() && !IsValueInitialization) {
5510 if (Info.getLangOpts().CPlusPlus11) {
5513 Info.CCEDiag(Loc, diag::note_constexpr_invalid_function, 1)
5515 Info.Note(CD->getLocation(), diag::note_declared_at);
5517 Info.CCEDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
5531 if (Info.checkingPotentialConstantExpression() && !Definition &&
5532 Declaration->isConstexpr())
5538 if (Declaration->isInvalidDecl()) {
5539 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
5546 if (!Info.Ctx.getLangOpts().CPlusPlus20 && isa<CXXMethodDecl>(Declaration) &&
5547 cast<CXXMethodDecl>(Declaration)->isVirtual())
5548 Info.CCEDiag(CallLoc, diag::note_constexpr_virtual_call);
5550 if (Definition && Definition->isInvalidDecl()) {
5551 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
5556 if (Definition && Definition->isConstexpr() && Body)
5559 if (Info.getLangOpts().CPlusPlus11) {
5560 const FunctionDecl *DiagDecl = Definition ? Definition : Declaration;
5564 auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
5565 if (CD && CD->isInheritingConstructor()) {
5566 auto *Inherited = CD->getInheritedConstructor().getConstructor();
5567 if (!Inherited->isConstexpr())
5568 DiagDecl = CD = Inherited;
5574 if (CD && CD->isInheritingConstructor())
5575 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_inhctor, 1)
5576 << CD->getInheritedConstructor().getConstructor()->getParent();
5578 Info.FFDiag(CallLoc, diag::note_constexpr_invalid_function, 1)
5580 Info.Note(DiagDecl->getLocation(), diag::note_declared_at);
5582 Info.FFDiag(CallLoc, diag::note_invalid_subexpr_in_const_expr);
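// CheckDynamicTypeHandler and the checks that follow guard operations which
// need an object's dynamic type (presumably virtual calls, typeid and
// dynamic_cast): the designator must be valid, must not point one past the
// end or into an array of unknown bound, and for polymorphic operations the
// dynamic type must actually be known.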
5588 struct CheckDynamicTypeHandler {
5590 typedef bool result_type;
5591 bool failed() { return false; }
5594 bool found(APFloat &Value, QualType SubobjType) { return true; }
5602 if (This.Designator.Invalid)
5614 if (This.Designator.isOnePastTheEnd() ||
5615 This.Designator.isMostDerivedAnUnsizedArray()) {
5616 Info.FFDiag(E, This.Designator.isOnePastTheEnd()
5617 ? diag::note_constexpr_access_past_end
5618 : diag::note_constexpr_access_unsized_array)
5621 } else if (Polymorphic) {
5627 Info.Ctx.getLValueReferenceType(This.Designator.getType(Info.Ctx));
5628 Info.FFDiag(E, diag::note_constexpr_polymorphic_unknown_dynamic_type)
5635 CheckDynamicTypeHandler Handler{AK};
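// Maps a designator path length back to the class it designates: the
// most-derived path length names the most-derived class itself, while shorter
// lengths name the corresponding base-class entry along the path.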
5658 unsigned PathLength) {
5659 assert(PathLength >= Designator.MostDerivedPathLength && PathLength <=
5660 Designator.Entries.size() && "invalid path length");
5661 return (PathLength == Designator.MostDerivedPathLength)
5662 ? Designator.MostDerivedType->getAsCXXRecordDecl()
5663 : getAsBaseClass(Designator.Entries[PathLength - 1]);
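// Computation of an object's dynamic type: it is only available when the
// most-derived type is a class without virtual bases (the virtual-base case
// is not handled here), and only for path prefixes that are not currently in
// the middle of constructing or destroying their bases, per the
// ConstructionPhase checks below.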
5682 This.Designator.MostDerivedType->getAsCXXRecordDecl();
5683 if (!Class || Class->getNumVBases()) {
5693 for (unsigned PathLength = This.Designator.MostDerivedPathLength;
5694 PathLength <= Path.size(); ++PathLength) {
5695 switch (Info.isEvaluatingCtorDtor(This.getLValueBase(),
5696 Path.slice(0, PathLength))) {
5697 case ConstructionPhase::Bases:
5698 case ConstructionPhase::DestroyingBases:
5703 case ConstructionPhase::None:
5704 case ConstructionPhase::AfterBases:
5705 case ConstructionPhase::AfterFields:
5706 case ConstructionPhase::Destroying:
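// Virtual function dispatch during constant evaluation: starting from the
// dynamic type found above, the final overrider of the callee is looked up
// along the designator path; a call to a pure virtual function is diagnosed,
// and if the overrider's return type differs from the callee's, the chain of
// covariant return types is collected into CovariantAdjustmentPath.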
5736 unsigned PathLength = DynType->PathLength;
5737 for (; PathLength <= This.Designator.Entries.size(); ++PathLength) {
5750 if (Callee->isPure()) {
5751 Info.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << Callee;
5752 Info.Note(Callee->getLocation(), diag::note_declared_at);
5758 if (!Info.Ctx.hasSameUnqualifiedType(Callee->getReturnType(),
5760 CovariantAdjustmentPath.push_back(Callee->getReturnType());
5761 for (unsigned CovariantPathLength = PathLength + 1;
5762 CovariantPathLength != This.Designator.Entries.size();
5763 ++CovariantPathLength) {
5768 if (Next && !Info.Ctx.hasSameUnqualifiedType(
5769 Next->getReturnType(), CovariantAdjustmentPath.back()))
5770 CovariantAdjustmentPath.push_back(Next->getReturnType());
5772 if (!Info.Ctx.hasSameUnqualifiedType(Found->getReturnType(),
5773 CovariantAdjustmentPath.back()))
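// Covariant return adjustment: the returned pointer or lvalue (unless it is
// null) is converted step by step along the recorded path of return types,
// casting from each class to the next.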
5790 assert(Result.isLValue() &&
5791 "unexpected kind of APValue for covariant return");
5792 if (Result.isNullPointer())
5796 LVal.setFrom(Info.Ctx, Result);
5798 const CXXRecordDecl *OldClass = Path[0]->getPointeeCXXRecordDecl();
5799 for (unsigned I = 1; I != Path.size(); ++I) {
5800 const CXXRecordDecl *NewClass = Path[I]->getPointeeCXXRecordDecl();
5801 assert(OldClass && NewClass && "unexpected kind of covariant return");
5802 if (OldClass != NewClass &&
5805 OldClass = NewClass;
5808 LVal.moveInto(Result);
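// Helper checking that a base class is a public direct base, followed by the
// evaluation of dynamic_cast, which appears to begin by handling null pointer
// operands of the pointer (non-glvalue) form specially.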
5817 auto *BaseClass = BaseSpec.getType()->getAsCXXRecordDecl();
5819 return BaseSpec.getAccessSpecifier() == AS_public;
5821 llvm_unreachable("Base is not a direct base of Derived");
5831 SubobjectDesignator &D = Ptr.Designator;
5837 if (Ptr.isNullPointer() && !E->isGLValue())