#include "llvm/ADT/APInt.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/TypeSize.h"

using namespace clang;
  llvm_unreachable(
      "Nested name specifier is not a type for inheriting ctor");

  return CreateParsedType(Type,
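
// Sema::getConstructorName: resolve an identifier used as a constructor name
// to the injected-class-name of the class being constructed.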
                                  bool EnteringContext) {
         "not a constructor name");

  if (SS.isNotEmpty() && RequireCompleteDeclContext(SS, CurClass))

    auto *RD = dyn_cast<CXXRecordDecl>(ND);
    if (RD && RD->isInjectedClassName()) {
      InjectedClassName = RD;

  if (!InjectedClassName) {
         diag::err_incomplete_nested_name_spec) << CurClass << SS.getRange();

  DiagnoseUseOfDecl(InjectedClassName, NameLoc);
  MarkAnyDeclReferenced(NameLoc, InjectedClassName, false);
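
// Sema::getDestructorName: resolve the identifier in a ~X destructor name by
// looking it up in the object type, the nested-name-specifier scope, and the
// enclosing lexical scope, preferring declarations that name a type.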
                                 bool EnteringContext) {
      ObjectTypePtr ? GetTypeFromParser(ObjectTypePtr) : QualType();

  auto IsAcceptableResult = [&](NamedDecl *D) -> bool {
    auto *Type = dyn_cast<TypeDecl>(D->getUnderlyingDecl());

  unsigned NumAcceptableResults = 0;
    if (IsAcceptableResult(D))
      ++NumAcceptableResults;

    if (auto *RD = dyn_cast<CXXRecordDecl>(D))
      if (RD->isInjectedClassName())
        D = cast<NamedDecl>(RD->getParent());

    if (FoundDeclSet.insert(D).second)
      FoundDecls.push_back(D);

  if (Found.isAmbiguous() && NumAcceptableResults == 1) {
    Diag(NameLoc, diag::ext_dtor_name_ambiguous);

      if (!IsAcceptableResult(D))

  if (Found.isAmbiguous())

    if (IsAcceptableResult(Type)) {
      MarkAnyDeclReferenced(Type->getLocation(), Type, false);

  bool IsDependent = false;

  auto LookupInObjectType = [&]() -> ParsedType {
    if (Failed || SearchType.isNull())

    LookupResult Found(*this, &II, NameLoc, LookupDestructorName);
    DeclContext *LookupCtx = computeDeclContext(SearchType);
    LookupQualifiedName(Found, LookupCtx);
    return CheckLookupResult(Found);

    IsDependent |= isDependentScopeSpecifier(LookupSS);
    DeclContext *LookupCtx = computeDeclContext(LookupSS, EnteringContext);

    LookupResult Found(*this, &II, NameLoc, LookupDestructorName);
    if (RequireCompleteDeclContext(LookupSS, LookupCtx)) {
    LookupQualifiedName(Found, LookupCtx);
    return CheckLookupResult(Found);

    LookupResult Found(*this, &II, NameLoc, LookupDestructorName);
    LookupName(Found, S);
    return CheckLookupResult(Found);

    if (ParsedType T = LookupInNestedNameSpec(PrefixSS))

  unsigned NumNonExtensionDecls = FoundDecls.size();

    if (ParsedType T = LookupInNestedNameSpec(SS)) {
      Diag(SS.getEndLoc(), diag::ext_qualified_dtor_named_in_lexical_scope)
      Diag(FoundDecls.back()->getLocation(), diag::note_destructor_type_here)
          << GetTypeFromParser(T);

  FoundDecls.resize(NumNonExtensionDecls);

  std::stable_sort(FoundDecls.begin(), FoundDecls.end(),
                     return isa<TypeDecl>(A->getUnderlyingDecl()) >
                            isa<TypeDecl>(B->getUnderlyingDecl());

  auto MakeFixItHint = [&] {
      Destroyed = dyn_cast_or_null<CXXRecordDecl>(S->getEntity());

  if (FoundDecls.empty()) {
    Diag(NameLoc, diag::err_undeclared_destructor_name)
        << &II << MakeFixItHint();
  } else if (!SearchType.isNull() && FoundDecls.size() == 1) {
    if (auto *TD = dyn_cast<TypeDecl>(FoundDecls[0]->getUnderlyingDecl())) {
      assert(!SearchType.isNull() &&
             "should only reject a type result if we have a search type");
      Diag(NameLoc, diag::err_destructor_expr_type_mismatch)
          << T << SearchType << MakeFixItHint();
      Diag(NameLoc, diag::err_destructor_expr_nontype)
          << &II << MakeFixItHint();
    Diag(NameLoc, SearchType.isNull() ? diag::err_destructor_name_nontype
                                      : diag::err_destructor_expr_mismatch)
        << &II << SearchType << MakeFixItHint();

    if (auto *TD = dyn_cast<TypeDecl>(FoundD->getUnderlyingDecl()))
      Diag(FoundD->getLocation(), diag::note_destructor_type_here)
    Diag(FoundD->getLocation(), diag::note_destructor_nontype_here)
476 "unexpected type in getDestructorType");
481 QualType SearchType = GetTypeFromParser(ObjectType);
      !PP.getSourceManager().isInSystemHeader(Loc)) {
    Diag(Loc, diag::warn_reserved_extern_symbol)
        << II << static_cast<int>(Status)
               Name.getSourceRange(),
               (StringRef("operator\"\"") + II->getName()).str());

    Diag(Name.getBeginLoc(), diag::err_literal_operator_id_outside_namespace)

  llvm_unreachable("unknown nested name specifier kind");
      RequireCompleteType(TypeidLoc, T, diag::err_incomplete_typeid))

    return ExprError(
        Diag(TypeidLoc, diag::err_variably_modified_typeid) << T);

  if (CheckQualifiedFunctionForTypeId(T, TypeidLoc))

  bool WasEvaluated = false;

      CXXRecordDecl *RecordD = cast<CXXRecordDecl>(RecordT->getDecl());

      if (RequireCompleteType(TypeidLoc, T, diag::err_incomplete_typeid))

        if (isUnevaluatedContext()) {
          ExprResult Result = TransformToPotentiallyEvaluated(E);
          if (Result.isInvalid())

        MarkVTableUsed(TypeidLoc, RecordD);

    ExprResult Result = CheckUnevaluatedOperand(E);
    if (Result.isInvalid())

    E = ImpCastExprToType(E, UnqualT, CK_NoOp, E->getValueKind()).get();

    return ExprError(Diag(TypeidLoc, diag::err_variably_modified_typeid)

  else if (!inTemplateInstantiation() &&
                          ? diag::warn_side_effects_typeid
                          : diag::warn_side_effects_unevaluated_context);
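
// Sema::ActOnCXXTypeid: typeid requires RTTI to be enabled and std::type_info
// to be visible (i.e. <typeinfo> has been included); OpenCL C++ rejects it
// entirely.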
  if (getLangOpts().OpenCLCPlusPlus) {
    return ExprError(Diag(OpLoc, diag::err_openclcxx_not_supported)

  if (!getStdNamespace())
    return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));

  if (!CXXTypeInfoDecl) {
    IdentifierInfo *TypeInfoII = &PP.getIdentifierTable().get("type_info");
    LookupQualifiedName(R, getStdNamespace());

    if (!CXXTypeInfoDecl && LangOpts.MSVCCompat) {

    if (!CXXTypeInfoDecl)
      return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));

  if (!getLangOpts().RTTI) {
    return ExprError(Diag(OpLoc, diag::err_no_typeid_with_fno_rtti));

    return BuildCXXTypeId(TypeInfoType, OpLoc, TInfo, RParenLoc);

      BuildCXXTypeId(TypeInfoType, OpLoc, (Expr *)TyOrExpr, RParenLoc);

  if (!getLangOpts().RTTIData && !Result.isInvalid())
    if (auto *CTE = dyn_cast<CXXTypeidExpr>(Result.get()))
      if (CTE->isPotentiallyEvaluated() && !CTE->isMostDerived(Context))
        Diag(OpLoc, diag::warn_no_typeid_with_rtti_disabled)
            << (getDiagnostics().getDiagnosticOptions().getFormat() ==
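
// __uuidof support: UuidAttrs are collected from the operand's record type
// (and from class template arguments); exactly one GUID must be found.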
    if (const auto *Uuid = TD->getMostRecentDecl()->getAttr<UuidAttr>()) {
      UuidAttrs.insert(Uuid);

    if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(TD)) {
        const UuidAttr *UuidForTA = nullptr;
          UuidAttrs.insert(UuidForTA);

  if (!Operand->getType()->isDependentType()) {
    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();

    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();

    return BuildCXXUuidof(GuidType, OpLoc, TInfo, RParenLoc);

  return BuildCXXUuidof(GuidType, OpLoc, (Expr *)TyOrExpr, RParenLoc);
  assert((Kind == tok::kw_true || Kind == tok::kw_false) &&
         "Unknown C++ Boolean value!");
  bool IsThrownVarInScope = false;

      if (VarDecl *Var = dyn_cast<VarDecl>(DRE->getDecl())) {
        if (Var->hasLocalStorage() && !Var->getType().isVolatileQualified()) {
          for (; S; S = S->getParent()) {
            if (S->isDeclScope(Var)) {
              IsThrownVarInScope = true;

  return BuildCXXThrow(OpLoc, Ex, IsThrownVarInScope);
                                bool IsThrownVarInScope) {
  if (!getLangOpts().CXXExceptions &&
      !getSourceManager().isInSystemHeader(OpLoc) && !getLangOpts().CUDA) {
    targetDiag(OpLoc, diag::err_exceptions_disabled) << "throw";

  if (getLangOpts().CUDA)
    CUDADiagIfDeviceCode(OpLoc, diag::err_cuda_device_exceptions)
        << "throw" << CurrentCUDATarget();

  if (getCurScope() && getCurScope()->isOpenMPSimdDirectiveScope())
    Diag(OpLoc, diag::err_omp_simd_region_cannot_use_stmt) << "throw";

    if (CheckCXXThrowOperand(OpLoc, ExceptionObjectTy, Ex))

    ExprResult Res = PerformMoveOrCopyInitialization(Entity, NRInfo, Ex);
                          llvm::DenseMap<CXXRecordDecl *, unsigned> &SubobjectsSeen,
                          llvm::SmallPtrSetImpl<CXXRecordDecl *> &VBases,
                          llvm::SetVector<CXXRecordDecl *> &PublicSubobjectsSeen,
                          bool ParentIsPublic) {
    CXXRecordDecl *BaseDecl = BS.getType()->getAsCXXRecordDecl();
      NewSubobject = VBases.insert(BaseDecl).second;
      ++SubobjectsSeen[BaseDecl];

    bool PublicPath = ParentIsPublic && BS.getAccessSpecifier() == AS_public;
      PublicSubobjectsSeen.insert(BaseDecl);

  llvm::DenseMap<CXXRecordDecl *, unsigned> SubobjectsSeen;
  llvm::SmallSet<CXXRecordDecl *, 2> VBases;
  llvm::SetVector<CXXRecordDecl *> PublicSubobjectsSeen;
  SubobjectsSeen[RD] = 1;
  PublicSubobjectsSeen.insert(RD);

  for (CXXRecordDecl *PublicSubobject : PublicSubobjectsSeen) {
    if (SubobjectsSeen[PublicSubobject] > 1)

    Objects.push_back(PublicSubobject);
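
// Sema::CheckCXXThrowOperand: the exception object type must be complete and
// non-abstract, with an accessible destructor; the copy constructors of the
// unambiguous public subobjects are checked as well, and a warning is emitted
// when the exception object may be under-aligned for the thrown type.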
  bool isPointer = false;

  if (RequireCompleteType(ThrowLoc, Ty,
                          isPointer ? diag::err_throw_incomplete_ptr
                                    : diag::err_throw_incomplete,

  if (RequireNonAbstractType(ThrowLoc, ExceptionObjectTy,
                             diag::err_throw_abstract_type, E))

    MarkVTableUsed(ThrowLoc, RD);

    MarkFunctionReferenced(E->getExprLoc(), Destructor);
    CheckDestructorAccess(E->getExprLoc(), Destructor,
                          PDiag(diag::err_access_dtor_exception) << Ty);
    if (DiagnoseUseOfDecl(Destructor, E->getExprLoc()))

  for (CXXRecordDecl *Subobject : UnambiguousPublicSubobjects) {

      for (unsigned I = 1, E = CD->getNumParams(); I != E; ++I) {
        if (CheckCXXDefaultArgExpr(ThrowLoc, CD, CD->getParamDecl(I)))

  if (ExnObjAlign < TypeAlign) {
    Diag(ThrowLoc, diag::warn_throw_underaligned_obj);
    Diag(ThrowLoc, diag::note_throw_underaligned_obj)
  for (int I = FunctionScopes.size();
       I-- && isa<LambdaScopeInfo>(FunctionScopes[I]) &&
           cast<LambdaScopeInfo>(FunctionScopes[I])->CallOperator);
    CurLSI = cast<LambdaScopeInfo>(FunctionScopes[I]);

    if (C.isCopyCapture()) {

         "While computing 'this' capture-type for a generic lambda, when we "
         "run out of enclosing LSI's, yet the enclosing DC is a "
         "lambda-call-operator we must be (i.e. Current LSI) in a generic "
         "lambda call operator");

  auto IsThisCaptured =
    for (auto &&C : Closure->captures()) {
      if (C.capturesThis()) {

  bool IsByCopyCapture = false;
  bool IsConstCapture = false;
         IsThisCaptured(Closure, IsByCopyCapture, IsConstCapture)) {
    if (IsByCopyCapture) {
  QualType ThisTy = CXXThisTypeOverride;

    if (method && method->isInstance())
      ThisTy = method->getThisType();

      inTemplateInstantiation() && isa<CXXRecordDecl>(DC)) {

                                    CurContext, Context);
    : S(S), OldCXXThisTypeOverride(S.CXXThisTypeOverride), Enabled(false)
  if (!Enabled || !ContextDecl)

    Record = Template->getTemplatedDecl();
    Record = cast<CXXRecordDecl>(ContextDecl);

  this->Enabled = true;

  Sema.Diag(DiagLoc, diag::note_lambda_this_capture_fixit)
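
// Sema::CheckCXXThisCapture: walk the enclosing lambda/block/captured-region
// scopes from the innermost one outward, diagnosing contexts that cannot
// capture 'this' and then recording the capture in each intervening closure.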
                                bool BuildAndDiagnose,
                                const unsigned *const FunctionScopeIndexToStopAt,
                                const bool ByCopy) {
  assert((!ByCopy || Explicit) && "cannot implicitly capture *this by value");

  const int MaxFunctionScopesIndex = FunctionScopeIndexToStopAt
                                         ? *FunctionScopeIndexToStopAt

  unsigned NumCapturingClosures = 0;
  for (int idx = MaxFunctionScopesIndex; idx >= 0; idx--) {
      if (CSI->CXXThisCaptureIndex != 0) {
        CSI->Captures[CSI->CXXThisCaptureIndex - 1].markUsed(BuildAndDiagnose);

      if (BuildAndDiagnose) {
        Diag(Loc, diag::err_this_capture)
            << (Explicit && idx == MaxFunctionScopesIndex);

          (Explicit && idx == MaxFunctionScopesIndex)) {
        NumCapturingClosures++;

      if (BuildAndDiagnose)
        Diag(Loc, diag::err_this_capture)
            << (Explicit && idx == MaxFunctionScopesIndex);

  if (!BuildAndDiagnose)
    return false;

         "Only a lambda can capture the enclosing object (referred to by "

  for (int idx = MaxFunctionScopesIndex; NumCapturingClosures;
       --idx, --NumCapturingClosures) {

    bool isNested = NumCapturingClosures > 1;

  return Diag(Loc, diag::err_invalid_this_use);

  return Class && Class->isBeingDefined();
                                bool ListInitialization) {
                                      RParenOrBraceLoc, ListInitialization);

  if (!Result.isInvalid() && Result.get()->isInstantiationDependent() &&
      !Result.get()->isTypeDependent())
  else if (Result.isInvalid())
                                           RParenOrBraceLoc, exprs, Ty);
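
// Sema::BuildCXXTypeConstructExpr: functional-style T(...) construction;
// diagnoses deduction failures for deduced class templates and 'auto',
// rejects array value-initialization and function types, and otherwise
// forwards to the initialization code.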
                                bool ListInitialization) {
  assert((!ListInitialization || Exprs.size() == 1) &&
         "List initialization must have exactly one expression.");

                           ? ListInitialization
                                 TyBeginLoc, LParenOrBraceLoc, RParenOrBraceLoc)

      isa<DeducedTemplateSpecializationType>(Deduced)) {

  } else if (Deduced && !Deduced->isDeduced()) {
    if (ListInitialization) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());

      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_init_no_expression)
                       << Ty << FullRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                             diag::err_auto_expr_init_multiple_expressions)
                       << Ty << FullRange);

      Diag(TyBeginLoc, diag::warn_cxx20_compat_auto_expr) << FullRange;

    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << ListInitialization << Ty << FullRange);

      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_deduction_failure)
                       << Ty << Deduce->getType() << FullRange

                           : SourceRange(LParenOrBraceLoc, RParenOrBraceLoc);

  if (Exprs.size() == 1 && !ListInitialization &&
      !isa<InitListExpr>(Exprs[0])) {
    Expr *Arg = Exprs[0];

    if (!ListInitialization)
      return ExprError(Diag(TyBeginLoc, diag::err_value_init_for_array_type)

    return ExprError(Diag(TyBeginLoc, diag::err_init_for_function_type)
                     << Ty << FullRange);

                          diag::err_invalid_incomplete_type_use, FullRange))

  if (Result.isInvalid())

  Expr *Inner = Result.get();
    Inner = BTE->getSubExpr();
  if (!isa<CXXTemporaryObjectExpr>(Inner) &&
      !isa<CXXScalarValueInitExpr>(Inner)) {

  QualType ResultType = Result.get()->getType();
                           : SourceRange(LParenOrBraceLoc, RParenOrBraceLoc);
  for (const auto *D : R) {
    if (const auto *FD = dyn_cast<FunctionDecl>(D)) {

  return llvm::none_of(PreventedBy, [&](const FunctionDecl *FD) {
           "Only single-operand functions should be in PreventedBy");

  unsigned UsualParams = 1;

  if (S.getLangOpts().SizedDeallocation && UsualParams < FD->getNumParams() &&

  if (S.getLangOpts().AlignedAllocation && UsualParams < FD->getNumParams() &&
struct UsualDeallocFnInfo {
  UsualDeallocFnInfo() : Found(), FD(nullptr) {}
      : Found(Found), FD(dyn_cast<FunctionDecl>(Found->getUnderlyingDecl())),

    unsigned NumBaseParams = 1;
    if (FD->isDestroyingOperatorDelete()) {

    if (NumBaseParams < FD->getNumParams() &&
            FD->getParamDecl(NumBaseParams)->getType(),

    if (NumBaseParams < FD->getNumParams() &&
        FD->getParamDecl(NumBaseParams)->getType()->isAlignValT()) {
      HasAlignValT = true;

  explicit operator bool() const { return FD; }

  bool isBetterThan(const UsualDeallocFnInfo &Other, bool WantSize,
                    bool WantAlign) const {
    if (Destroying != Other.Destroying)

    if (HasAlignValT != Other.HasAlignValT)
      return HasAlignValT == WantAlign;

    if (HasSizeT != Other.HasSizeT)
      return HasSizeT == WantSize;

    return CUDAPref > Other.CUDAPref;

  bool Destroying, HasSizeT, HasAlignValT;
  UsualDeallocFnInfo Best;

  for (auto I = R.begin(), E = R.end(); I != E; ++I) {
    UsualDeallocFnInfo Info(S, I.getPair());

        BestFns->push_back(Info);

      if (Best.isBetterThan(Info, WantSize, WantAlign))

    if (BestFns && Info.isBetterThan(Best, WantSize, WantAlign))

      BestFns->push_back(Info);

  if (!record)
    return false;

  if (ops.empty())
    return false;

  return Best && Best.HasSizeT;
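
// 'new' expressions: Sema::ActOnCXXNew digests the new-declarator (array
// bounds beyond the first must be integer constant expressions) and forwards
// to BuildCXXNew.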
  std::optional<Expr *> ArraySize;

    if (Expr *NumElts = (Expr *)Array.NumElts) {
      if (!NumElts->isTypeDependent() && !NumElts->isValueDependent()) {

                NumElts, nullptr, diag::err_new_array_nonconst, AllowFold)

    DirectInitRange = List->getSourceRange();

                     PlacementLParen, PlacementArgs, PlacementRParen,
                     TypeIdParens, AllocType, TInfo, ArraySize, DirectInitRange,

    return PLE->getNumExprs() == 0;
  if (isa<ImplicitValueInitExpr>(Init))
    return !CCE->isListInitialization() &&
           CCE->getConstructor()->isDefaultConstructor();
  assert(isa<InitListExpr>(Init) &&
         "Shouldn't create list CXXConstructExprs for arrays.");
  std::optional<unsigned> AlignmentParam;

    StringRef OSName = AvailabilityAttr::getPlatformNameSourceSpelling(

    bool IsDelete = Kind == OO_Delete || Kind == OO_Array_Delete;
    Diag(Loc, diag::err_aligned_allocation_unavailable)
        << OSVersion.getAsString() << OSVersion.empty();
    Diag(Loc, diag::note_silence_aligned_allocation_unavailable);
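
// Sema::BuildCXXNew: convert and constant-fold the array bound, deduce the
// allocated type for 'auto' and deduced class templates, pick the operator
// new/operator delete pair, build the implicit size/alignment arguments, and
// check the initializer.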
                          std::optional<Expr *> ArraySize,

  if (DirectInitRange.isValid()) {
    assert(Initializer && "Have parens but no initializer.");

           "Initializer expression that cannot have been implicitly created.");
    Exprs = MultiExprArg(List->getExprs(), List->getNumExprs());

                                  DirectInitRange.getEnd());

  if (Deduced && !Deduced->isDeduced() &&
      isa<DeducedTemplateSpecializationType>(Deduced)) {
      Diag(*ArraySize ? (*ArraySize)->getExprLoc() : TypeRange.getBegin(),
           diag::err_deduced_class_template_compound_type)
              << (*ArraySize ? (*ArraySize)->getSourceRange() : TypeRange));

        AllocTypeInfo, Entity, Kind, Exprs);

  } else if (Deduced && !Deduced->isDeduced()) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());

      return ExprError(Diag(StartLoc, diag::err_auto_new_requires_ctor_arg)
                       << AllocType << TypeRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                             diag::err_auto_new_ctor_multiple_expressions)
                       << AllocType << TypeRange);
          << AllocType << TypeRange;
    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << Braced << AllocType << TypeRange);

      return ExprError(Diag(StartLoc, diag::err_auto_new_deduction_failure)
                       << AllocType << Deduce->getType() << TypeRange

    AllocType = Array->getElementType();

  if (ArraySize && *ArraySize &&
      (*ArraySize)->getType()->isNonOverloadPlaceholderType()) {
    ArraySize = result.get();

  std::optional<uint64_t> KnownArraySize;
  if (ArraySize && *ArraySize && !(*ArraySize)->isTypeDependent()) {
            (*ArraySize)->getType()->getAs<RecordType>())
        Diag(StartLoc, diag::warn_cxx98_compat_array_size_conversion)
            << (*ArraySize)->getType() << 0 << "'size_t'";

        SizeConvertDiagnoser(Expr *ArraySize)
              ArraySize(ArraySize) {}

          return S.Diag(Loc, diag::err_array_size_not_integral)

          return S.Diag(Loc, diag::err_array_size_incomplete_type)

          return S.Diag(Loc, diag::err_array_size_explicit_conversion) << T << ConvTy;

          return S.Diag(Loc, diag::err_array_size_ambiguous_conversion) << T;

                               ? diag::warn_cxx98_compat_array_size_conversion
                               : diag::ext_array_size_conversion)
      } SizeDiagnoser(*ArraySize);

      ArraySize = ConvertedSize.get();
      QualType SizeType = (*ArraySize)->getType();

      if (std::optional<llvm::APSInt> Value =
              (*ArraySize)->getIntegerConstantExpr(Context)) {
        if (Value->isSigned() && Value->isNegative()) {
                                 diag::err_typecheck_negative_array_size)
                           << (*ArraySize)->getSourceRange());

          unsigned ActiveSizeBits =
            Diag((*ArraySize)->getBeginLoc(), diag::err_array_too_large)

        KnownArraySize = Value->getZExtValue();
      } else if (TypeIdParens.isValid()) {
        Diag((*ArraySize)->getBeginLoc(), diag::ext_new_paren_array_nonconst)
            << (*ArraySize)->getSourceRange()

  unsigned Alignment =
  bool PassAlignment = getLangOpts().AlignedAllocation &&
                       Alignment > NewAlignment;

          AllocType, ArraySize.has_value(), PassAlignment, PlacementArgs,
          OperatorNew, OperatorDelete))

  bool UsualArrayDeleteWantsSize = false;
    UsualArrayDeleteWantsSize =

  unsigned NumImplicitArgs = PassAlignment ? 2 : 1;
                              NumImplicitArgs, PlacementArgs, AllPlaceArgs,

    if (!AllPlaceArgs.empty())
      PlacementArgs = AllPlaceArgs;

    std::optional<llvm::APInt> AllocationSize;
      AllocationSize = SingleEltSize;

      AllocationSize = llvm::APInt(SizeTyWidth, *KnownArraySize)
                           .umul_ov(SingleEltSize, Overflow);
             "Expected that all the overflows would have been handled already.");

        Context, AllocationSize.value_or(llvm::APInt::getZero(SizeTyWidth)),

                                CK_IntegralCast, &AlignmentLiteral,

    CallArgs.reserve(NumImplicitArgs + PlacementArgs.size());
    CallArgs.emplace_back(AllocationSize
                              ? static_cast<Expr *>(&AllocationSizeLiteral)
                              : &OpaqueAllocationSize);
      CallArgs.emplace_back(&DesiredAlignment);
    CallArgs.insert(CallArgs.end(), PlacementArgs.begin(), PlacementArgs.end());

    checkCall(OperatorNew, Proto, nullptr, CallArgs,
              false, StartLoc, Range, CallType);

    if (PlacementArgs.empty() && !PassAlignment &&

      if (Alignment > NewAlignment)
        Diag(StartLoc, diag::warn_overaligned_type)

      SourceRange InitRange(Exprs.front()->getBeginLoc(),
                            Exprs.back()->getEndLoc());
      Diag(StartLoc, diag::err_new_array_init_args) << InitRange;

    InitType = AllocType;

            dyn_cast_or_null<CXXBindTemporaryExpr>(FullInit.get()))
      FullInit = Binder->getSubExpr();

  if (ArraySize && !*ArraySize) {
      Diag(TypeRange.getEnd(), diag::err_new_array_size_unknown_from_init)

  if (OperatorDelete) {

      PassAlignment, UsualArrayDeleteWantsSize,
      PlacementArgs, TypeIdParens, ArraySize, initStyle,
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 0 << R;
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 1 << R;
               Loc, AllocType, diag::err_new_incomplete_or_sizeless_type, R))
                                  diag::err_allocation_of_abstract_type))
    return Diag(Loc, diag::err_variably_modified_new_type)
    return Diag(Loc, diag::err_address_space_qualified_new)
    return Diag(Loc, diag::err_arc_new_array_without_ownership)
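
// resolveAllocationOverload: run overload resolution over the candidate
// allocation functions; if no candidate taking std::align_val_t is viable,
// retry without the implicit alignment argument, and note the candidates on
// ambiguous or deleted results.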
       Alloc != AllocEnd; ++Alloc) {

    if (PassAlignment) {
      PassAlignment = false;
      Args.erase(Args.begin() + 1);
                                       Operator, &Candidates, AlignArg,

        (Args[1]->getType()->isObjectPointerType() ||
         Args[1]->getType()->isArrayType())) {
      S.Diag(R.getNameLoc(), diag::err_need_header_before_placement_new)

    if (AlignedCandidates) {
        return C.Function->getNumParams() > 1 &&
               C.Function->getParamDecl(1)->getType()->isAlignValT();

        AlignedArgs.reserve(Args.size() + 1);
        AlignedArgs.push_back(Args[0]);
        AlignedArgs.push_back(AlignArg);
        AlignedArgs.append(Args.begin() + 1, Args.end());

      if (AlignedCandidates)
        AlignedCandidates->NoteCandidates(S, AlignedArgs, AlignedCands, "",

        S.PDiag(diag::err_ovl_ambiguous_call)

        S.PDiag(diag::err_ovl_deleted_call)

  llvm_unreachable("Unreachable, bad result from BestViableFunction");
  AllocArgs.reserve((PassAlignment ? 2 : 1) + PlaceArgs.size());

                    llvm::APInt::getZero(
  AllocArgs.push_back(&Size);

  if (PassAlignment) {
    AllocArgs.push_back(&Align);
  AllocArgs.insert(AllocArgs.end(), PlaceArgs.begin(), PlaceArgs.end());

      IsArray ? OO_Array_New : OO_New);

    if (PlaceArgs.empty()) {
      Diag(StartLoc, diag::err_openclcxx_not_supported) << "default new";
      Diag(StartLoc, diag::err_openclcxx_placement_new);

    assert(!R.empty() && "implicitly declared allocation functions not found");
    assert(!R.isAmbiguous() && "global allocation functions are ambiguous");

                                 OperatorNew, nullptr,

  OperatorDelete = nullptr;

    while (Filter.hasNext()) {
      auto *FD = dyn_cast<FunctionDecl>(Filter.next()->getUnderlyingDecl());
      if (FD && FD->isDestroyingOperatorDelete())

  bool FoundGlobalDelete = FoundDelete.empty();
  if (FoundDelete.empty()) {

  bool isPlacementNew = !PlaceArgs.empty() || OperatorNew->param_size() != 1 ||

  if (isPlacementNew) {

      for (unsigned I = 1, N = Proto->getNumParams(); I < N; ++I)
        ArgTypes.push_back(Proto->getParamType(I));

      EPI.Variadic = Proto->isVariadic();

      ExpectedFunctionType

                                      DEnd = FoundDelete.end();

            dyn_cast<FunctionTemplateDecl>((*D)->getUnderlyingDecl())) {

        Fn = cast<FunctionDecl>((*D)->getUnderlyingDecl());

                                  ExpectedFunctionType,
                                  ExpectedFunctionType))
        Matches.push_back(std::make_pair(D.getPair(), Fn));

        *this, FoundDelete, FoundGlobalDelete,
      Matches.push_back(std::make_pair(Selected.Found, Selected.FD));

    for (auto Fn : BestDeallocFns)
      Matches.push_back(std::make_pair(Fn.Found, Fn.FD));

  if (Matches.size() == 1) {
    OperatorDelete = Matches[0].second;

      UsualDeallocFnInfo Info(*this,

      bool IsSizedDelete = Info.HasSizeT;
      if (IsSizedDelete && !FoundGlobalDelete) {
        auto NonSizedDelete =
        if (NonSizedDelete && !NonSizedDelete.HasSizeT &&
            NonSizedDelete.HasAlignValT == Info.HasAlignValT)
          IsSizedDelete = false;

      if (IsSizedDelete) {
                        PlaceArgs.back()->getEndLoc());
        Diag(StartLoc, diag::err_placement_new_non_placement_delete) << R;

  } else if (!Matches.empty()) {
    Diag(StartLoc, diag::warn_ambiguous_suitable_delete_function_found)
        << DeleteName << AllocElemType;

    for (auto &Match : Matches)
      Diag(Match.second->getLocation(),
           diag::note_member_declared_here) << DeleteName;
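
// Sema::DeclareGlobalNewDelete and DeclareGlobalAllocationFunction: implicitly
// declare the predefined global operator new/delete overloads, including the
// sized and std::align_val_t variants when the corresponding language options
// are enabled.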
  if (GlobalModuleFragment) {

    if (GlobalModuleFragment) {
      AlignValT->setModuleOwnershipKind(
      AlignValT->setLocalOwningModule(GlobalModuleFragment);

    AlignValT->setImplicit(true);

        Params.push_back(Param);

    bool HasSizedVariant = getLangOpts().SizedDeallocation &&
                           (Kind == OO_Delete || Kind == OO_Array_Delete);
    bool HasAlignedVariant = getLangOpts().AlignedAllocation;

    int NumSizeVariants = (HasSizedVariant ? 2 : 1);
    int NumAlignVariants = (HasAlignedVariant ? 2 : 1);
    for (int Sized = 0; Sized < NumSizeVariants; ++Sized) {
        Params.push_back(SizeT);

      for (int Aligned = 0; Aligned < NumAlignVariants; ++Aligned) {

  DeclareGlobalAllocationFunctions(OO_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Array_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Delete, Context.VoidTy, VoidPtr);
  DeclareGlobalAllocationFunctions(OO_Array_Delete, Context.VoidTy, VoidPtr);

    PopGlobalModuleFragment();
       Alloc != AllocEnd; ++Alloc) {

    if (FunctionDecl *Func = dyn_cast<FunctionDecl>(*Alloc)) {
      if (Func->getNumParams() == Params.size()) {
        for (auto *P : Func->parameters())
          FuncParams.push_back(

          Func->setVisibleDespiteOwningModule();

                             false, false, true));

  bool HasBadAllocExceptionSpec
    = (Name.getCXXOverloadedOperator() == OO_New ||
       Name.getCXXOverloadedOperator() == OO_Array_New);
  if (HasBadAllocExceptionSpec) {
      assert(StdBadAlloc && "Must have std::bad_alloc declared");

  auto CreateAllocationFunctionDecl = [&](Attr *ExtraAttr) {

    if (HasBadAllocExceptionSpec && getLangOpts().NewInfallible)

    if (GlobalModuleFragment) {

    Alloc->addAttr(VisibilityAttr::CreateImplicit(
                       ? VisibilityAttr::Hidden
                       : VisibilityAttr::Default));

      ParamDecls.back()->setImplicit();
    Alloc->setParams(ParamDecls);

  CreateAllocationFunctionDecl(nullptr);

    CreateAllocationFunctionDecl(CUDAHostAttr::CreateImplicit(Context));
    CreateAllocationFunctionDecl(CUDADeviceAttr::CreateImplicit(Context));
                                    bool CanProvideSize,

  assert(Result.FD && "operator delete missing from global scope?");

  return OperatorDelete;

                                   bool WantSize, bool WantAligned) {

                               Overaligned, &Matches);

  if (Matches.size() == 1) {
    Operator = cast<CXXMethodDecl>(Matches[0].FD);
      Diag(StartLoc, diag::err_deleted_function_use);

  if (!Matches.empty()) {
    Diag(StartLoc, diag::err_ambiguous_suitable_delete_member_function_found)
    for (auto &Match : Matches)
      Diag(Match.FD->getLocation(), diag::note_member_declared_here) << Name;

  if (!Found.empty()) {
    Diag(StartLoc, diag::err_no_suitable_delete_member_function_found)
      Diag(D->getUnderlyingDecl()->getLocation(),
           diag::note_member_declared_here) << Name;
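
// MismatchingNewDeleteDetector: inspects the initializers that may have
// produced a deleted pointer (constructor member initializers, in-class
// initializers, or a variable's initializer) and warns when a pointer created
// with new[] is freed with delete, or vice versa.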
class MismatchingNewDeleteDetector {
  enum MismatchResult {
    MemberInitMismatches,

  explicit MismatchingNewDeleteDetector(bool EndOfTU)
      : Field(nullptr), IsArrayForm(false), EndOfTU(EndOfTU),
        HasUndefinedConstructors(false) {}

  MismatchResult analyzeField(FieldDecl *Field, bool DeleteWasArrayForm);

  bool HasUndefinedConstructors;

  MismatchResult analyzeMemberExpr(const MemberExpr *ME);

  MismatchResult analyzeInClassInitializer();
MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeDeleteExpr(const CXXDeleteExpr *DE) {
  assert(DE && "Expected delete-expression");

  if (const MemberExpr *ME = dyn_cast<const MemberExpr>(E)) {
    return analyzeMemberExpr(ME);
  } else if (const DeclRefExpr *D = dyn_cast<const DeclRefExpr>(E)) {
    if (!hasMatchingVarInit(D))
      return VarInitMismatches;

MismatchingNewDeleteDetector::getNewExprFromInitListOrExpr(const Expr *E) {
  assert(E != nullptr && "Expected a valid initializer expression");

  if (const InitListExpr *ILE = dyn_cast<const InitListExpr>(E)) {
    if (ILE->getNumInits() == 1)
      E = dyn_cast<const CXXNewExpr>(ILE->getInit(0)->IgnoreParenImpCasts());

  return dyn_cast_or_null<const CXXNewExpr>(E);

bool MismatchingNewDeleteDetector::hasMatchingNewInCtorInit(
      (NE = getNewExprFromInitListOrExpr(CI->getInit()))) {
    if (NE->isArray() == IsArrayForm)

    NewExprs.push_back(NE);

bool MismatchingNewDeleteDetector::hasMatchingNewInCtor(
    HasUndefinedConstructors = true;

  for (const auto *CI : cast<const CXXConstructorDecl>(Definition)->inits()) {
    if (hasMatchingNewInCtorInit(CI))

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeInClassInitializer() {
  assert(Field != nullptr && "This should be called only for members");
  const Expr *InitExpr = Field->getInClassInitializer();
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (const CXXNewExpr *NE = getNewExprFromInitListOrExpr(InitExpr)) {
    if (NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);
      return MemberInitMismatches;

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeField(FieldDecl *Field,
                                           bool DeleteWasArrayForm) {
  assert(Field != nullptr && "Analysis requires a valid class member.");
  this->Field = Field;
  IsArrayForm = DeleteWasArrayForm;
  for (const auto *CD : RD->ctors()) {
    if (hasMatchingNewInCtor(CD))
  if (HasUndefinedConstructors)
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (!NewExprs.empty())
    return MemberInitMismatches;
  return Field->hasInClassInitializer() ? analyzeInClassInitializer()

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeMemberExpr(const MemberExpr *ME) {
  assert(ME != nullptr && "Expected a member expression");
  return analyzeField(F, IsArrayForm);

bool MismatchingNewDeleteDetector::hasMatchingVarInit(const DeclRefExpr *D) {
    if (VD->hasInit() && (NE = getNewExprFromInitListOrExpr(VD->getInit())) &&
        NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);

  return NewExprs.empty();
                                     const MismatchingNewDeleteDetector &Detector) {
  if (!Detector.IsArrayForm)

  SemaRef.Diag(DeleteLoc, diag::warn_mismatched_delete_new)
      << Detector.IsArrayForm << H;
  for (const auto *NE : Detector.NewExprs)
    SemaRef.Diag(NE->getExprLoc(), diag::note_allocated_here)
        << Detector.IsArrayForm;

void Sema::AnalyzeDeleteExprMismatch(const CXXDeleteExpr *DE) {
  MismatchingNewDeleteDetector Detector(false);
  switch (Detector.analyzeDeleteExpr(DE)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
  case MismatchingNewDeleteDetector::MemberInitMismatches: {
  case MismatchingNewDeleteDetector::AnalyzeLater: {
  case MismatchingNewDeleteDetector::NoMismatch:

                                     bool DeleteWasArrayForm) {
  MismatchingNewDeleteDetector Detector(true);
  switch (Detector.analyzeField(Field, DeleteWasArrayForm)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
    llvm_unreachable("This analysis should have been done for class members.");
  case MismatchingNewDeleteDetector::AnalyzeLater:
    llvm_unreachable("Analysis cannot be postponed any point beyond end of "
                     "translation unit.");
  case MismatchingNewDeleteDetector::MemberInitMismatches:
  case MismatchingNewDeleteDetector::NoMismatch:
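
// Sema::ActOnCXXDelete: convert the operand to a single pointer to a complete
// object type, pick the operator delete to call (class-specific first, then
// global), check destructor access, and diagnose array/scalar form mismatches.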
                                bool ArrayForm, Expr *ExE) {

  bool ArrayFormAsWritten = ArrayForm;
  bool UsualArrayDeleteWantsSize = false;

        if (ConvPtrType->getPointeeType()->isIncompleteOrObjectType())

        return S.Diag(Loc, diag::err_delete_operand) << T;

        return S.Diag(Loc, diag::err_delete_incomplete_class_type) << T;

        return S.Diag(Loc, diag::err_delete_explicit_conversion) << T << ConvTy;

        return S.Diag(Loc, diag::err_ambiguous_delete_operand) << T;

        llvm_unreachable("conversion functions are permitted");

    if (!Converter.match(Type))

             diag::err_address_space_qualified_delete)

      Diag(StartLoc, diag::ext_delete_void_ptr_operand)

                            diag::warn_delete_incomplete, Ex.get())) {
        PointeeRD = cast<CXXRecordDecl>(RT->getDecl());

      Diag(StartLoc, diag::warn_delete_array_type)

        ArrayForm ? OO_Array_Delete : OO_Delete);

        UsualArrayDeleteWantsSize =

    else if (OperatorDelete && isa<CXXMethodDecl>(OperatorDelete))
      UsualArrayDeleteWantsSize =
          UsualDeallocFnInfo(*this,

    if (!OperatorDelete) {
        Diag(StartLoc, diag::err_openclcxx_not_supported) << "default delete";

      bool CanProvideSize =
          IsComplete && (!ArrayForm || UsualArrayDeleteWantsSize ||
                                            Overaligned, DeleteName);

    bool IsVirtualDelete = false;
                              PDiag(diag::err_access_dtor) << PointeeElem);
        IsVirtualDelete = Dtor->isVirtual();

      UsualArrayDeleteWantsSize, OperatorDelete, Ex.get(), StartLoc);
  AnalyzeDeleteExprMismatch(Result);
      IsDelete ? OO_Delete : OO_New);

  assert(!R.empty() && "implicitly declared allocation functions not found");
  assert(!R.isAmbiguous() && "global allocation functions are ambiguous");

       FnOvl != FnOvlEnd; ++FnOvl) {
    NamedDecl *D = (*FnOvl)->getUnderlyingDecl();

           "class members should not be considered");

      S.Diag(R.getNameLoc(), diag::err_builtin_operator_new_delete_not_usual)
          << (IsDelete ? 1 : 0) << Range;
      S.Diag(FnDecl->getLocation(), diag::note_non_usual_function_declared_here)

        S.PDiag(diag::err_ovl_no_viable_function_in_call)

        S.PDiag(diag::err_ovl_ambiguous_call)

  llvm_unreachable("Unreachable, bad result from BestViableFunction");

Sema::SemaBuiltinOperatorNewDeleteOverloaded(ExprResult TheCallResult,
  CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
        << (IsDelete ? "__builtin_operator_delete" : "__builtin_operator_new")

                                     OperatorNewOrDelete))
  assert(OperatorNewOrDelete && "should be found");

  for (unsigned i = 0; i != TheCall->getNumArgs(); ++i) {

  assert(Callee && Callee->getCastKind() == CK_BuiltinFnToFnPtr &&
         "Callee expected to be implicit cast to a builtin function pointer");

  return TheCallResult;
                                bool IsDelete, bool CallCanBeVirtual,
                                bool WarnOnNonAbstractTypes,

    Diag(Loc, diag::warn_delete_abstract_non_virtual_dtor) << (IsDelete ? 0 : 1)
  } else if (WarnOnNonAbstractTypes) {
    Diag(Loc, diag::warn_delete_non_virtual_dtor) << (IsDelete ? 0 : 1)

    Diag(DtorLoc, diag::note_delete_non_virtual)
                     diag::err_invalid_use_of_function_type)
                     diag::err_invalid_use_of_array_type)

  llvm_unreachable("unexpected condition kind");

           diag::err_constexpr_if_condition_expression_is_not_constant);

    From = Cast->getSubExpr();

  if (!ToPtrType->getPointeeType().hasQualifiers()) {
    switch (StrLit->getKind()) {
      return (ToPointeeType->getKind() == BuiltinType::Char_U ||
              ToPointeeType->getKind() == BuiltinType::Char_S);
                                bool HadMultipleCandidates,
  default: llvm_unreachable("Unhandled cast kind!");
  case CK_ConstructorConversion: {
                                  diag::err_allocation_of_abstract_type))

        CastLoc, Ty, FoundDecl, cast<CXXConstructorDecl>(Method),
        ConstructorArgs, HadMultipleCandidates,
        false, false, false,
    if (Result.isInvalid())

  case CK_UserDefinedConversion: {
                                HadMultipleCandidates);
    if (Result.isInvalid())

                                     CK_UserDefinedConversion, Result.get(),
                                     nullptr, Result.get()->getValueKind(),

    assert(FD && "no conversion function for user-defined conversion seq");
      CastKind = CK_UserDefinedConversion;
      CastKind = CK_ConstructorConversion;

    From = CastArg.get();

                          PDiag(diag::err_typecheck_ambiguous_condition)

    llvm_unreachable("bad conversion");

        ToType, From->getType(), From, Action);
    assert(Diagnosed && "failed to diagnose bad conversion"); (void)Diagnosed;
                             ConstructorArgs, false,
                             false, false, false,

                             false, false, false,

    From = Checked.get();

    ToAtomicType = ToType;
    ToType = ToAtomic->getValueType();

  QualType InitialFromType = FromType;

  switch (SCS.First) {
      FromType = FromAtomic->getValueType().getUnqualifiedType();

      From = FromRes.get();

    llvm_unreachable("Improper first standard conversion");

           "only enums with fixed underlying type can promote to bool");

        CK = CK_FloatingComplexCast;
        CK = CK_FloatingComplexToIntegralComplex;
        CK = CK_IntegralComplexToFloatingComplex;
        CK = CK_IntegralComplexCast;

                               nullptr, CCK).get();

                 diag::ext_typecheck_convert_incompatible_pointer)
                 diag::ext_typecheck_convert_incompatible_pointer)

    } else if (getLangOpts().allowsNonTrivialObjCLifetimeQualifiers() &&
      Diag(From->getBeginLoc(), diag::err_arc_convesion_of_weak_unavailable)

    if (Kind == CK_BlockPointerToObjCPointerCast) {

                             &BasePath, CCK).get();

      QualType ElType = ToComplex->getElementType();

                     isFloatingComplex ? CK_FloatingCast : CK_FloatingToIntegral).get();
                     isFloatingComplex ? CK_IntegralToFloating : CK_IntegralCast).get();
                     isFloatingComplex ? CK_FloatingRealToComplex
                                       : CK_IntegralRealToComplex).get();

      QualType ElType = FromComplex->getElementType();
                        isFloatingComplex ? CK_FloatingComplexToReal
                                          : CK_IntegralComplexToReal,
                          isFloatingComplex ? CK_FloatingCast
                                            : CK_IntegralToFloating,
                          isFloatingComplex ? CK_FloatingToIntegral

        AddrSpaceL != AddrSpaceR ? CK_AddressSpaceConversion : CK_BitCast;

      From = FromRes.get();
             "Improper transparent union conversion");

                             CK_ZeroToOCLOpaqueType,

    llvm_unreachable("Improper second standard conversion");

  switch (SCS.Third) {
      CK = CK_AddressSpaceConversion;
      CK = CK_AddressSpaceConversion;
          << InitialFromType << ToType;

                   ? diag::ext_deprecated_string_literal_conversion
                   : diag::warn_deprecated_string_literal_conversion)

    llvm_unreachable("Improper third standard conversion");

  if (!ToAtomicType.isNull()) {
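
// Unary type traits: CheckUnaryTypeTraitTypeCompleteness decides which traits
// require (and therefore trigger instantiation of) a complete type, and
// EvaluateUnaryTypeTrait below computes the value of each trait.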
  default: llvm_unreachable("not a UTT");

  case UTT_IsCompleteType:

  case UTT_IsIntegral:
  case UTT_IsFloatingPoint:
  case UTT_IsBoundedArray:
  case UTT_IsNullPointer:
  case UTT_IsReferenceable:
  case UTT_IsLvalueReference:
  case UTT_IsRvalueReference:
  case UTT_IsMemberFunctionPointer:
  case UTT_IsMemberObjectPointer:
  case UTT_IsScopedEnum:
  case UTT_IsFunction:
  case UTT_IsReference:
  case UTT_IsArithmetic:
  case UTT_IsFundamental:
  case UTT_IsCompound:
  case UTT_IsMemberPointer:
  case UTT_IsVolatile:
  case UTT_IsUnboundedArray:
  case UTT_IsUnsigned:
  case UTT_IsInterfaceClass:

  case UTT_IsPolymorphic:
  case UTT_IsAbstract:
        Loc, ArgTy, diag::err_incomplete_type_used_in_type_trait_expr);
        Loc, ArgTy, diag::err_incomplete_type_used_in_type_trait_expr);

  case UTT_IsAggregate:
        Loc, ArgTy, diag::err_incomplete_type_used_in_type_trait_expr);

  case UTT_IsTriviallyCopyable:
  case UTT_IsStandardLayout:
  case UTT_IsTriviallyRelocatable:
  case UTT_HasNothrowAssign:
  case UTT_HasNothrowMoveAssign:
  case UTT_HasNothrowConstructor:
  case UTT_HasNothrowCopy:
  case UTT_HasTrivialAssign:
  case UTT_HasTrivialMoveAssign:
  case UTT_HasTrivialDefaultConstructor:
  case UTT_HasTrivialMoveConstructor:
  case UTT_HasTrivialCopy:
  case UTT_HasTrivialDestructor:
  case UTT_HasVirtualDestructor:
  case UTT_IsDestructible:
  case UTT_IsNothrowDestructible:
  case UTT_IsTriviallyDestructible:
  case UTT_HasUniqueObjectRepresentations:
        Loc, ArgTy, diag::err_incomplete_type_used_in_type_trait_expr);
4926 if ((RD->*HasTrivial)() && !(RD->*HasNonTrivial)())
4933 bool FoundOperator =
false;
4936 Op != OpEnd; ++Op) {
4937 if (isa<FunctionTemplateDecl>(*Op))
4941 if((Operator->*IsDesiredOp)()) {
4942 FoundOperator =
true;
4949 return FoundOperator;
4956 assert(!T->
isDependentType() &&
"Cannot evaluate traits of dependent type");
4960 default: llvm_unreachable(
"not a UTT");
4965 case UTT_IsIntegral:
4967 case UTT_IsFloatingPoint:
4971 case UTT_IsBoundedArray:
4976 Self.
Diag(KeyLoc, diag::err_vla_unsupported)
4977 << 1 << tok::kw___is_bounded_array;
4979 case UTT_IsUnboundedArray:
4984 Self.
Diag(KeyLoc, diag::err_vla_unsupported)
4985 << 1 << tok::kw___is_unbounded_array;
4989 case UTT_IsNullPointer:
4991 case UTT_IsLvalueReference:
4993 case UTT_IsRvalueReference:
4995 case UTT_IsMemberFunctionPointer:
4997 case UTT_IsMemberObjectPointer:
5001 case UTT_IsScopedEnum:
5007 case UTT_IsFunction:
5012 case UTT_IsReference:
5014 case UTT_IsArithmetic:
5016 case UTT_IsFundamental:
5039 case UTT_IsCompound:
5041 case UTT_IsMemberPointer:
5048 case UTT_IsVolatile:
5052 case UTT_IsTriviallyCopyable:
5054 case UTT_IsStandardLayout:
5064 case UTT_IsPolymorphic:
5068 case UTT_IsAbstract:
5072 case UTT_IsAggregate:
5081 case UTT_IsInterfaceClass:
5086 return RD->
hasAttr<FinalAttr>();
5093 case UTT_IsUnsigned:
5112 case UTT_HasTrivialDefaultConstructor:
5119 if (
CXXRecordDecl *RD = C.getBaseElementType(T)->getAsCXXRecordDecl())
5123 case UTT_HasTrivialMoveConstructor:
5129 if (
CXXRecordDecl *RD = C.getBaseElementType(T)->getAsCXXRecordDecl())
5132 case UTT_HasTrivialCopy:
5144 case UTT_HasTrivialMoveAssign:
5150 if (
CXXRecordDecl *RD = C.getBaseElementType(T)->getAsCXXRecordDecl())
5153 case UTT_HasTrivialAssign:
5174 case UTT_IsDestructible:
5175 case UTT_IsTriviallyDestructible:
5176 case UTT_IsNothrowDestructible:
5202 if (
auto *RD = C.getBaseElementType(T)->getAsCXXRecordDecl()) {
5209 if (Destructor->isDeleted())
5211 if (C.getLangOpts().AccessControl && Destructor->getAccess() !=
AS_public)
5213 if (UTT == UTT_IsNothrowDestructible) {
5222 case UTT_HasTrivialDestructor:
5237 if (
CXXRecordDecl *RD = C.getBaseElementType(T)->getAsCXXRecordDecl())
5241 case UTT_HasNothrowAssign:
5249 if (C.getBaseElementType(T).isConstQualified())
5262 case UTT_HasNothrowMoveAssign:
5275 case UTT_HasNothrowCopy:
5288 bool FoundConstructor =
false;
5294 if (isa<FunctionTemplateDecl>(ND->getUnderlyingDecl()))
5297 if (isa<UsingDecl>(ND))
5299 auto *Constructor = cast<CXXConstructorDecl>(ND->getUnderlyingDecl());
5300 if (Constructor->isCopyConstructor(FoundTQs)) {
5301 FoundConstructor =
true;
5313 return FoundConstructor;
5316 case UTT_HasNothrowConstructor:
5324 if (
CXXRecordDecl *RD = C.getBaseElementType(T)->getAsCXXRecordDecl()) {
5329 bool FoundConstructor =
false;
5332 if (isa<FunctionTemplateDecl>(ND->getUnderlyingDecl()))
5335 if (isa<UsingDecl>(ND))
5337 auto *Constructor = cast<CXXConstructorDecl>(ND->getUnderlyingDecl());
5338 if (Constructor->isDefaultConstructor()) {
5339 FoundConstructor =
true;
5350 return FoundConstructor;
5353 case UTT_HasVirtualDestructor:
5359 return Destructor->isVirtual();