#include "llvm/ADT/APInt.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/STLForwardCompat.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/TypeSize.h"
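// The fragments below are from the constructor- and destructor-name handling
// paths: resolving an injected-class-name to a constructor name and looking
// up a destructor name such as the one in this illustrative snippet:
//   struct S {};
//   void f(S *p) { p->~S(); }  // '~S' is resolved by this lookup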
  llvm_unreachable(
      "Nested name specifier is not a type for inheriting ctor");

         "not a constructor name");

  auto *RD = dyn_cast<CXXRecordDecl>(ND);
  if (RD && RD->isInjectedClassName()) {
    InjectedClassName = RD;

  if (!InjectedClassName) {
         diag::err_incomplete_nested_name_spec) << CurClass << SS.getRange();

                              bool EnteringContext) {

  auto IsAcceptableResult = [&](NamedDecl *D) -> bool {
    auto *Type = dyn_cast<TypeDecl>(D->getUnderlyingDecl());

    unsigned NumAcceptableResults = 0;
      if (IsAcceptableResult(D))
        ++NumAcceptableResults;

      if (auto *RD = dyn_cast<CXXRecordDecl>(D))
        if (RD->isInjectedClassName())
          D = cast<NamedDecl>(RD->getParent());

      if (FoundDeclSet.insert(D).second)
        FoundDecls.push_back(D);

    if (Found.isAmbiguous() && NumAcceptableResults == 1) {
      Diag(NameLoc, diag::ext_dtor_name_ambiguous);

      if (auto *TD = dyn_cast<TypeDecl>(D->getUnderlyingDecl()))

      if (!IsAcceptableResult(D))

    if (Found.isAmbiguous())

      if (IsAcceptableResult(Type)) {

  bool IsDependent = false;

  auto LookupInObjectType = [&]() -> ParsedType {
    if (Failed || SearchType.isNull())

    return CheckLookupResult(Found);

    return CheckLookupResult(Found);

    return CheckLookupResult(Found);

    if (ParsedType T = LookupInNestedNameSpec(PrefixSS))

  unsigned NumNonExtensionDecls = FoundDecls.size();

    Diag(SS.getEndLoc(), diag::ext_qualified_dtor_named_in_lexical_scope)
    Diag(FoundDecls.back()->getLocation(), diag::note_destructor_type_here)

  FoundDecls.resize(NumNonExtensionDecls);

  std::stable_sort(FoundDecls.begin(), FoundDecls.end(),
                     return isa<TypeDecl>(A->getUnderlyingDecl()) >
                            isa<TypeDecl>(B->getUnderlyingDecl());

  auto MakeFixItHint = [&]{

      Destroyed = dyn_cast_or_null<CXXRecordDecl>(S->getEntity());

  if (FoundDecls.empty()) {
    Diag(NameLoc, diag::err_undeclared_destructor_name)
        << &II << MakeFixItHint();
  } else if (!SearchType.isNull() && FoundDecls.size() == 1) {
    if (auto *TD = dyn_cast<TypeDecl>(FoundDecls[0]->getUnderlyingDecl())) {
      assert(!SearchType.isNull() &&
             "should only reject a type result if we have a search type");
      Diag(NameLoc, diag::err_destructor_expr_type_mismatch)
          << T << SearchType << MakeFixItHint();
      Diag(NameLoc, diag::err_destructor_expr_nontype)
          << &II << MakeFixItHint();
    Diag(NameLoc, SearchType.isNull() ? diag::err_destructor_name_nontype
                                      : diag::err_destructor_expr_mismatch)
        << &II << SearchType << MakeFixItHint();

    if (auto *TD = dyn_cast<TypeDecl>(FoundD->getUnderlyingDecl()))
      Diag(FoundD->getLocation(), diag::note_destructor_type_here)
      Diag(FoundD->getLocation(), diag::note_destructor_nontype_here)

         "unexpected type in getDestructorType");
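// Next: validation of literal-operator identifiers, then typeid and the
// Microsoft __uuidof extension. Illustrative forms handled here:
//   unsigned long long operator""_kb(unsigned long long);   // literal operator id
//   const std::type_info &ti = typeid(*p);   // needs <typeinfo>; RTTI checks apply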
                                Name.getSourceRange(),
                                (StringRef("operator\"\"") + II->getName()).str());

    Diag(Loc, diag::warn_reserved_extern_symbol)
        << II << static_cast<int>(Status) << Hint;
    Diag(Loc, diag::warn_deprecated_literal_operator_id) << II << Hint;

    Diag(Name.getBeginLoc(), diag::err_literal_operator_id_outside_namespace)

  llvm_unreachable("unknown nested name specifier kind");

    return ExprError(
        Diag(TypeidLoc, diag::err_variably_modified_typeid) << T);

  bool WasEvaluated = false;

    CXXRecordDecl *RecordD = cast<CXXRecordDecl>(RecordT->getDecl());

    return ExprError(Diag(TypeidLoc, diag::err_variably_modified_typeid)
                         ? diag::warn_side_effects_typeid
                         : diag::warn_side_effects_unevaluated_context);

    return ExprError(Diag(OpLoc, diag::err_openclcxx_not_supported)
    return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));
    return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));
    return ExprError(Diag(OpLoc, diag::err_no_typeid_with_fno_rtti));

    if (auto *CTE = dyn_cast<CXXTypeidExpr>(Result.get()))
      if (CTE->isPotentiallyEvaluated() && !CTE->isMostDerived(Context))
        Diag(OpLoc, diag::warn_no_typeid_with_rtti_disabled)

    if (const auto *Uuid = TD->getMostRecentDecl()->getAttr<UuidAttr>()) {
      UuidAttrs.insert(Uuid);

    if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(TD)) {
        const UuidAttr *UuidForTA = nullptr;
          UuidAttrs.insert(UuidForTA);

  if (!Operand->getType()->isDependentType()) {
    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();

    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();
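// Next: C++ bool literals and throw-expressions, including the checks that
// exceptions are enabled for the current target (OpenMP device, CUDA, SIMD
// regions, OpenACC compute constructs). Illustrative construct:
//   throw Widget{};   // rejected via err_exceptions_disabled under -fno-exceptions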
  assert((Kind == tok::kw_true || Kind == tok::kw_false) &&
         "Unknown C++ Boolean value!");

  bool IsThrownVarInScope = false;

    if (const auto *DRE = dyn_cast<DeclRefExpr>(Ex->IgnoreParens()))
      if (const auto *Var = dyn_cast<VarDecl>(DRE->getDecl());
          Var && Var->hasLocalStorage() &&
          !Var->getType().isVolatileQualified()) {
        for (; S; S = S->getParent()) {
          if (S->isDeclScope(Var)) {
            IsThrownVarInScope = true;

                              bool IsThrownVarInScope) {
  const bool IsOpenMPGPUTarget =
      getLangOpts().OpenMPIsTargetDevice && (T.isNVPTX() || T.isAMDGCN());

  if (!IsOpenMPGPUTarget && !getLangOpts().CXXExceptions &&
    targetDiag(OpLoc, diag::err_exceptions_disabled) << "throw";

  if (IsOpenMPGPUTarget)
    targetDiag(OpLoc, diag::warn_throw_not_valid_on_target) << T.str();

        << "throw" << llvm::to_underlying(CUDA().CurrentTarget());

    Diag(OpLoc, diag::err_omp_simd_region_cannot_use_stmt) << "throw";

    Diag(OpLoc, diag::err_acc_branch_in_out_compute_construct)

    llvm::DenseMap<CXXRecordDecl *, unsigned> &SubobjectsSeen,
    llvm::SmallPtrSetImpl<CXXRecordDecl *> &VBases,
    llvm::SetVector<CXXRecordDecl *> &PublicSubobjectsSeen,
    bool ParentIsPublic) {
    CXXRecordDecl *BaseDecl = BS.getType()->getAsCXXRecordDecl();
      NewSubobject = VBases.insert(BaseDecl).second;
      ++SubobjectsSeen[BaseDecl];
    bool PublicPath = ParentIsPublic && BS.getAccessSpecifier() == AS_public;
      PublicSubobjectsSeen.insert(BaseDecl);

  llvm::DenseMap<CXXRecordDecl *, unsigned> SubobjectsSeen;
  llvm::SmallSet<CXXRecordDecl *, 2> VBases;
  llvm::SetVector<CXXRecordDecl *> PublicSubobjectsSeen;
  SubobjectsSeen[RD] = 1;
  PublicSubobjectsSeen.insert(RD);
  for (CXXRecordDecl *PublicSubobject : PublicSubobjectsSeen) {
    if (SubobjectsSeen[PublicSubobject] > 1)
    Objects.push_back(PublicSubobject);

  bool isPointer = false;
                          isPointer ? diag::err_throw_incomplete_ptr
                                    : diag::err_throw_incomplete,
                        diag::err_throw_abstract_type, E))
                          PDiag(diag::err_access_dtor_exception) << Ty);
  for (CXXRecordDecl *Subobject : UnambiguousPublicSubobjects) {
    if (ExnObjAlign < TypeAlign) {
      Diag(ThrowLoc, diag::warn_throw_underaligned_obj);
      Diag(ThrowLoc, diag::note_throw_underaligned_obj)
  if (!isPointer && getLangOpts().AssumeNothrowExceptionDtor) {
      auto Ty = Dtor->getType();
        Diag(ThrowLoc, diag::err_throw_object_throwing_dtor) << RD;
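// Next: computing the type of 'this' (including inside generic lambdas) and
// capturing '*this' into enclosing lambdas, by reference or by copy:
//   [this]  { return m; }   // capture the pointer
//   [*this] { return m; }   // C++17: capture the enclosing object by copy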
  for (int I = FunctionScopes.size();
       I-- && isa<LambdaScopeInfo>(FunctionScopes[I]) &&
           cast<LambdaScopeInfo>(FunctionScopes[I])->CallOperator);
    CurLSI = cast<LambdaScopeInfo>(FunctionScopes[I]);

      if (C.isCopyCapture()) {

           "While computing 'this' capture-type for a generic lambda, when we "
           "run out of enclosing LSI's, yet the enclosing DC is a "
           "lambda-call-operator we must be (i.e. Current LSI) in a generic "
           "lambda call operator");

  auto IsThisCaptured =
      if (C.capturesThis()) {

    bool IsByCopyCapture = false;
    bool IsConstCapture = false;
        IsThisCaptured(Closure, IsByCopyCapture, IsConstCapture)) {
      if (IsByCopyCapture) {

  if (method && method->isImplicitObjectMemberFunction())
    ThisTy = method->getThisType().getNonReferenceType();

    : S(S), OldCXXThisTypeOverride(S.CXXThisTypeOverride), Enabled(false)
  if (!Enabled || !ContextDecl)
    Record = Template->getTemplatedDecl();
    Record = cast<CXXRecordDecl>(ContextDecl);
  this->Enabled = true;

  Sema.Diag(DiagLoc, diag::note_lambda_this_capture_fixit)

                                bool BuildAndDiagnose,
                                const unsigned *const FunctionScopeIndexToStopAt,
                                const bool ByCopy) {
  assert((!ByCopy || Explicit) && "cannot implicitly capture *this by value");

  const int MaxFunctionScopesIndex = FunctionScopeIndexToStopAt
                                         ? *FunctionScopeIndexToStopAt

  unsigned NumCapturingClosures = 0;
  for (int idx = MaxFunctionScopesIndex; idx >= 0; idx--) {
      if (CSI->CXXThisCaptureIndex != 0) {
        CSI->Captures[CSI->CXXThisCaptureIndex - 1].markUsed(BuildAndDiagnose);

        if (BuildAndDiagnose) {
          Diag(Loc, diag::err_this_capture)
              << (Explicit && idx == MaxFunctionScopesIndex);

          (Explicit && idx == MaxFunctionScopesIndex)) {
        NumCapturingClosures++;

    if (BuildAndDiagnose) {
      Diag(Loc, diag::err_this_capture)
          << (Explicit && idx == MaxFunctionScopesIndex);

  if (!BuildAndDiagnose)
    return false;

         "Only a lambda can capture the enclosing object (referred to by "
  for (int idx = MaxFunctionScopesIndex; NumCapturingClosures;
       --idx, --NumCapturingClosures) {
    bool isNested = NumCapturingClosures > 1;

  const auto *Method = dyn_cast<CXXMethodDecl>(DC);
  if (Method && Method->isExplicitObjectMemberFunction()) {
    Diag(Loc, diag::err_invalid_this_use) << 1;
    Diag(Loc, diag::err_invalid_this_use) << 1;
    Diag(Loc, diag::err_invalid_this_use) << 0;

  if (This->isTypeDependent())
  auto IsDependent = [&]() {
      auto *LSI = dyn_cast<sema::LambdaScopeInfo>(Scope);
      if (LSI->Lambda && !LSI->Lambda->Encloses(CurContext) &&
          LSI->AfterParameterList)
      if (LSI->isCXXThisCaptured()) {
        if (!LSI->getCXXThisCapture().isCopyCapture())
      const auto *MD = LSI->CallOperator;
      if (MD->getType().isNull())
      return Ty && MD->isExplicitObjectMemberFunction() &&
  This->setCapturedByCopyInLambdaWithExplicitObjectParameter(IsDependent);
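// Next: functional-notation casts and T(args) / T{args} construction,
// including deduction for placeholder types, e.g. (illustrative):
//   auto x = int{42};
//   auto p = std::pair(1, 2);   // class template argument deduction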
                                bool ListInitialization) {
                                   RParenOrBraceLoc, ListInitialization);
  if (!Result.isInvalid() && Result.get()->isInstantiationDependent() &&
      !Result.get()->isTypeDependent())
  else if (Result.isInvalid())
                                      RParenOrBraceLoc, exprs, Ty);

                                   bool ListInitialization) {
  assert((!ListInitialization || Exprs.size() == 1) &&
         "List initialization must have exactly one expression.");
          ? ListInitialization
                TyBeginLoc, LParenOrBraceLoc, RParenOrBraceLoc)
      isa<DeducedTemplateSpecializationType>(Deduced)) {
  } else if (Deduced && !Deduced->isDeduced()) {
    if (ListInitialization) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());

      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_init_no_expression)
                       << Ty << FullRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                       diag::err_auto_expr_init_multiple_expressions)
                       << Ty << FullRange);
      Diag(TyBeginLoc, diag::warn_cxx20_compat_auto_expr) << FullRange;
    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << ListInitialization << Ty << FullRange);
      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_deduction_failure)
                       << Ty << Deduce->getType() << FullRange

                                      RParenOrBraceLoc, ListInitialization);

  if (Exprs.size() == 1 && !ListInitialization &&
      !isa<InitListExpr>(Exprs[0])) {
    Expr *Arg = Exprs[0];

    if (!ListInitialization)
      return ExprError(Diag(TyBeginLoc, diag::err_value_init_for_array_type)

    return ExprError(Diag(TyBeginLoc, diag::err_init_for_function_type)
                     << Ty << FullRange);

                              diag::err_invalid_incomplete_type_use, FullRange))

      Inner = BTE->getSubExpr();
    if (auto *CE = dyn_cast<ConstantExpr>(Inner);
        CE && CE->isImmediateInvocation())
      Inner = CE->getSubExpr();
    if (!isa<CXXTemporaryObjectExpr>(Inner) &&
        !isa<CXXScalarValueInitExpr>(Inner)) {

          : SourceRange(LParenOrBraceLoc, RParenOrBraceLoc);
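// Next: helpers that classify "usual" deallocation functions, i.e. operator
// delete overloads optionally taking std::size_t and/or std::align_val_t
// (and destroying delete), and pick the best match among them.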
  for (const auto *D : R) {
    if (const auto *FD = dyn_cast<FunctionDecl>(D)) {

  return llvm::none_of(PreventedBy, [&](const FunctionDecl *FD) {
           "Only single-operand functions should be in PreventedBy");

  unsigned UsualParams = 1;

  if (S.getLangOpts().SizedDeallocation && UsualParams < FD->getNumParams() &&

  if (S.getLangOpts().AlignedAllocation && UsualParams < FD->getNumParams() &&

struct UsualDeallocFnInfo {
  UsualDeallocFnInfo() : Found(), FD(nullptr) {}

    unsigned NumBaseParams = 1;
    if (FD->isDestroyingOperatorDelete()) {

    if (NumBaseParams < FD->getNumParams() &&
            FD->getParamDecl(NumBaseParams)->getType(),

    if (NumBaseParams < FD->getNumParams() &&
        FD->getParamDecl(NumBaseParams)->getType()->isAlignValT()) {
      HasAlignValT = true;

  explicit operator bool() const { return FD; }

  bool isBetterThan(const UsualDeallocFnInfo &Other, bool WantSize,
                    bool WantAlign) const {
    if (Destroying != Other.Destroying)

    if (HasAlignValT != Other.HasAlignValT)
      return HasAlignValT == WantAlign;

    if (HasSizeT != Other.HasSizeT)
      return HasSizeT == WantSize;

    return CUDAPref > Other.CUDAPref;

  bool Destroying, HasSizeT, HasAlignValT;

  UsualDeallocFnInfo Best;

  for (auto I = R.begin(), E = R.end(); I != E; ++I) {
    UsualDeallocFnInfo Info(S, I.getPair());
        BestFns->push_back(Info);
      if (Best.isBetterThan(Info, WantSize, WantAlign))
    if (BestFns && Info.isBetterThan(Best, WantSize, WantAlign))
      BestFns->push_back(Info);

  if (!record)
    return false;

  if (ops.empty())
    return false;

  return Best && Best.HasSizeT;
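// Next: processing the declarator and initializer of a new-expression, e.g.
// (illustrative) 'new int[n]{}', where the array bound is checked for being
// a constant expression when that is required.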
  std::optional<Expr *> ArraySize;
  if (D.getNumTypeObjects() > 0 &&
    if (D.getDeclSpec().hasAutoTypeSpec())
    D.DropFirstTypeObject();

  for (unsigned I = 0, N = D.getNumTypeObjects(); I < N; ++I) {
      if (Expr *NumElts = (Expr *)Array.NumElts) {
        if (!NumElts->isTypeDependent() && !NumElts->isValueDependent()) {
                  NumElts, nullptr, diag::err_new_array_nonconst, AllowFold)

  if (D.isInvalidType())

    DirectInitRange = List->getSourceRange();

                     PlacementLParen, PlacementArgs, PlacementRParen,
                     TypeIdParens, AllocType, TInfo, ArraySize, DirectInitRange,

    return IsCPlusPlus20 || PLE->getNumExprs() == 0;
  if (isa<ImplicitValueInitExpr>(Init))
    return !CCE->isListInitialization() &&
           CCE->getConstructor()->isDefaultConstructor();
  assert(isa<InitListExpr>(Init) &&
         "Shouldn't create list CXXConstructExprs for arrays.");

  std::optional<unsigned> AlignmentParam;

  StringRef OSName = AvailabilityAttr::getPlatformNameSourceSpelling(
  bool IsDelete = Kind == OO_Delete || Kind == OO_Array_Delete;
  Diag(Loc, diag::err_aligned_allocation_unavailable)
      << OSVersion.getAsString() << OSVersion.empty();
  Diag(Loc, diag::note_silence_aligned_allocation_unavailable);
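// Next: building the CXXNewExpr itself: converting the array size to
// std::size_t, deducing placeholder types, computing the allocation size,
// and warning about over-aligned types when aligned allocation is unavailable.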
                             std::optional<Expr *> ArraySize,
  if (DirectInitRange.isValid()) {
    assert(Initializer && "Have parens but no initializer.");
  } else if (isa_and_nonnull<InitListExpr>(Initializer))
         "Initializer expression that cannot have been implicitly created.");
           "paren init for non-call init");
    Exprs = MultiExprArg(List->getExprs(), List->getNumExprs());

  switch (InitStyle) {
                              DirectInitRange.getEnd());
    llvm_unreachable("Unknown initialization kind");

  if (Deduced && !Deduced->isDeduced() &&
      isa<DeducedTemplateSpecializationType>(Deduced)) {
      Diag(*ArraySize ? (*ArraySize)->getExprLoc() : TypeRange.getBegin(),
           diag::err_deduced_class_template_compound_type)
          << (*ArraySize ? (*ArraySize)->getSourceRange() : TypeRange));

        AllocTypeInfo, Entity, Kind, Exprs);
  } else if (Deduced && !Deduced->isDeduced()) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());

      return ExprError(Diag(StartLoc, diag::err_auto_new_requires_ctor_arg)
                       << AllocType << TypeRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                       diag::err_auto_new_ctor_multiple_expressions)
                       << AllocType << TypeRange);
          << AllocType << TypeRange;
    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << Braced << AllocType << TypeRange);
      return ExprError(Diag(StartLoc, diag::err_auto_new_deduction_failure)
                       << AllocType << Deduce->getType() << TypeRange

    AllocType = Array->getElementType();

  if (ArraySize && *ArraySize &&
      (*ArraySize)->getType()->isNonOverloadPlaceholderType()) {
    ArraySize = result.get();

  std::optional<uint64_t> KnownArraySize;
  if (ArraySize && *ArraySize && !(*ArraySize)->isTypeDependent()) {
            (*ArraySize)->getType()->getAs<RecordType>())
        Diag(StartLoc, diag::warn_cxx98_compat_array_size_conversion)
            << (*ArraySize)->getType() << 0 << "'size_t'";

        SizeConvertDiagnoser(Expr *ArraySize)
              ArraySize(ArraySize) {}

          return S.Diag(Loc, diag::err_array_size_not_integral)
          return S.Diag(Loc, diag::err_array_size_incomplete_type)
          return S.Diag(Loc, diag::err_array_size_explicit_conversion) << T << ConvTy;
          return S.Diag(Loc, diag::err_array_size_ambiguous_conversion) << T;
                              ? diag::warn_cxx98_compat_array_size_conversion
                              : diag::ext_array_size_conversion)
      } SizeDiagnoser(*ArraySize);

    ArraySize = ConvertedSize.get();
    QualType SizeType = (*ArraySize)->getType();

      if (std::optional<llvm::APSInt> Value =
              (*ArraySize)->getIntegerConstantExpr(Context)) {
        if (Value->isSigned() && Value->isNegative()) {
                           diag::err_typecheck_negative_array_size)
                           << (*ArraySize)->getSourceRange());
          unsigned ActiveSizeBits =
            Diag((*ArraySize)->getBeginLoc(), diag::err_array_too_large)
        KnownArraySize = Value->getZExtValue();
      } else if (TypeIdParens.isValid()) {
        Diag((*ArraySize)->getBeginLoc(), diag::ext_new_paren_array_nonconst)
            << (*ArraySize)->getSourceRange()

  unsigned Alignment =
  bool PassAlignment = getLangOpts().AlignedAllocation &&
                       Alignment > NewAlignment;

          AllocType, ArraySize.has_value(), PassAlignment, PlacementArgs,
          OperatorNew, OperatorDelete))

  bool UsualArrayDeleteWantsSize = false;
    UsualArrayDeleteWantsSize =

    unsigned NumImplicitArgs = PassAlignment ? 2 : 1;
                              NumImplicitArgs, PlacementArgs, AllPlaceArgs,
    if (!AllPlaceArgs.empty())
      PlacementArgs = AllPlaceArgs;

    llvm::APInt SingleEltSize(
    std::optional<llvm::APInt> AllocationSize;
      AllocationSize = SingleEltSize;
      AllocationSize = llvm::APInt(SizeTyWidth, *KnownArraySize)
                           .umul_ov(SingleEltSize, Overflow);
             "Expected that all the overflows would have been handled already.");
        Context, AllocationSize.value_or(llvm::APInt::getZero(SizeTyWidth)),
                               CK_IntegralCast, &AlignmentLiteral,

    CallArgs.reserve(NumImplicitArgs + PlacementArgs.size());
    CallArgs.emplace_back(AllocationSize
                              ? static_cast<Expr *>(&AllocationSizeLiteral)
                              : &OpaqueAllocationSize);
      CallArgs.emplace_back(&DesiredAlignment);
    CallArgs.insert(CallArgs.end(), PlacementArgs.begin(), PlacementArgs.end());

    checkCall(OperatorNew, Proto, nullptr, CallArgs,
              false, StartLoc, Range, CallType);

    if (PlacementArgs.empty() && !PassAlignment &&
      if (Alignment > NewAlignment)
        Diag(StartLoc, diag::warn_overaligned_type)

      SourceRange InitRange(Exprs.front()->getBeginLoc(),
                            Exprs.back()->getEndLoc());
      Diag(StartLoc, diag::err_new_array_init_args) << InitRange;

      InitType = AllocType;

            dyn_cast_or_null<CXXBindTemporaryExpr>(FullInit.get()))
      FullInit = Binder->getSubExpr();

  if (ArraySize && !*ArraySize) {
      Diag(TypeRange.getEnd(), diag::err_new_array_size_unknown_from_init)

  if (OperatorDelete) {

                        PassAlignment, UsualArrayDeleteWantsSize,
                        PlacementArgs, TypeIdParens, ArraySize, InitStyle,
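// Next: validating the allocated type (complete, non-abstract, not variably
// modified, no disallowed address space) and resolving the operator new
// overload, retrying with the std::align_val_t variants for over-aligned types.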
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 0 << R;
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 1 << R;
               Loc, AllocType, diag::err_new_incomplete_or_sizeless_type, R))
                                   diag::err_allocation_of_abstract_type))
    return Diag(Loc, diag::err_variably_modified_new_type)
    return Diag(Loc, diag::err_address_space_qualified_new)
      return Diag(Loc, diag::err_arc_new_array_without_ownership)

       Alloc != AllocEnd; ++Alloc) {
    NamedDecl *D = (*Alloc)->getUnderlyingDecl();

  if (PassAlignment) {
    PassAlignment = false;
    Args.erase(Args.begin() + 1);
                                     Operator, &Candidates, AlignArg,

        (Args[1]->getType()->isObjectPointerType() ||
         Args[1]->getType()->isArrayType())) {
      S.Diag(R.getNameLoc(), diag::err_need_header_before_placement_new)

    if (AlignedCandidates) {
        return C.Function->getNumParams() > 1 &&
               C.Function->getParamDecl(1)->getType()->isAlignValT();

      AlignedArgs.reserve(Args.size() + 1);
      AlignedArgs.push_back(Args[0]);
      AlignedArgs.push_back(AlignArg);
      AlignedArgs.append(Args.begin() + 1, Args.end());

    if (AlignedCandidates)
      AlignedCandidates->NoteCandidates(S, AlignedArgs, AlignedCands, "",
            S.PDiag(diag::err_ovl_ambiguous_call)
                               Candidates, Best->Function, Args);

  llvm_unreachable("Unreachable, bad result from BestViableFunction");

  AllocArgs.reserve((PassAlignment ? 2 : 1) + PlaceArgs.size());
  AllocArgs.push_back(&Size);

  if (PassAlignment) {
    AllocArgs.push_back(&Align);

  AllocArgs.insert(AllocArgs.end(), PlaceArgs.begin(), PlaceArgs.end());

      IsArray ? OO_Array_New : OO_New);

    if (PlaceArgs.empty()) {
      Diag(StartLoc, diag::err_openclcxx_not_supported) << "default new";
      Diag(StartLoc, diag::err_openclcxx_placement_new);

    assert(!R.empty() && "implicitly declared allocation functions not found");
    assert(!R.isAmbiguous() && "global allocation functions are ambiguous");

                                OperatorNew, nullptr,
    OperatorDelete = nullptr;

    while (Filter.hasNext()) {
      auto *FD = dyn_cast<FunctionDecl>(Filter.next()->getUnderlyingDecl());
      if (FD && FD->isDestroyingOperatorDelete())

  bool FoundGlobalDelete = FoundDelete.empty();
  if (FoundDelete.empty()) {

  bool isPlacementNew = !PlaceArgs.empty() || OperatorNew->param_size() != 1 ||
  if (isPlacementNew) {
      for (unsigned I = 1, N = Proto->getNumParams(); I < N; ++I)
        ArgTypes.push_back(Proto->getParamType(I));

      EPI.Variadic = Proto->isVariadic();
      ExpectedFunctionType
                                 DEnd = FoundDelete.end();
              dyn_cast<FunctionTemplateDecl>((*D)->getUnderlyingDecl())) {
        Fn = cast<FunctionDecl>((*D)->getUnderlyingDecl());
                                  ExpectedFunctionType,
                            ExpectedFunctionType))
        Matches.push_back(std::make_pair(D.getPair(), Fn));

        *this, FoundDelete, FoundGlobalDelete,
      Matches.push_back(std::make_pair(Selected.Found, Selected.FD));
      for (auto Fn : BestDeallocFns)
        Matches.push_back(std::make_pair(Fn.Found, Fn.FD));

  if (Matches.size() == 1) {
    OperatorDelete = Matches[0].second;

      UsualDeallocFnInfo Info(*this,
      bool IsSizedDelete = Info.HasSizeT;
      if (IsSizedDelete && !FoundGlobalDelete) {
        auto NonSizedDelete =
        if (NonSizedDelete && !NonSizedDelete.HasSizeT &&
            NonSizedDelete.HasAlignValT == Info.HasAlignValT)
          IsSizedDelete = false;

      if (IsSizedDelete) {
                        PlaceArgs.back()->getEndLoc());
        Diag(StartLoc, diag::err_placement_new_non_placement_delete) << R;
  } else if (!Matches.empty()) {
    Diag(StartLoc, diag::warn_ambiguous_suitable_delete_function_found)
        << DeleteName << AllocElemType;
    for (auto &Match : Matches)
      Diag(Match.second->getLocation(),
           diag::note_member_declared_here) << DeleteName;
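// Next: implicit declaration of the global allocation functions
// (operator new, new[], delete, delete[]) together with their sized and
// aligned variants, placed in the global module fragment when modules are used.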
  if (TheGlobalModuleFragment) {

    if (TheGlobalModuleFragment) {
      AlignValT->setModuleOwnershipKind(
      AlignValT->setLocalOwningModule(TheGlobalModuleFragment);

    AlignValT->setImplicit(true);

    Params.push_back(Param);

  bool HasSizedVariant = getLangOpts().SizedDeallocation &&
                         (Kind == OO_Delete || Kind == OO_Array_Delete);
  bool HasAlignedVariant = getLangOpts().AlignedAllocation;

  int NumSizeVariants = (HasSizedVariant ? 2 : 1);
  int NumAlignVariants = (HasAlignedVariant ? 2 : 1);
  for (int Sized = 0; Sized < NumSizeVariants; ++Sized) {
      Params.push_back(SizeT);

    for (int Aligned = 0; Aligned < NumAlignVariants; ++Aligned) {

  DeclareGlobalAllocationFunctions(OO_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Array_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Delete, Context.VoidTy, VoidPtr);
  DeclareGlobalAllocationFunctions(OO_Array_Delete, Context.VoidTy, VoidPtr);

    PopGlobalModuleFragment();

       Alloc != AllocEnd; ++Alloc) {
    if (Func->getNumParams() == Params.size()) {
      for (auto *P : Func->parameters())
        FuncParams.push_back(
        Func->setVisibleDespiteOwningModule();

          false, false, true));

  bool HasBadAllocExceptionSpec
    = (Name.getCXXOverloadedOperator() == OO_New ||
       Name.getCXXOverloadedOperator() == OO_Array_New);
  if (HasBadAllocExceptionSpec) {
      assert(StdBadAlloc && "Must have std::bad_alloc declared");

  auto CreateAllocationFunctionDecl = [&](Attr *ExtraAttr) {
    Alloc->setImplicit();
    Alloc->setVisibleDespiteOwningModule();

    if (HasBadAllocExceptionSpec && getLangOpts().NewInfallible &&
          ReturnsNonNullAttr::CreateImplicit(Context, Alloc->getLocation()));

    if (TheGlobalModuleFragment) {
      Alloc->setModuleOwnershipKind(
      Alloc->setLocalOwningModule(TheGlobalModuleFragment);

    Alloc->addAttr(VisibilityAttr::CreateImplicit(
            ? VisibilityAttr::Hidden
                ? VisibilityAttr::Protected
                : VisibilityAttr::Default));

      ParamDecls.back()->setImplicit();
    Alloc->setParams(ParamDecls);
      Alloc->addAttr(ExtraAttr);

    CreateAllocationFunctionDecl(nullptr);

    CreateAllocationFunctionDecl(CUDAHostAttr::CreateImplicit(Context));
    CreateAllocationFunctionDecl(CUDADeviceAttr::CreateImplicit(Context));
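// Next: finding the deallocation function that pairs with an allocation or a
// delete-expression, diagnosing deleted, ambiguous, or missing candidates.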
                                                  bool CanProvideSize,
  assert(Result.FD && "operator delete missing from global scope?");

  return OperatorDelete;

                                   bool WantSize, bool WantAligned) {
  if (Found.isAmbiguous())
  Found.suppressDiagnostics();

                           Overaligned, &Matches);

  if (Matches.size() == 1) {
    Operator = cast<CXXMethodDecl>(Matches[0].FD);
      Diag(StartLoc, diag::err_deleted_function_use)
          << (Msg != nullptr) << (Msg ? Msg->getString() : StringRef());

  if (!Matches.empty()) {
    Diag(StartLoc, diag::err_ambiguous_suitable_delete_member_function_found)
    for (auto &Match : Matches)
      Diag(Match.FD->getLocation(), diag::note_member_declared_here) << Name;

  if (!Found.empty()) {
    Diag(StartLoc, diag::err_no_suitable_delete_member_function_found)
           diag::note_member_declared_here) << Name;
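// Next: MismatchingNewDeleteDetector, which warns when the array form of new
// is paired with the non-array form of delete (or vice versa):
//   int *p = new int[4];
//   delete p;   // warn_mismatched_delete_new: should be 'delete[] p'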
class MismatchingNewDeleteDetector {
  enum MismatchResult {
    MemberInitMismatches,

  explicit MismatchingNewDeleteDetector(bool EndOfTU)
      : Field(nullptr), IsArrayForm(false), EndOfTU(EndOfTU),
        HasUndefinedConstructors(false) {}

  MismatchResult analyzeField(FieldDecl *Field, bool DeleteWasArrayForm);

  bool HasUndefinedConstructors;

  MismatchResult analyzeMemberExpr(const MemberExpr *ME);

  MismatchResult analyzeInClassInitializer();

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeDeleteExpr(const CXXDeleteExpr *DE) {
  assert(DE && "Expected delete-expression");
  if (const MemberExpr *ME = dyn_cast<const MemberExpr>(E)) {
    return analyzeMemberExpr(ME);
  } else if (const DeclRefExpr *D = dyn_cast<const DeclRefExpr>(E)) {
    if (!hasMatchingVarInit(D))
      return VarInitMismatches;

MismatchingNewDeleteDetector::getNewExprFromInitListOrExpr(const Expr *E) {
  assert(E != nullptr && "Expected a valid initializer expression");
  if (const InitListExpr *ILE = dyn_cast<const InitListExpr>(E)) {
    if (ILE->getNumInits() == 1)
      E = dyn_cast<const CXXNewExpr>(ILE->getInit(0)->IgnoreParenImpCasts());
  return dyn_cast_or_null<const CXXNewExpr>(E);

bool MismatchingNewDeleteDetector::hasMatchingNewInCtorInit(
      (NE = getNewExprFromInitListOrExpr(CI->getInit()))) {
    if (NE->isArray() == IsArrayForm)
    NewExprs.push_back(NE);

bool MismatchingNewDeleteDetector::hasMatchingNewInCtor(
    HasUndefinedConstructors = true;
  for (const auto *CI : cast<const CXXConstructorDecl>(Definition)->inits()) {
    if (hasMatchingNewInCtorInit(CI))

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeInClassInitializer() {
  assert(Field != nullptr && "This should be called only for members");
  const Expr *InitExpr = Field->getInClassInitializer();
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (const CXXNewExpr *NE = getNewExprFromInitListOrExpr(InitExpr)) {
    if (NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);
      return MemberInitMismatches;

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeField(FieldDecl *Field,
                                           bool DeleteWasArrayForm) {
  assert(Field != nullptr && "Analysis requires a valid class member.");
  this->Field = Field;
  IsArrayForm = DeleteWasArrayForm;
  for (const auto *CD : RD->ctors()) {
    if (hasMatchingNewInCtor(CD))
  if (HasUndefinedConstructors)
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (!NewExprs.empty())
    return MemberInitMismatches;
  return Field->hasInClassInitializer() ? analyzeInClassInitializer()

MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeMemberExpr(const MemberExpr *ME) {
  assert(ME != nullptr && "Expected a member expression");
  return analyzeField(F, IsArrayForm);

bool MismatchingNewDeleteDetector::hasMatchingVarInit(const DeclRefExpr *D) {
  if (const VarDecl *VD = dyn_cast<const VarDecl>(D->getDecl())) {
    if (VD->hasInit() && (NE = getNewExprFromInitListOrExpr(VD->getInit())) &&
        NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);
  return NewExprs.empty();

                   const MismatchingNewDeleteDetector &Detector) {
  if (!Detector.IsArrayForm)

  SemaRef.Diag(DeleteLoc, diag::warn_mismatched_delete_new)
      << Detector.IsArrayForm << H;
  for (const auto *NE : Detector.NewExprs)
    SemaRef.Diag(NE->getExprLoc(), diag::note_allocated_here)
        << Detector.IsArrayForm;

void Sema::AnalyzeDeleteExprMismatch(const CXXDeleteExpr *DE) {
  MismatchingNewDeleteDetector Detector(false);
  switch (Detector.analyzeDeleteExpr(DE)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
  case MismatchingNewDeleteDetector::MemberInitMismatches: {
  case MismatchingNewDeleteDetector::AnalyzeLater: {
  case MismatchingNewDeleteDetector::NoMismatch:

                                     bool DeleteWasArrayForm) {
  MismatchingNewDeleteDetector Detector(true);
  switch (Detector.analyzeField(Field, DeleteWasArrayForm)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
    llvm_unreachable("This analysis should have been done for class members.");
  case MismatchingNewDeleteDetector::AnalyzeLater:
    llvm_unreachable("Analysis cannot be postponed any point beyond end of "
                     "translation unit.");
  case MismatchingNewDeleteDetector::MemberInitMismatches:
  case MismatchingNewDeleteDetector::NoMismatch:
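// Next: semantic analysis of delete-expressions ('delete p;' / 'delete[] p;')
// and of __builtin_operator_new / __builtin_operator_delete: converting the
// operand, requiring a complete pointee type, and selecting operator delete.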
                              bool ArrayForm, Expr *ExE) {
  bool ArrayFormAsWritten = ArrayForm;
  bool UsualArrayDeleteWantsSize = false;

        if (ConvPtrType->getPointeeType()->isIncompleteOrObjectType())

        return S.Diag(Loc, diag::err_delete_operand) << T;
        return S.Diag(Loc, diag::err_delete_incomplete_class_type) << T;
        return S.Diag(Loc, diag::err_delete_explicit_conversion) << T << ConvTy;
        return S.Diag(Loc, diag::err_ambiguous_delete_operand) << T;
        llvm_unreachable("conversion functions are permitted");

    if (!Converter.match(Type))

           diag::err_address_space_qualified_delete)

      Diag(StartLoc, LangOpts.CPlusPlus26 ? diag::err_delete_incomplete
                                          : diag::ext_delete_void_ptr_operand)

                                ? diag::err_delete_incomplete
                                : diag::warn_delete_incomplete,

      PointeeRD = cast<CXXRecordDecl>(RT->getDecl());

      Diag(StartLoc, diag::warn_delete_array_type)

        ArrayForm ? OO_Array_Delete : OO_Delete);

          UsualArrayDeleteWantsSize =
    else if (isa_and_nonnull<CXXMethodDecl>(OperatorDelete))
      UsualArrayDeleteWantsSize =
          UsualDeallocFnInfo(*this,

    if (!OperatorDelete) {
        Diag(StartLoc, diag::err_openclcxx_not_supported) << "default delete";

      bool CanProvideSize =
          IsComplete && (!ArrayForm || UsualArrayDeleteWantsSize ||
                                            Overaligned, DeleteName);

    bool IsVirtualDelete = false;
                          PDiag(diag::err_access_dtor) << PointeeElem);
        IsVirtualDelete = Dtor->isVirtual();

      UsualArrayDeleteWantsSize, OperatorDelete, Ex.get(), StartLoc);
  AnalyzeDeleteExprMismatch(Result);

      IsDelete ? OO_Delete : OO_New);
  assert(!R.empty() && "implicitly declared allocation functions not found");
  assert(!R.isAmbiguous() && "global allocation functions are ambiguous");

       FnOvl != FnOvlEnd; ++FnOvl) {
    NamedDecl *D = (*FnOvl)->getUnderlyingDecl();
           "class members should not be considered");
    S.Diag(R.getNameLoc(), diag::err_builtin_operator_new_delete_not_usual)
        << (IsDelete ? 1 : 0) << Range;
    S.Diag(FnDecl->getLocation(), diag::note_non_usual_function_declared_here)

        S.PDiag(diag::err_ovl_no_viable_function_in_call)
        S.PDiag(diag::err_ovl_ambiguous_call)
                               Candidates, Best->Function, Args);

  llvm_unreachable("Unreachable, bad result from BestViableFunction");

  CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
      << (IsDelete ? "__builtin_operator_delete" : "__builtin_operator_new")
                                      OperatorNewOrDelete))
  assert(OperatorNewOrDelete && "should be found");

  for (unsigned i = 0; i != TheCall->getNumArgs(); ++i) {

  assert(Callee && Callee->getCastKind() == CK_BuiltinFnToFnPtr &&
         "Callee expected to be implicit cast to a builtin function pointer");

  return TheCallResult;
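// Next (as far as the fragments indicate): warnings for deleting through a
// class without a virtual destructor, condition-expression checks, and
// string-literal-to-pointer conversion checks, then user-defined conversions.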
                                  bool IsDelete, bool CallCanBeVirtual,
                                  bool WarnOnNonAbstractTypes,

      Diag(Loc, diag::warn_delete_abstract_non_virtual_dtor) << (IsDelete ? 0 : 1)
    } else if (WarnOnNonAbstractTypes) {
      Diag(Loc, diag::warn_delete_non_virtual_dtor) << (IsDelete ? 0 : 1)

      std::string TypeStr;
      Diag(DtorLoc, diag::note_delete_non_virtual)

                 diag::err_invalid_use_of_function_type)
                 diag::err_invalid_use_of_array_type)

  llvm_unreachable("unexpected condition kind");

           diag::err_constexpr_if_condition_expression_is_not_constant);

    From = Cast->getSubExpr();

  if (!ToPtrType->getPointeeType().hasQualifiers()) {
    switch (StrLit->getKind()) {
      return (ToPointeeType->getKind() == BuiltinType::Char_U ||
              ToPointeeType->getKind() == BuiltinType::Char_S);
      assert(false && "Unevaluated string literal in expression");

                              bool HadMultipleCandidates,
  default: llvm_unreachable("Unhandled cast kind!");
  case CK_ConstructorConversion: {
                                    diag::err_allocation_of_abstract_type))

        CastLoc, Ty, FoundDecl, cast<CXXConstructorDecl>(Method),
        ConstructorArgs, HadMultipleCandidates,
        false, false, false,

  case CK_UserDefinedConversion: {
                               HadMultipleCandidates);

        CK_UserDefinedConversion, Result.get(),
        nullptr, Result.get()->getValueKind(),

    assert(FD && "no conversion function for user-defined conversion seq");
      CastKind = CK_UserDefinedConversion;
      CastKind = CK_ConstructorConversion;

    From = CastArg.get();

                 PDiag(diag::err_typecheck_ambiguous_condition)
    llvm_unreachable("bad conversion");

        ToType, From->getType(), From, Action);
    assert(Diagnosed && "failed to diagnose bad conversion"); (void)Diagnosed;

    QualType ElType = ToVec->getElementType();
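// Next: applying a standard conversion sequence to an expression: unwrapping
// atomics, promotions, complex/real conversions, and the various pointer and
// Objective-C ARC conversion cases.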
        false, false, false,
        false, false, false,

    ToAtomicType = ToType;
    ToType = ToAtomic->getValueType();

  QualType InitialFromType = FromType;

  switch (SCS.First) {
      FromType = FromAtomic->getValueType().getUnqualifiedType();
    From = FromRes.get();
    llvm_unreachable("Improper first standard conversion");

           "only enums with fixed underlying type can promote to bool");

        CK = CK_FloatingComplexCast;
        CK = CK_FloatingComplexToIntegralComplex;
        CK = CK_IntegralComplexToFloatingComplex;
        CK = CK_IntegralComplexCast;

           "Attempting implicit fixed point conversion without a fixed "
                 nullptr, CCK).get();
                 nullptr, CCK).get();
                 nullptr, CCK).get();
                 nullptr, CCK).get();
                 nullptr, CCK).get();
                 nullptr, CCK).get();
                 nullptr, CCK).get();

             diag::ext_typecheck_convert_incompatible_pointer)
             diag::ext_typecheck_convert_incompatible_pointer)
    } else if (getLangOpts().allowsNonTrivialObjCLifetimeQualifiers() &&
               !ObjC().CheckObjCARCUnavailableWeakConversion(ToType,
      Diag(From->getBeginLoc(), diag::err_arc_convesion_of_weak_unavailable)

    if (Kind == CK_BlockPointerToObjCPointerCast) {

                             &BasePath, CCK).get();

    QualType ElType = ToComplex->getElementType();
                  isFloatingComplex ? CK_FloatingCast : CK_FloatingToIntegral).get();
                  isFloatingComplex ? CK_IntegralToFloating : CK_IntegralCast).get();
                  isFloatingComplex ? CK_FloatingRealToComplex
                                    : CK_IntegralRealToComplex).get();

    QualType ElType = FromComplex->getElementType();
                    isFloatingComplex ? CK_FloatingComplexToReal
                                      : CK_IntegralComplexToReal,
                    isFloatingComplex ? CK_FloatingCast
                                      : CK_IntegralToFloating,