48#include "llvm/ADT/APInt.h"
49#include "llvm/ADT/STLExtras.h"
50#include "llvm/ADT/STLForwardCompat.h"
51#include "llvm/ADT/StringExtras.h"
52#include "llvm/Support/ErrorHandling.h"
53#include "llvm/Support/TypeSize.h"
83 llvm_unreachable(
"Nested name specifier is not a type for inheriting ctor");
97 "not a constructor name");
119 auto *RD = dyn_cast<CXXRecordDecl>(ND);
120 if (RD && RD->isInjectedClassName()) {
121 InjectedClassName = RD;
125 if (!InjectedClassName) {
130 diag::err_incomplete_nested_name_spec) << CurClass << SS.
getRange();
145 bool EnteringContext) {
  auto IsAcceptableResult = [&](NamedDecl *D) -> bool {
    auto *Type = dyn_cast<TypeDecl>(D->getUnderlyingDecl());
  unsigned NumAcceptableResults = 0;
    if (IsAcceptableResult(D))
      ++NumAcceptableResults;
    if (auto *RD = dyn_cast<CXXRecordDecl>(D))
      if (RD->isInjectedClassName())
        D = cast<NamedDecl>(RD->getParent());
    if (FoundDeclSet.insert(D).second)
      FoundDecls.push_back(D);
  if (Found.isAmbiguous() && NumAcceptableResults == 1) {
    Diag(NameLoc, diag::ext_dtor_name_ambiguous);
      if (auto *TD = dyn_cast<TypeDecl>(D->getUnderlyingDecl()))
    if (!IsAcceptableResult(D))
  if (Found.isAmbiguous())
    if (IsAcceptableResult(Type)) {
  bool IsDependent = false;
  auto LookupInObjectType = [&]() -> ParsedType {
    if (Failed || SearchType.isNull())
    return CheckLookupResult(Found);
    return CheckLookupResult(Found);
    return CheckLookupResult(Found);
    if (ParsedType T = LookupInNestedNameSpec(PrefixSS))
  unsigned NumNonExtensionDecls = FoundDecls.size();
    Diag(SS.getEndLoc(), diag::ext_qualified_dtor_named_in_lexical_scope)
    Diag(FoundDecls.back()->getLocation(), diag::note_destructor_type_here)
    FoundDecls.resize(NumNonExtensionDecls);
  std::stable_sort(FoundDecls.begin(), FoundDecls.end(),
    return isa<TypeDecl>(A->getUnderlyingDecl()) >
           isa<TypeDecl>(B->getUnderlyingDecl());
  auto MakeFixItHint = [&] {
      Destroyed = dyn_cast_or_null<CXXRecordDecl>(S->getEntity());
  if (FoundDecls.empty()) {
    Diag(NameLoc, diag::err_undeclared_destructor_name)
        << &II << MakeFixItHint();
  } else if (!SearchType.isNull() && FoundDecls.size() == 1) {
    if (auto *TD = dyn_cast<TypeDecl>(FoundDecls[0]->getUnderlyingDecl())) {
      assert(!SearchType.isNull() &&
             "should only reject a type result if we have a search type");
      Diag(NameLoc, diag::err_destructor_expr_type_mismatch)
          << T << SearchType << MakeFixItHint();
      Diag(NameLoc, diag::err_destructor_expr_nontype)
          << &II << MakeFixItHint();
    Diag(NameLoc, SearchType.isNull() ? diag::err_destructor_name_nontype
                                      : diag::err_destructor_expr_mismatch)
        << &II << SearchType << MakeFixItHint();
    if (auto *TD = dyn_cast<TypeDecl>(FoundD->getUnderlyingDecl()))
      Diag(FoundD->getLocation(), diag::note_destructor_type_here)
      Diag(FoundD->getLocation(), diag::note_destructor_nontype_here)
         "unexpected type in getDestructorType");
        Name.getSourceRange(),
        (StringRef("operator\"\"") + II->getName()).str());
    Diag(Loc, diag::warn_reserved_extern_symbol)
        << II << static_cast<int>(Status) << Hint;
    Diag(Loc, diag::warn_deprecated_literal_operator_id) << II << Hint;
    Diag(Name.getBeginLoc(), diag::err_literal_operator_id_outside_namespace)
  llvm_unreachable("unknown nested name specifier kind");
    return ExprError(Diag(TypeidLoc, diag::err_variably_modified_typeid) << T);
  bool WasEvaluated = false;
    CXXRecordDecl *RecordD = cast<CXXRecordDecl>(RecordT->getDecl());
    return ExprError(Diag(TypeidLoc, diag::err_variably_modified_typeid)
                         ? diag::warn_side_effects_typeid
                         : diag::warn_side_effects_unevaluated_context);
    return ExprError(Diag(OpLoc, diag::err_openclcxx_not_supported)
      return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));
    return ExprError(Diag(OpLoc, diag::err_need_header_before_typeid));
    return ExprError(Diag(OpLoc, diag::err_no_typeid_with_fno_rtti));
    if (auto *CTE = dyn_cast<CXXTypeidExpr>(Result.get()))
      if (CTE->isPotentiallyEvaluated() && !CTE->isMostDerived(Context))
        Diag(OpLoc, diag::warn_no_typeid_with_rtti_disabled)
    if (const auto *Uuid = TD->getMostRecentDecl()->getAttr<UuidAttr>()) {
      UuidAttrs.insert(Uuid);
    if (const auto *CTSD = dyn_cast<ClassTemplateSpecializationDecl>(TD)) {
      const UuidAttr *UuidForTA = nullptr;
        UuidAttrs.insert(UuidForTA);
  if (!Operand->getType()->isDependentType()) {
    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();
    if (UuidAttrs.empty())
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_without_guid));
    if (UuidAttrs.size() > 1)
      return ExprError(Diag(TypeidLoc, diag::err_uuidof_with_multiple_guids));
    Guid = UuidAttrs.back()->getGuidDecl();
  assert((Kind == tok::kw_true || Kind == tok::kw_false) &&
         "Unknown C++ Boolean value!");
  bool IsThrownVarInScope = false;
    if (const auto *DRE = dyn_cast<DeclRefExpr>(Ex->IgnoreParens()))
      if (const auto *Var = dyn_cast<VarDecl>(DRE->getDecl());
          Var && Var->hasLocalStorage() &&
          !Var->getType().isVolatileQualified()) {
        for (; S; S = S->getParent()) {
          if (S->isDeclScope(Var)) {
            IsThrownVarInScope = true;
                                 bool IsThrownVarInScope) {
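  // On OpenMP GPU offload targets (NVPTX/AMDGCN), a 'throw' is only warned
  // about below; elsewhere it is an error when C++ exceptions are disabled.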
  const bool IsOpenMPGPUTarget =
      getLangOpts().OpenMPIsTargetDevice && (T.isNVPTX() || T.isAMDGCN());
  if (!IsOpenMPGPUTarget && !getLangOpts().CXXExceptions &&
    targetDiag(OpLoc, diag::err_exceptions_disabled) << "throw";
  if (IsOpenMPGPUTarget)
    targetDiag(OpLoc, diag::warn_throw_not_valid_on_target) << T.str();
        << "throw" << llvm::to_underlying(CUDA().CurrentTarget());
    Diag(OpLoc, diag::err_omp_simd_region_cannot_use_stmt) << "throw";
    Diag(OpLoc, diag::err_acc_branch_in_out_compute_construct)
                        llvm::DenseMap<CXXRecordDecl *, unsigned> &SubobjectsSeen,
                        llvm::SmallPtrSetImpl<CXXRecordDecl *> &VBases,
                        llvm::SetVector<CXXRecordDecl *> &PublicSubobjectsSeen,
                        bool ParentIsPublic) {
    CXXRecordDecl *BaseDecl = BS.getType()->getAsCXXRecordDecl();
      NewSubobject = VBases.insert(BaseDecl).second;
      ++SubobjectsSeen[BaseDecl];
    bool PublicPath = ParentIsPublic && BS.getAccessSpecifier() == AS_public;
      PublicSubobjectsSeen.insert(BaseDecl);
  llvm::DenseMap<CXXRecordDecl *, unsigned> SubobjectsSeen;
  llvm::SmallSet<CXXRecordDecl *, 2> VBases;
  llvm::SetVector<CXXRecordDecl *> PublicSubobjectsSeen;
  SubobjectsSeen[RD] = 1;
  PublicSubobjectsSeen.insert(RD);
  for (CXXRecordDecl *PublicSubobject : PublicSubobjectsSeen) {
    if (SubobjectsSeen[PublicSubobject] > 1)
    Objects.push_back(PublicSubobject);
  bool isPointer = false;
                          isPointer ? diag::err_throw_incomplete_ptr
                                    : diag::err_throw_incomplete,
                          diag::err_throw_abstract_type, E))
                             PDiag(diag::err_access_dtor_exception) << Ty);
  for (CXXRecordDecl *Subobject : UnambiguousPublicSubobjects) {
  if (ExnObjAlign < TypeAlign) {
    Diag(ThrowLoc, diag::warn_throw_underaligned_obj);
    Diag(ThrowLoc, diag::note_throw_underaligned_obj)
  if (!isPointer && getLangOpts().AssumeNothrowExceptionDtor) {
      auto Ty = Dtor->getType();
        Diag(ThrowLoc, diag::err_throw_object_throwing_dtor) << RD;
    for (int I = FunctionScopes.size();
         I-- && isa<LambdaScopeInfo>(FunctionScopes[I]) &&
                         cast<LambdaScopeInfo>(FunctionScopes[I])->CallOperator);
      CurLSI = cast<LambdaScopeInfo>(FunctionScopes[I]);
        if (C.isCopyCapture()) {
1183 "While computing 'this' capture-type for a generic lambda, when we "
1184 "run out of enclosing LSI's, yet the enclosing DC is a "
1185 "lambda-call-operator we must be (i.e. Current LSI) in a generic "
1186 "lambda call oeprator");
  auto IsThisCaptured =
          if (C.capturesThis()) {
    bool IsByCopyCapture = false;
    bool IsConstCapture = false;
           IsThisCaptured(Closure, IsByCopyCapture, IsConstCapture)) {
      if (IsByCopyCapture) {
  if (method && method->isImplicitObjectMemberFunction())
    ThisTy = method->getThisType().getNonReferenceType();
    : S(S), OldCXXThisTypeOverride(S.CXXThisTypeOverride), Enabled(false)
  if (!Enabled || !ContextDecl)
      Record = Template->getTemplatedDecl();
      Record = cast<CXXRecordDecl>(ContextDecl);
    this->Enabled = true;
  Sema.Diag(DiagLoc, diag::note_lambda_this_capture_fixit)
                                bool BuildAndDiagnose,
                                const unsigned *const FunctionScopeIndexToStopAt,
                                const bool ByCopy) {
  assert((!ByCopy || Explicit) && "cannot implicitly capture *this by value");
  const int MaxFunctionScopesIndex = FunctionScopeIndexToStopAt
                                         ? *FunctionScopeIndexToStopAt
  unsigned NumCapturingClosures = 0;
  for (int idx = MaxFunctionScopesIndex; idx >= 0; idx--) {
    if (CSI->CXXThisCaptureIndex != 0) {
      CSI->Captures[CSI->CXXThisCaptureIndex - 1].markUsed(BuildAndDiagnose);
      if (BuildAndDiagnose) {
        Diag(Loc, diag::err_this_capture)
            << (Explicit && idx == MaxFunctionScopesIndex);
        (Explicit && idx == MaxFunctionScopesIndex)) {
      NumCapturingClosures++;
    if (BuildAndDiagnose) {
      Diag(Loc, diag::err_this_capture)
          << (Explicit && idx == MaxFunctionScopesIndex);
  if (!BuildAndDiagnose)
    return false;
         "Only a lambda can capture the enclosing object (referred to by "
  for (int idx = MaxFunctionScopesIndex; NumCapturingClosures;
       --idx, --NumCapturingClosures) {
    bool isNested = NumCapturingClosures > 1;
  const auto *Method = dyn_cast<CXXMethodDecl>(DC);
  if (Method && Method->isExplicitObjectMemberFunction()) {
    Diag(Loc, diag::err_invalid_this_use) << 1;
    Diag(Loc, diag::err_invalid_this_use) << 1;
    Diag(Loc, diag::err_invalid_this_use) << 0;
  if (This->isTypeDependent())
  auto IsDependent = [&]() {
    auto *LSI = dyn_cast<sema::LambdaScopeInfo>(Scope);
    if (LSI->Lambda && !LSI->Lambda->Encloses(CurContext) &&
        LSI->AfterParameterList)
    if (LSI->isCXXThisCaptured()) {
      if (!LSI->getCXXThisCapture().isCopyCapture())
    const auto *MD = LSI->CallOperator;
    if (MD->getType().isNull())
    return Ty && MD->isExplicitObjectMemberFunction() &&
  This->setCapturedByCopyInLambdaWithExplicitObjectParameter(IsDependent);
                                bool ListInitialization) {
                       RParenOrBraceLoc, ListInitialization);
  if (!Result.isInvalid() && Result.get()->isInstantiationDependent() &&
      !Result.get()->isTypeDependent())
  else if (Result.isInvalid())
                            RParenOrBraceLoc, exprs, Ty);
                                bool ListInitialization) {
  assert((!ListInitialization || Exprs.size() == 1) &&
         "List initialization must have exactly one expression.");
          ? ListInitialization
                        TyBeginLoc, LParenOrBraceLoc, RParenOrBraceLoc)
      isa<DeducedTemplateSpecializationType>(Deduced)) {
  } else if (Deduced && !Deduced->isDeduced()) {
    if (ListInitialization) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());
      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_init_no_expression)
                       << Ty << FullRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                            diag::err_auto_expr_init_multiple_expressions)
                       << Ty << FullRange);
      Diag(TyBeginLoc, diag::warn_cxx20_compat_auto_expr) << FullRange;
    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << ListInitialization << Ty << FullRange);
      return ExprError(Diag(TyBeginLoc, diag::err_auto_expr_deduction_failure)
                       << Ty << Deduce->getType() << FullRange
                         RParenOrBraceLoc, ListInitialization);
  if (Exprs.size() == 1 && !ListInitialization &&
      !isa<InitListExpr>(Exprs[0])) {
    Expr *Arg = Exprs[0];
    if (!ListInitialization)
      return ExprError(Diag(TyBeginLoc, diag::err_value_init_for_array_type)
    return ExprError(Diag(TyBeginLoc, diag::err_init_for_function_type)
                     << Ty << FullRange);
    if (ListInitialization &&
        cast<InitListExpr>(Exprs[0])->getNumInits() == 0) {
                       Exprs[0]->getBeginLoc(), Exprs[0]->getEndLoc());
                          diag::err_invalid_incomplete_type_use,
      Inner = BTE->getSubExpr();
    if (auto *CE = dyn_cast<ConstantExpr>(Inner);
        CE && CE->isImmediateInvocation())
      Inner = CE->getSubExpr();
    if (!isa<CXXTemporaryObjectExpr>(Inner) &&
        !isa<CXXScalarValueInitExpr>(Inner)) {
          : SourceRange(LParenOrBraceLoc, RParenOrBraceLoc);
  for (const auto *D : R) {
    if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
  return llvm::none_of(PreventedBy, [&](const FunctionDecl *FD) {
           "Only single-operand functions should be in PreventedBy");
    unsigned UsualParams = 1;
    if (S.getLangOpts().SizedDeallocation && UsualParams < FD->getNumParams() &&
    if (S.getLangOpts().AlignedAllocation && UsualParams < FD->getNumParams() &&
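// Describes a usual deallocation function and the properties used to rank it:
// whether it is a destroying delete, whether it takes a size_t, whether it
// takes a std::align_val_t, and (under CUDA) its host/device preference.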
struct UsualDeallocFnInfo {
  UsualDeallocFnInfo() : Found(), FD(nullptr) {}
    unsigned NumBaseParams = 1;
    if (FD->isDestroyingOperatorDelete()) {
    if (NumBaseParams < FD->getNumParams() &&
            FD->getParamDecl(NumBaseParams)->getType(),
    if (NumBaseParams < FD->getNumParams() &&
        FD->getParamDecl(NumBaseParams)->getType()->isAlignValT()) {
      HasAlignValT = true;
  explicit operator bool() const { return FD; }
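  // Ranking: a destroying delete is compared first; then prefer the aligned
  // variant only when alignment is wanted and the sized variant only when a
  // size is wanted; the CUDA preference breaks any remaining ties.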
  bool isBetterThan(const UsualDeallocFnInfo &Other, bool WantSize,
                    bool WantAlign) const {
    if (Destroying != Other.Destroying)
    if (HasAlignValT != Other.HasAlignValT)
      return HasAlignValT == WantAlign;
    if (HasSizeT != Other.HasSizeT)
      return HasSizeT == WantSize;
    return CUDAPref > Other.CUDAPref;
  bool Destroying, HasSizeT, HasAlignValT;
  UsualDeallocFnInfo Best;
  for (auto I = R.begin(), E = R.end(); I != E; ++I) {
    UsualDeallocFnInfo Info(S, I.getPair());
        BestFns->push_back(Info);
    if (Best.isBetterThan(Info, WantSize, WantAlign))
    if (BestFns && Info.isBetterThan(Best, WantSize, WantAlign))
      BestFns->push_back(Info);
  if (!record)
    return false;
  if (ops.empty())
    return false;
  return Best && Best.HasSizeT;
  std::optional<Expr *> ArraySize;
  if (D.getNumTypeObjects() > 0 &&
    if (D.getDeclSpec().hasAutoTypeSpec())
      D.DropFirstTypeObject();
  for (unsigned I = 0, N = D.getNumTypeObjects(); I < N; ++I) {
      if (Expr *NumElts = (Expr *)Array.NumElts) {
        if (!NumElts->isTypeDependent() && !NumElts->isValueDependent()) {
                  NumElts, nullptr, diag::err_new_array_nonconst, AllowFold)
  if (D.isInvalidType())
    DirectInitRange = List->getSourceRange();
                     PlacementLParen, PlacementArgs, PlacementRParen,
                     TypeIdParens, AllocType, TInfo, ArraySize, DirectInitRange,
    return IsCPlusPlus20 || PLE->getNumExprs() == 0;
  if (isa<ImplicitValueInitExpr>(Init))
    return !CCE->isListInitialization() &&
           CCE->getConstructor()->isDefaultConstructor();
  assert(isa<InitListExpr>(Init) &&
         "Shouldn't create list CXXConstructExprs for arrays.");
  std::optional<unsigned> AlignmentParam;
  StringRef OSName = AvailabilityAttr::getPlatformNameSourceSpelling(
    bool IsDelete = Kind == OO_Delete || Kind == OO_Array_Delete;
    Diag(Loc, diag::err_aligned_allocation_unavailable)
        << OSVersion.getAsString() << OSVersion.empty();
    Diag(Loc, diag::note_silence_aligned_allocation_unavailable);
                             std::optional<Expr *> ArraySize,
  if (DirectInitRange.isValid()) {
    assert(Initializer && "Have parens but no initializer.");
  } else if (isa_and_nonnull<InitListExpr>(Initializer))
           "Initializer expression that cannot have been implicitly created.");
           "paren init for non-call init");
    Exprs = MultiExprArg(List->getExprs(), List->getNumExprs());
  switch (InitStyle) {
                                   DirectInitRange.getEnd());
    llvm_unreachable("Unknown initialization kind");
  if (Deduced && !Deduced->isDeduced() &&
      isa<DeducedTemplateSpecializationType>(Deduced)) {
      Diag(*ArraySize ? (*ArraySize)->getExprLoc() : TypeRange.getBegin(),
           diag::err_deduced_class_template_compound_type)
          << (*ArraySize ? (*ArraySize)->getSourceRange() : TypeRange));
                        AllocTypeInfo, Entity, Kind, Exprs);
  } else if (Deduced && !Deduced->isDeduced()) {
      auto *ILE = cast<InitListExpr>(Exprs[0]);
      Inits = MultiExprArg(ILE->getInits(), ILE->getNumInits());
      return ExprError(Diag(StartLoc, diag::err_auto_new_requires_ctor_arg)
                       << AllocType << TypeRange);
    if (Inits.size() > 1) {
      Expr *FirstBad = Inits[1];
                            diag::err_auto_new_ctor_multiple_expressions)
                       << AllocType << TypeRange);
          << AllocType << TypeRange;
    Expr *Deduce = Inits[0];
    if (isa<InitListExpr>(Deduce))
                       << Braced << AllocType << TypeRange);
      return ExprError(Diag(StartLoc, diag::err_auto_new_deduction_failure)
                       << AllocType << Deduce->getType() << TypeRange
    AllocType = Array->getElementType();
  if (ArraySize && *ArraySize &&
      (*ArraySize)->getType()->isNonOverloadPlaceholderType()) {
    ArraySize = result.get();
  std::optional<uint64_t> KnownArraySize;
  if (ArraySize && *ArraySize && !(*ArraySize)->isTypeDependent()) {
            (*ArraySize)->getType()->getAs<RecordType>())
      Diag(StartLoc, diag::warn_cxx98_compat_array_size_conversion)
          << (*ArraySize)->getType() << 0 << "'size_t'";
      SizeConvertDiagnoser(Expr *ArraySize)
            ArraySize(ArraySize) {}
        return S.Diag(Loc, diag::err_array_size_not_integral)
        return S.Diag(Loc, diag::err_array_size_incomplete_type)
        return S.Diag(Loc, diag::err_array_size_explicit_conversion)
               << T << ConvTy;
        return S.Diag(Loc, diag::err_array_size_ambiguous_conversion) << T;
                             ? diag::warn_cxx98_compat_array_size_conversion
                             : diag::ext_array_size_conversion)
    } SizeDiagnoser(*ArraySize);
    ArraySize = ConvertedSize.get();
    QualType SizeType = (*ArraySize)->getType();
    if (std::optional<llvm::APSInt> Value =
            (*ArraySize)->getIntegerConstantExpr(Context)) {
      if (Value->isSigned() && Value->isNegative()) {
                              diag::err_typecheck_negative_array_size)
                         << (*ArraySize)->getSourceRange());
        unsigned ActiveSizeBits =
          Diag((*ArraySize)->getBeginLoc(), diag::err_array_too_large)
      KnownArraySize = Value->getZExtValue();
    } else if (TypeIdParens.isValid()) {
      Diag((*ArraySize)->getBeginLoc(), diag::ext_new_paren_array_nonconst)
          << (*ArraySize)->getSourceRange()
  unsigned Alignment =
  bool PassAlignment = getLangOpts().AlignedAllocation &&
                       Alignment > NewAlignment;
          AllocType, ArraySize.has_value(), PassAlignment, PlacementArgs,
          OperatorNew, OperatorDelete))
  bool UsualArrayDeleteWantsSize = false;
    UsualArrayDeleteWantsSize =
  unsigned NumImplicitArgs = PassAlignment ? 2 : 1;
                               NumImplicitArgs, PlacementArgs, AllPlaceArgs,
    if (!AllPlaceArgs.empty())
      PlacementArgs = AllPlaceArgs;
  llvm::APInt SingleEltSize(
  std::optional<llvm::APInt> AllocationSize;
    AllocationSize = SingleEltSize;
    AllocationSize = llvm::APInt(SizeTyWidth, *KnownArraySize)
                         .umul_ov(SingleEltSize, Overflow);
           "Expected that all the overflows would have been handled already.");
      Context, AllocationSize.value_or(llvm::APInt::getZero(SizeTyWidth)),
                                     CK_IntegralCast, &AlignmentLiteral,
    CallArgs.reserve(NumImplicitArgs + PlacementArgs.size());
    CallArgs.emplace_back(AllocationSize
                              ? static_cast<Expr *>(&AllocationSizeLiteral)
                              : &OpaqueAllocationSize);
      CallArgs.emplace_back(&DesiredAlignment);
    CallArgs.insert(CallArgs.end(), PlacementArgs.begin(), PlacementArgs.end());
    checkCall(OperatorNew, Proto, nullptr, CallArgs,
              false, StartLoc, Range, CallType);
  if (PlacementArgs.empty() && !PassAlignment &&
    if (Alignment > NewAlignment)
      Diag(StartLoc, diag::warn_overaligned_type)
      SourceRange InitRange(Exprs.front()->getBeginLoc(),
                            Exprs.back()->getEndLoc());
      Diag(StartLoc, diag::err_new_array_init_args) << InitRange;
    InitType = AllocType;
            dyn_cast_or_null<CXXBindTemporaryExpr>(FullInit.get()))
      FullInit = Binder->getSubExpr();
  if (ArraySize && !*ArraySize) {
      Diag(TypeRange.getEnd(), diag::err_new_array_size_unknown_from_init)
  if (OperatorDelete) {
      PassAlignment, UsualArrayDeleteWantsSize,
      PlacementArgs, TypeIdParens, ArraySize, InitStyle,
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 0 << R;
    return Diag(Loc, diag::err_bad_new_type)
           << AllocType << 1 << R;
               Loc, AllocType, diag::err_new_incomplete_or_sizeless_type, R))
                                  diag::err_allocation_of_abstract_type))
    return Diag(Loc, diag::err_variably_modified_new_type)
    return Diag(Loc, diag::err_address_space_qualified_new)
      return Diag(Loc, diag::err_arc_new_array_without_ownership)
       Alloc != AllocEnd; ++Alloc) {
    NamedDecl *D = (*Alloc)->getUnderlyingDecl();
  if (PassAlignment) {
    PassAlignment = false;
    Args.erase(Args.begin() + 1);
                         Operator, &Candidates, AlignArg,
      (Args[1]->getType()->isObjectPointerType() ||
       Args[1]->getType()->isArrayType())) {
    S.Diag(R.getNameLoc(), diag::err_need_header_before_placement_new)
    if (AlignedCandidates) {
        return C.Function->getNumParams() > 1 &&
               C.Function->getParamDecl(1)->getType()->isAlignValT();
      AlignedArgs.reserve(Args.size() + 1);
      AlignedArgs.push_back(Args[0]);
      AlignedArgs.push_back(AlignArg);
      AlignedArgs.append(Args.begin() + 1, Args.end());
      if (AlignedCandidates)
        AlignedCandidates->NoteCandidates(S, AlignedArgs, AlignedCands, "",
        S.PDiag(diag::err_ovl_ambiguous_call)
                             Candidates, Best->Function, Args);
  llvm_unreachable("Unreachable, bad result from BestViableFunction");
  AllocArgs.reserve((PassAlignment ? 2 : 1) + PlaceArgs.size());
  AllocArgs.push_back(&Size);
  if (PassAlignment) {
    AllocArgs.push_back(&Align);
  AllocArgs.insert(AllocArgs.end(), PlaceArgs.begin(), PlaceArgs.end());
                                IsArray ? OO_Array_New : OO_New);
    if (PlaceArgs.empty()) {
      Diag(StartLoc, diag::err_openclcxx_not_supported) << "default new";
      Diag(StartLoc, diag::err_openclcxx_placement_new);
  assert(!R.empty() && "implicitly declared allocation functions not found");
  assert(!R.isAmbiguous() && "global allocation functions are ambiguous");
                                  OperatorNew, nullptr,
  OperatorDelete = nullptr;
  while (Filter.hasNext()) {
    auto *FD = dyn_cast<FunctionDecl>(Filter.next()->getUnderlyingDecl());
    if (FD && FD->isDestroyingOperatorDelete())
  bool FoundGlobalDelete = FoundDelete.empty();
  if (FoundDelete.empty()) {
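  // For placement new, compute the function type a matching operator delete
  // would need (the same placement parameters as the chosen operator new) and
  // keep only deallocation candidates of that type.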
  bool isPlacementNew = !PlaceArgs.empty() || OperatorNew->param_size() != 1 ||
  if (isPlacementNew) {
      for (unsigned I = 1, N = Proto->getNumParams(); I < N; ++I)
        ArgTypes.push_back(Proto->getParamType(I));
      EPI.Variadic = Proto->isVariadic();
      ExpectedFunctionType
                                     DEnd = FoundDelete.end();
              dyn_cast<FunctionTemplateDecl>((*D)->getUnderlyingDecl())) {
        Fn = cast<FunctionDecl>((*D)->getUnderlyingDecl());
                                                  ExpectedFunctionType,
                                       ExpectedFunctionType))
        Matches.push_back(std::make_pair(D.getPair(), Fn));
          *this, FoundDelete, FoundGlobalDelete,
      Matches.push_back(std::make_pair(Selected.Found, Selected.FD));
      for (auto Fn : BestDeallocFns)
        Matches.push_back(std::make_pair(Fn.Found, Fn.FD));
  if (Matches.size() == 1) {
    OperatorDelete = Matches[0].second;
      UsualDeallocFnInfo Info(*this,
      bool IsSizedDelete = Info.HasSizeT;
      if (IsSizedDelete && !FoundGlobalDelete) {
        auto NonSizedDelete =
        if (NonSizedDelete && !NonSizedDelete.HasSizeT &&
            NonSizedDelete.HasAlignValT == Info.HasAlignValT)
          IsSizedDelete = false;
      if (IsSizedDelete) {
                        PlaceArgs.back()->getEndLoc());
        Diag(StartLoc, diag::err_placement_new_non_placement_delete) << R;
  } else if (!Matches.empty()) {
    Diag(StartLoc, diag::warn_ambiguous_suitable_delete_function_found)
        << DeleteName << AllocElemType;
    for (auto &Match : Matches)
      Diag(Match.second->getLocation(),
           diag::note_member_declared_here) << DeleteName;
  if (TheGlobalModuleFragment) {
  if (TheGlobalModuleFragment) {
    AlignValT->setModuleOwnershipKind(
    AlignValT->setLocalOwningModule(TheGlobalModuleFragment);
  AlignValT->setImplicit(true);
  Params.push_back(Param);
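  // Each implicit global allocation function is declared in up to four forms:
  // with and without a size_t parameter (sized deallocation) and with and
  // without std::align_val_t (aligned allocation).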
  bool HasSizedVariant = getLangOpts().SizedDeallocation &&
                         (Kind == OO_Delete || Kind == OO_Array_Delete);
  bool HasAlignedVariant = getLangOpts().AlignedAllocation;
  int NumSizeVariants = (HasSizedVariant ? 2 : 1);
  int NumAlignVariants = (HasAlignedVariant ? 2 : 1);
  for (int Sized = 0; Sized < NumSizeVariants; ++Sized) {
      Params.push_back(SizeT);
    for (int Aligned = 0; Aligned < NumAlignVariants; ++Aligned) {
  DeclareGlobalAllocationFunctions(OO_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Array_New, VoidPtr, SizeT);
  DeclareGlobalAllocationFunctions(OO_Delete, Context.VoidTy, VoidPtr);
  DeclareGlobalAllocationFunctions(OO_Array_Delete, Context.VoidTy, VoidPtr);
    PopGlobalModuleFragment();
       Alloc != AllocEnd; ++Alloc) {
    if (Func->getNumParams() == Params.size()) {
      for (auto *P : Func->parameters())
        FuncParams.push_back(
        Func->setVisibleDespiteOwningModule();
      false, false, true));
  bool HasBadAllocExceptionSpec =
      (Name.getCXXOverloadedOperator() == OO_New ||
       Name.getCXXOverloadedOperator() == OO_Array_New);
  if (HasBadAllocExceptionSpec) {
      assert(StdBadAlloc && "Must have std::bad_alloc declared");
  auto CreateAllocationFunctionDecl = [&](Attr *ExtraAttr) {
    Alloc->setImplicit();
    Alloc->setVisibleDespiteOwningModule();
    if (HasBadAllocExceptionSpec && getLangOpts().NewInfallible &&
          ReturnsNonNullAttr::CreateImplicit(Context, Alloc->getLocation()));
    if (TheGlobalModuleFragment) {
      Alloc->setModuleOwnershipKind(
      Alloc->setLocalOwningModule(TheGlobalModuleFragment);
      Alloc->addAttr(VisibilityAttr::CreateImplicit(
              ? VisibilityAttr::Hidden
              ? VisibilityAttr::Protected
              : VisibilityAttr::Default));
      ParamDecls.back()->setImplicit();
    Alloc->setParams(ParamDecls);
      Alloc->addAttr(ExtraAttr);
    CreateAllocationFunctionDecl(nullptr);
    CreateAllocationFunctionDecl(CUDAHostAttr::CreateImplicit(Context));
    CreateAllocationFunctionDecl(CUDADeviceAttr::CreateImplicit(Context));
                                             bool CanProvideSize,
  assert(Result.FD && "operator delete missing from global scope?");
  return OperatorDelete;
                                    bool WantSize, bool WantAligned) {
  if (Found.isAmbiguous())
  Found.suppressDiagnostics();
                            Overaligned, &Matches);
  if (Matches.size() == 1) {
    Operator = cast<CXXMethodDecl>(Matches[0].FD);
      Diag(StartLoc, diag::err_deleted_function_use)
          << (Msg != nullptr) << (Msg ? Msg->getString() : StringRef());
  if (!Matches.empty()) {
    Diag(StartLoc, diag::err_ambiguous_suitable_delete_member_function_found)
    for (auto &Match : Matches)
      Diag(Match.FD->getLocation(), diag::note_member_declared_here) << Name;
  if (!Found.empty()) {
    Diag(StartLoc, diag::err_no_suitable_delete_member_function_found)
           diag::note_member_declared_here) << Name;
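// Detects 'delete'/'delete[]' expressions whose form does not match the
// 'new'/'new[]' that allocated the object, whether the allocation happened
// directly, in a constructor's member initializer, in an in-class
// initializer, or in a variable's initializer.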
class MismatchingNewDeleteDetector {
  enum MismatchResult {
    MemberInitMismatches,
  explicit MismatchingNewDeleteDetector(bool EndOfTU)
      : Field(nullptr), IsArrayForm(false), EndOfTU(EndOfTU),
        HasUndefinedConstructors(false) {}
  MismatchResult analyzeField(FieldDecl *Field, bool DeleteWasArrayForm);
  bool HasUndefinedConstructors;
  MismatchResult analyzeMemberExpr(const MemberExpr *ME);
  MismatchResult analyzeInClassInitializer();
MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeDeleteExpr(const CXXDeleteExpr *DE) {
  assert(DE && "Expected delete-expression");
  if (const MemberExpr *ME = dyn_cast<const MemberExpr>(E)) {
    return analyzeMemberExpr(ME);
  } else if (const DeclRefExpr *D = dyn_cast<const DeclRefExpr>(E)) {
    if (!hasMatchingVarInit(D))
      return VarInitMismatches;
MismatchingNewDeleteDetector::getNewExprFromInitListOrExpr(const Expr *E) {
  assert(E != nullptr && "Expected a valid initializer expression");
  if (const InitListExpr *ILE = dyn_cast<const InitListExpr>(E)) {
    if (ILE->getNumInits() == 1)
      E = dyn_cast<const CXXNewExpr>(ILE->getInit(0)->IgnoreParenImpCasts());
  return dyn_cast_or_null<const CXXNewExpr>(E);
bool MismatchingNewDeleteDetector::hasMatchingNewInCtorInit(
      (NE = getNewExprFromInitListOrExpr(CI->getInit()))) {
    if (NE->isArray() == IsArrayForm)
    NewExprs.push_back(NE);
bool MismatchingNewDeleteDetector::hasMatchingNewInCtor(
    HasUndefinedConstructors = true;
  for (const auto *CI : cast<const CXXConstructorDecl>(Definition)->inits()) {
    if (hasMatchingNewInCtorInit(CI))
MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeInClassInitializer() {
  assert(Field != nullptr && "This should be called only for members");
  const Expr *InitExpr = Field->getInClassInitializer();
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (const CXXNewExpr *NE = getNewExprFromInitListOrExpr(InitExpr)) {
    if (NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);
      return MemberInitMismatches;
MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeField(FieldDecl *Field,
                                           bool DeleteWasArrayForm) {
  assert(Field != nullptr && "Analysis requires a valid class member.");
  this->Field = Field;
  IsArrayForm = DeleteWasArrayForm;
  for (const auto *CD : RD->ctors()) {
    if (hasMatchingNewInCtor(CD))
  if (HasUndefinedConstructors)
    return EndOfTU ? NoMismatch : AnalyzeLater;
  if (!NewExprs.empty())
    return MemberInitMismatches;
  return Field->hasInClassInitializer() ? analyzeInClassInitializer()
MismatchingNewDeleteDetector::MismatchResult
MismatchingNewDeleteDetector::analyzeMemberExpr(const MemberExpr *ME) {
  assert(ME != nullptr && "Expected a member expression");
  return analyzeField(F, IsArrayForm);
bool MismatchingNewDeleteDetector::hasMatchingVarInit(const DeclRefExpr *D) {
  if (const VarDecl *VD = dyn_cast<const VarDecl>(D->getDecl())) {
    if (VD->hasInit() && (NE = getNewExprFromInitListOrExpr(VD->getInit())) &&
        NE->isArray() != IsArrayForm) {
      NewExprs.push_back(NE);
  return NewExprs.empty();
                                  const MismatchingNewDeleteDetector &Detector) {
  if (!Detector.IsArrayForm)
  SemaRef.Diag(DeleteLoc, diag::warn_mismatched_delete_new)
      << Detector.IsArrayForm << H;
  for (const auto *NE : Detector.NewExprs)
    SemaRef.Diag(NE->getExprLoc(), diag::note_allocated_here)
        << Detector.IsArrayForm;
void Sema::AnalyzeDeleteExprMismatch(const CXXDeleteExpr *DE) {
  MismatchingNewDeleteDetector Detector(false);
  switch (Detector.analyzeDeleteExpr(DE)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
  case MismatchingNewDeleteDetector::MemberInitMismatches: {
  case MismatchingNewDeleteDetector::AnalyzeLater: {
  case MismatchingNewDeleteDetector::NoMismatch:
                                     bool DeleteWasArrayForm) {
  MismatchingNewDeleteDetector Detector(true);
  switch (Detector.analyzeField(Field, DeleteWasArrayForm)) {
  case MismatchingNewDeleteDetector::VarInitMismatches:
    llvm_unreachable("This analysis should have been done for class members.");
  case MismatchingNewDeleteDetector::AnalyzeLater:
    llvm_unreachable("Analysis cannot be postponed to any point beyond end of "
                     "translation unit.");
  case MismatchingNewDeleteDetector::MemberInitMismatches:
  case MismatchingNewDeleteDetector::NoMismatch:
                              bool ArrayForm, Expr *ExE) {
  bool ArrayFormAsWritten = ArrayForm;
  bool UsualArrayDeleteWantsSize = false;
        if (ConvPtrType->getPointeeType()->isIncompleteOrObjectType())
        return S.Diag(Loc, diag::err_delete_operand) << T;
        return S.Diag(Loc, diag::err_delete_incomplete_class_type) << T;
        return S.Diag(Loc, diag::err_delete_explicit_conversion)
               << T << ConvTy;
        return S.Diag(Loc, diag::err_ambiguous_delete_operand) << T;
        llvm_unreachable("conversion functions are permitted");
    if (!Converter.match(Type))
                 diag::err_address_space_qualified_delete)
        Diag(StartLoc, LangOpts.CPlusPlus26 ? diag::err_delete_incomplete
                                            : diag::ext_delete_void_ptr_operand)
                                ? diag::err_delete_incomplete
                                : diag::warn_delete_incomplete,
      PointeeRD = cast<CXXRecordDecl>(RT->getDecl());
      Diag(StartLoc, diag::warn_delete_array_type)
                                      ArrayForm ? OO_Array_Delete : OO_Delete);
        UsualArrayDeleteWantsSize =
      else if (isa_and_nonnull<CXXMethodDecl>(OperatorDelete))
        UsualArrayDeleteWantsSize =
            UsualDeallocFnInfo(*this,
    if (!OperatorDelete) {
        Diag(StartLoc, diag::err_openclcxx_not_supported) << "default delete";
      bool CanProvideSize =
          IsComplete && (!ArrayForm || UsualArrayDeleteWantsSize ||
                                            Overaligned, DeleteName);
  bool IsVirtualDelete = false;
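  // Deleting a class object requires an accessible destructor; record whether
  // that destructor is virtual.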
                        PDiag(diag::err_access_dtor) << PointeeElem);
      IsVirtualDelete = Dtor->isVirtual();
      UsualArrayDeleteWantsSize, OperatorDelete, Ex.get(), StartLoc);
  AnalyzeDeleteExprMismatch(Result);
                                 IsDelete ? OO_Delete : OO_New);
  assert(!R.empty() && "implicitly declared allocation functions not found");
  assert(!R.isAmbiguous() && "global allocation functions are ambiguous");
       FnOvl != FnOvlEnd; ++FnOvl) {
    NamedDecl *D = (*FnOvl)->getUnderlyingDecl();
           "class members should not be considered");
    S.Diag(R.getNameLoc(), diag::err_builtin_operator_new_delete_not_usual)
        << (IsDelete ? 1 : 0) << Range;
    S.Diag(FnDecl->getLocation(), diag::note_non_usual_function_declared_here)
        S.PDiag(diag::err_ovl_no_viable_function_in_call)
        S.PDiag(diag::err_ovl_ambiguous_call)
                             Candidates, Best->Function, Args);
  llvm_unreachable("Unreachable, bad result from BestViableFunction");
  CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
      << (IsDelete ? "__builtin_operator_delete" : "__builtin_operator_new")
          OperatorNewOrDelete))
  assert(OperatorNewOrDelete && "should be found");
  for (unsigned i = 0; i != TheCall->getNumArgs(); ++i) {
  assert(Callee && Callee->getCastKind() == CK_BuiltinFnToFnPtr &&
         "Callee expected to be implicit cast to a builtin function pointer");
  return TheCallResult;
                                     bool IsDelete, bool CallCanBeVirtual,
                                     bool WarnOnNonAbstractTypes,
      Diag(Loc, diag::warn_delete_abstract_non_virtual_dtor)
          << (IsDelete ? 0 : 1)
    } else if (WarnOnNonAbstractTypes) {
      Diag(Loc, diag::warn_delete_non_virtual_dtor) << (IsDelete ? 0 : 1)
      std::string TypeStr;
      Diag(DtorLoc, diag::note_delete_non_virtual)
                     diag::err_invalid_use_of_function_type)
                     diag::err_invalid_use_of_array_type)
    llvm_unreachable("unexpected condition kind");
          diag::err_constexpr_if_condition_expression_is_not_constant);
    From = Cast->getSubExpr();
  if (!ToPtrType->getPointeeType().hasQualifiers()) {
    switch (StrLit->getKind()) {
      return (ToPointeeType->getKind() == BuiltinType::Char_U ||
              ToPointeeType->getKind() == BuiltinType::Char_S);
      assert(false && "Unevaluated string literal in expression");
                                bool HadMultipleCandidates,
  default: llvm_unreachable("Unhandled cast kind!");
  case CK_ConstructorConversion: {
                                 diag::err_allocation_of_abstract_type))
        CastLoc, Ty, FoundDecl, cast<CXXConstructorDecl>(Method),
        ConstructorArgs, HadMultipleCandidates,
        false, false, false,
  case CK_UserDefinedConversion: {
                                          HadMultipleCandidates);
                              CK_UserDefinedConversion, Result.get(),
                              nullptr, Result.get()->getValueKind(),
    assert(FD && "no conversion function for user-defined conversion seq");
      CastKind = CK_UserDefinedConversion;
      CastKind = CK_ConstructorConversion;
      From = CastArg.get();
                              PDiag(diag::err_typecheck_ambiguous_condition)
    llvm_unreachable("bad conversion");
                                        ToType, From->getType(), From, Action);
    assert(Diagnosed && "failed to diagnose bad conversion"); (void)Diagnosed;
    QualType ElType = ToVec->getElementType();
        false, false, false,
        false, false, false,
      ToAtomicType = ToType;
      ToType = ToAtomic->getValueType();
  QualType InitialFromType = FromType;
  switch (SCS.First) {
      FromType = FromAtomic->getValueType().getUnqualifiedType();
      From = FromRes.get();
    llvm_unreachable("Improper first standard conversion");
           "only enums with fixed underlying type can promote to bool");
        CK = CK_FloatingComplexCast;
        CK = CK_FloatingComplexToIntegralComplex;
        CK = CK_IntegralComplexToFloatingComplex;
        CK = CK_IntegralComplexCast;
           "Attempting implicit fixed point conversion without a fixed "
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
                                    nullptr, CCK).get();
               diag::ext_typecheck_convert_incompatible_pointer)
               diag::ext_typecheck_convert_incompatible_pointer)
    } else if (getLangOpts().allowsNonTrivialObjCLifetimeQualifiers() &&
               !ObjC().CheckObjCARCUnavailableWeakConversion(ToType,
      Diag(From->getBeginLoc(), diag::err_arc_convesion_of_weak_unavailable)
    if (Kind == CK_BlockPointerToObjCPointerCast) {
                             &BasePath, CCK).get();
    QualType ElType = ToComplex->getElementType();
                isFloatingComplex ? CK_FloatingCast : CK_FloatingToIntegral).get();
                isFloatingComplex ? CK_IntegralToFloating : CK_IntegralCast).get();
                isFloatingComplex ? CK_FloatingRealToComplex
                                  : CK_IntegralRealToComplex).get();
    QualType ElType = FromComplex->getElementType();
                        isFloatingComplex ? CK_FloatingComplexToReal
                                          : CK_IntegralComplexToReal,
                        isFloatingComplex ? CK_FloatingCast
                                          : CK_IntegralToFloating,
                        isFloatingComplex ? CK_FloatingToIntegral