30 #include "llvm/ADT/APFloat.h"
31 #include "llvm/ADT/APInt.h"
32 #include "llvm/ADT/SmallPtrSet.h"
33 #include "llvm/ADT/StringExtras.h"
34 #include "llvm/Analysis/ValueTracking.h"
35 #include "llvm/IR/DataLayout.h"
36 #include "llvm/IR/InlineAsm.h"
37 #include "llvm/IR/Intrinsics.h"
38 #include "llvm/IR/IntrinsicsAArch64.h"
39 #include "llvm/IR/IntrinsicsAMDGPU.h"
40 #include "llvm/IR/IntrinsicsARM.h"
41 #include "llvm/IR/IntrinsicsBPF.h"
42 #include "llvm/IR/IntrinsicsHexagon.h"
43 #include "llvm/IR/IntrinsicsNVPTX.h"
44 #include "llvm/IR/IntrinsicsPowerPC.h"
45 #include "llvm/IR/IntrinsicsR600.h"
46 #include "llvm/IR/IntrinsicsRISCV.h"
47 #include "llvm/IR/IntrinsicsS390.h"
48 #include "llvm/IR/IntrinsicsVE.h"
49 #include "llvm/IR/IntrinsicsWebAssembly.h"
50 #include "llvm/IR/IntrinsicsX86.h"
51 #include "llvm/IR/MDBuilder.h"
52 #include "llvm/IR/MatrixBuilder.h"
53 #include "llvm/Support/ConvertUTF.h"
54 #include "llvm/Support/ScopedPrinter.h"
55 #include "llvm/Support/X86TargetParser.h"
58 using namespace clang;
59 using namespace CodeGen;
// NOTE(review): This whole chunk is a whitespace-mangled excerpt of a clang
// CodeGen builtin-emission file. The leading integers fused into many lines
// (e.g. "63", "68") look like original source line numbers, and interior
// lines are missing throughout; code is left byte-identical below.
// Fragment 1: signature of a clamp(Value, Low, High) helper (body not visible).
63 int64_t
clamp(int64_t
Value, int64_t Low, int64_t High) {
// Fragment 2: pieces of an alloca auto-initialization routine — switches on
// getTrivialAutoVarInit(), builds a fill byte (0x00 visible for one case),
// and tags the initializing instruction with "auto-init" annotation metadata.
68 Align AlignmentInBytes) {
70 switch (CGF.
getLangOpts().getTrivialAutoVarInit()) {
75 Byte = CGF.
Builder.getInt8(0x00);
79 Byte = llvm::dyn_cast<llvm::ConstantInt>(
87 I->addAnnotationMetadata(
"auto-init");
// Map from printf-family builtin IDs to their IEEE-128 ("ieee128") library
// entry points. Consulted later in this file when the target is PPC64 and
// long double uses the IEEEquad format. NOTE(review): the map's closing
// brace/terminator is not visible in this elided excerpt.
102 static SmallDenseMap<unsigned, StringRef, 8> F128Builtins{
103 {Builtin::BI__builtin_printf,
"__printfieee128"},
104 {Builtin::BI__builtin_vsnprintf,
"__vsnprintfieee128"},
105 {Builtin::BI__builtin_vsprintf,
"__vsprintfieee128"},
106 {Builtin::BI__builtin_sprintf,
"__sprintfieee128"},
107 {Builtin::BI__builtin_snprintf,
"__snprintfieee128"},
108 {Builtin::BI__builtin_fprintf,
"__fprintfieee128"},
109 {Builtin::BI__builtin_nexttowardf128,
"__nexttowardieee128"},
// Fragment: resolving the library-function name for a builtin (looks like
// CodeGenModule::getBuiltinLibFunction — signature not visible here).
// Honors an explicit asm label on the declaration first, then remaps
// printf-family builtins to their *ieee128 variants on PPC64 when long
// double is IEEEquad, and finally materializes the llvm::Function.
115 if (FD->
hasAttr<AsmLabelAttr>())
116 Name = getMangledName(D);
120 if (getTriple().isPPC64() &&
121 &getTarget().getLongDoubleFormat() == &llvm::APFloat::IEEEquad() &&
122 F128Builtins.find(BuiltinID) != F128Builtins.end())
123 Name = F128Builtins[BuiltinID];
128 llvm::FunctionType *Ty =
129 cast<llvm::FunctionType>(getTypes().ConvertType(FD->
getType()));
131 return GetOrCreateLLVMFunction(Name, Ty, D,
false);
// Fragments of two inverse coercion helpers used by the atomic-builtin code:
// one coerces a value to the integer type used for the atomic op (pointer ->
// ptrtoint, otherwise asserts it already is that integer type), the other
// converts the integer result back (inttoptr when the expected result is a
// pointer). Function names/signatures are partially elided.
137 QualType T, llvm::IntegerType *IntType) {
140 if (
V->getType()->isPointerTy())
141 return CGF.
Builder.CreatePtrToInt(
V, IntType);
143 assert(
V->getType() == IntType);
// Inverse direction: integer result back to the source-level result type.
148 QualType T, llvm::Type *ResultType) {
151 if (ResultType->isPointerTy())
152 return CGF.
Builder.CreateIntToPtr(
V, ResultType);
154 assert(
V->getType() == ResultType);
// Fragments of the atomic read-modify-write helpers. The visible pattern in
// each: cast the destination pointer to an intN* in its address space, coerce
// the operand via EmitToInt, emit an atomicrmw (sequentially-consistent
// unless an Ordering parameter says otherwise), then convert back.
// NOTE(review): the atomicrmw-creating calls themselves (lines between the
// fused numbers) are elided from this excerpt.
162 AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
171 unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();
173 llvm::IntegerType *IntType =
176 llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
178 llvm::Value *Args[2];
179 Args[0] = CGF.
Builder.CreateBitCast(DestPtr, IntPtrType);
181 llvm::Type *ValueType = Args[1]->getType();
182 Args[1] =
EmitToInt(CGF, Args[1], T, IntType);
185 Kind, Args[0], Args[1], Ordering);
// Fragment: recasting an address to the value's pointer type ("cast") —
// presumably part of a (non-temporal?) load/store helper; TODO confirm.
195 unsigned SrcAddrSpace =
Address->
getType()->getPointerAddressSpace();
197 Address, llvm::PointerType::get(Val->getType(), SrcAddrSpace),
"cast");
// Signatures of two wrappers taking an atomicrmw BinOp kind; the second also
// takes a post-op (Op) and an Invert flag for the "op-and-fetch with
// inversion" (e.g. nand-style) builtins.
213 llvm::AtomicRMWInst::BinOp
Kind,
222 llvm::AtomicRMWInst::BinOp
Kind,
224 Instruction::BinaryOps Op,
225 bool Invert =
false) {
233 unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();
235 llvm::IntegerType *IntType =
238 llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
240 llvm::Value *Args[2];
242 llvm::Type *ValueType = Args[1]->getType();
243 Args[1] =
EmitToInt(CGF, Args[1], T, IntType);
244 Args[0] = CGF.
Builder.CreateBitCast(DestPtr, IntPtrType);
247 Kind, Args[0], Args[1], llvm::AtomicOrdering::SequentiallyConsistent);
// Recompute the "post" value from the fetched old value, then optionally
// bitwise-invert it (xor with all-ones) when Invert is set.
248 Result = CGF.
Builder.CreateBinOp(Op, Result, Args[1]);
251 CGF.
Builder.CreateBinOp(llvm::Instruction::Xor, Result,
252 llvm::ConstantInt::getAllOnesValue(IntType));
// Fragment: compare-and-swap helper — seq_cst cmpxchg, returning the
// zero-extended success bit extracted from the {old, success} pair.
275 unsigned AddrSpace = DestPtr->getType()->getPointerAddressSpace();
277 llvm::IntegerType *IntType = llvm::IntegerType::get(
279 llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
282 Args[0] = CGF.
Builder.CreateBitCast(DestPtr, IntPtrType);
284 llvm::Type *ValueType = Args[1]->getType();
285 Args[1] =
EmitToInt(CGF, Args[1], T, IntType);
289 Args[0], Args[1], Args[2], llvm::AtomicOrdering::SequentiallyConsistent,
290 llvm::AtomicOrdering::SequentiallyConsistent);
293 return CGF.
Builder.CreateZExt(CGF.
Builder.CreateExtractValue(Pair, 1),
// Fragments of the MSVC _InterlockedCompareExchange-style helpers. Both
// derive a legal failure ordering from the success ordering (Release ->
// Monotonic, since a cmpxchg failure ordering may not be Release), mark the
// instruction volatile to match MSVC semantics, and extract the old value /
// success bit from the cmpxchg result pair.
316 AtomicOrdering SuccessOrdering = AtomicOrdering::SequentiallyConsistent) {
330 auto FailureOrdering = SuccessOrdering == AtomicOrdering::Release ?
331 AtomicOrdering::Monotonic :
339 Destination, Comparand, Exchange,
340 SuccessOrdering, FailureOrdering);
341 Result->setVolatile(
true);
342 return CGF.
Builder.CreateExtractValue(Result, 0);
// 128-bit variant (_InterlockedCompareExchange128-style): takes the exchange
// value as two 64-bit halves plus a comparand pointer, widens the halves to
// i128, and combines them as (High << 64) | Low before the cmpxchg.
355 AtomicOrdering SuccessOrdering) {
362 assert(Destination->getType()->isPointerTy());
363 assert(!ExchangeHigh->getType()->isPointerTy());
364 assert(!ExchangeLow->getType()->isPointerTy());
365 assert(ComparandPtr->getType()->isPointerTy());
368 auto FailureOrdering = SuccessOrdering == AtomicOrdering::Release
369 ? AtomicOrdering::Monotonic
373 llvm::Type *Int128Ty = llvm::IntegerType::get(CGF.
getLLVMContext(), 128);
374 llvm::Type *Int128PtrTy = Int128Ty->getPointerTo();
375 Destination = CGF.
Builder.CreateBitCast(Destination, Int128PtrTy);
376 Address ComparandResult(CGF.
Builder.CreateBitCast(ComparandPtr, Int128PtrTy),
380 ExchangeHigh = CGF.
Builder.CreateZExt(ExchangeHigh, Int128Ty);
381 ExchangeLow = CGF.
Builder.CreateZExt(ExchangeLow, Int128Ty);
383 CGF.
Builder.CreateShl(ExchangeHigh, llvm::ConstantInt::get(Int128Ty, 64));
384 llvm::Value *Exchange = CGF.
Builder.CreateOr(ExchangeHigh, ExchangeLow);
390 SuccessOrdering, FailureOrdering);
396 CXI->setVolatile(
true);
403 Value *Success = CGF.
Builder.CreateExtractValue(CXI, 1);
// Fragments: atomic increment / decrement helpers (fetch-op of constant 1,
// then re-apply +1 / -1 to the fetched value so the *new* value is returned,
// matching _InterlockedIncrement/_InterlockedDecrement semantics).
408 AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
415 ConstantInt::get(IntTy, 1),
417 return CGF.
Builder.CreateAdd(Result, ConstantInt::get(IntTy, 1));
421 AtomicOrdering Ordering = AtomicOrdering::SequentiallyConsistent) {
428 ConstantInt::get(IntTy, 1),
430 return CGF.
Builder.CreateSub(Result, ConstantInt::get(IntTy, 1));
// Fragments: volatile load / volatile store helpers (ISO-volatile style) —
// both recast the pointer to an integer-pointer and mark the access volatile.
440 Ptr = CGF.
Builder.CreateBitCast(Ptr, ITy->getPointerTo())
442 Load->setVolatile(
true);
454 Ptr = CGF.
Builder.CreateBitCast(Ptr, ITy->getPointerTo());
455 llvm::StoreInst *
Store =
457 Store->setVolatile(
true);
// Fragments of the family of "maybe constrained" floating-point builtin
// emitters (unary / binary / ternary / generic-args / fp-to-int-round):
// each checks Builder.getIsFPConstrained() and, if set, emits the
// strict-FP (constrained) intrinsic via CreateConstrainedFPCall; otherwise
// emits the plain intrinsic call.
465 const CallExpr *E,
unsigned IntrinsicID,
466 unsigned ConstrainedIntrinsicID) {
469 if (CGF.
Builder.getIsFPConstrained()) {
471 Function *F = CGF.
CGM.
getIntrinsic(ConstrainedIntrinsicID, Src0->getType());
472 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0 });
475 return CGF.
Builder.CreateCall(F, Src0);
// Binary variant.
482 const CallExpr *E,
unsigned IntrinsicID,
483 unsigned ConstrainedIntrinsicID) {
487 if (CGF.
Builder.getIsFPConstrained()) {
489 Function *F = CGF.
CGM.
getIntrinsic(ConstrainedIntrinsicID, Src0->getType());
490 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0, Src1 });
493 return CGF.
Builder.CreateCall(F, { Src0, Src1 });
// Ternary variant (e.g. fma-like builtins).
500 const CallExpr *E,
unsigned IntrinsicID,
501 unsigned ConstrainedIntrinsicID) {
506 if (CGF.
Builder.getIsFPConstrained()) {
508 Function *F = CGF.
CGM.
getIntrinsic(ConstrainedIntrinsicID, Src0->getType());
509 return CGF.
Builder.CreateConstrainedFPCall(F, { Src0, Src1, Src2 });
512 return CGF.
Builder.CreateCall(F, { Src0, Src1, Src2 });
// Variant taking a pre-built argument list (Args).
519 unsigned IntrinsicID,
520 unsigned ConstrainedIntrinsicID,
524 if (CGF.
Builder.getIsFPConstrained())
529 if (CGF.
Builder.getIsFPConstrained())
530 return CGF.
Builder.CreateConstrainedFPCall(F, Args)
532 return CGF.
Builder.CreateCall(F, Args);
// Plain (non-constrained) unary/binary/ternary intrinsic emitters.
538 unsigned IntrinsicID,
539 llvm::StringRef Name =
"") {
543 return CGF.
Builder.CreateCall(F, Src0, Name);
549 unsigned IntrinsicID) {
554 return CGF.
Builder.CreateCall(F, { Src0, Src1 });
560 unsigned IntrinsicID) {
566 return CGF.
Builder.CreateCall(F, { Src0, Src1, Src2 });
572 unsigned IntrinsicID) {
577 return CGF.
Builder.CreateCall(F, {Src0, Src1});
// FP -> int rounding variant: intrinsic is keyed on {ResultType, SrcType}.
583 unsigned IntrinsicID,
584 unsigned ConstrainedIntrinsicID) {
588 if (CGF.
Builder.getIsFPConstrained()) {
591 {ResultType, Src0->getType()});
592 return CGF.
Builder.CreateConstrainedFPCall(F, {Src0});
596 return CGF.
Builder.CreateCall(F, Src0);
// Fragment: a call marked as not accessing memory (pure computation).
603 llvm::CallInst *Call = CGF.
Builder.CreateCall(F,
V);
604 Call->setDoesNotAccessMemory();
// Fragment: a sign-bit style helper — reinterprets the FP value as an
// integer of the same primitive width (with a special case for the PPC
// ppc_fp128 double-double format, where Width/IntTy are recomputed) and
// compares against zero. Interior lines are elided.
613 llvm::Type *Ty =
V->getType();
614 int Width = Ty->getPrimitiveSizeInBits();
615 llvm::Type *IntTy = llvm::IntegerType::get(C, Width);
617 if (Ty->isPPC_FP128Ty()) {
627 Value *ShiftCst = llvm::ConstantInt::get(IntTy, Width);
632 IntTy = llvm::IntegerType::get(C, Width);
635 Value *
Zero = llvm::Constant::getNullValue(IntTy);
// Fragment: signature of a library-call emission helper.
640 const CallExpr *E, llvm::Constant *calleeValue) {
// Overflow-intrinsic helper: calls an {result, carry} overflow intrinsic,
// returns the result and passes the carry bit back through the out-param.
656 llvm::Value *
X, llvm::Value *Y,
657 llvm::Value *&Carry) {
659 assert(
X->getType() == Y->getType() &&
660 "Arguments must be the same type. (Did you forget to make sure both "
661 "arguments have the same integer width?)");
664 llvm::Value *Tmp = CGF.
Builder.CreateCall(Callee, {
X, Y});
665 Carry = CGF.
Builder.CreateExtractValue(Tmp, 1);
666 return CGF.
Builder.CreateExtractValue(Tmp, 0);
// Ranged-builtin helper: emits a no-arg intrinsic call and attaches
// !range metadata [low, high) built from 32-bit APInts.
670 unsigned IntrinsicID,
673 llvm::MDNode *RNode = MDHelper.createRange(
APInt(32, low),
APInt(32, high));
675 llvm::Instruction *Call = CGF.
Builder.CreateCall(F);
676 Call->setMetadata(llvm::LLVMContext::MD_range, RNode);
// Fragments: WidthAndSignedness record plus helpers that compute it for a
// type and derive an "encompassing" integer type for a list of types (the
// two loops over Types appear to find required signedness, then minimum
// width — interior logic elided; TODO confirm against the full file).
681 struct WidthAndSignedness {
687 static WidthAndSignedness
701 static struct WidthAndSignedness
703 assert(Types.size() > 0 &&
"Empty list of types.");
707 for (
const auto &
Type : Types) {
716 for (
const auto &
Type : Types) {
718 if (Width < MinWidth) {
// Fragment: va_start/va_end emission — bitcasts the va_list pointer to i8*
// and calls llvm.va_start or llvm.va_end depending on IsStart.
727 llvm::Type *DestType = Int8PtrTy;
728 if (ArgValue->getType() != DestType)
730 Builder.CreateBitCast(ArgValue, DestType, ArgValue->getName().data());
732 Intrinsic::ID inst = IsStart ? Intrinsic::vastart : Intrinsic::vaend;
733 return Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue);
// __builtin_object_size "type" compatibility check (types 0/1 and 3/2 pair
// up), and the default result: 0 for "maximum" modes (bit 1 set), -1 else.
742 return From == To || (From == 0 && To == 1) || (From == 3 && To == 2);
747 return ConstantInt::get(ResType, (
Type & 2) ? 0 : -1,
true);
// __builtin_object_size support. First method: try to fold the size at
// compile time, else fall through to emitting the llvm.objectsize intrinsic.
751 CodeGenFunction::evaluateOrEmitBuiltinObjectSize(
const Expr *E,
unsigned Type,
752 llvm::IntegerType *ResType,
753 llvm::Value *EmittedE,
757 return emitBuiltinObjectSize(E,
Type, ResType, EmittedE, IsDynamic);
758 return ConstantInt::get(ResType, ObjectSize,
true);
// Runtime path: special-case a parameter carrying pass_object_size (load the
// hidden size argument recorded in SizeArguments/LocalDeclMap), otherwise
// emit llvm.objectsize on the pointer. Min = (Type & 2), i.e. the "minimum"
// modes; null pointers are treated as unknown-size.
771 CodeGenFunction::emitBuiltinObjectSize(
const Expr *E,
unsigned Type,
772 llvm::IntegerType *ResType,
773 llvm::Value *EmittedE,
bool IsDynamic) {
777 auto *Param = dyn_cast<ParmVarDecl>(D->getDecl());
778 auto *PS = D->getDecl()->getAttr<PassObjectSizeAttr>();
779 if (Param !=
nullptr && PS !=
nullptr &&
781 auto Iter = SizeArguments.find(Param);
782 assert(Iter != SizeArguments.end());
785 auto DIter = LocalDeclMap.find(D);
786 assert(DIter != LocalDeclMap.end());
788 return EmitLoadOfScalar(DIter->second,
false,
799 Value *Ptr = EmittedE ? EmittedE : EmitScalarExpr(E);
800 assert(Ptr->getType()->isPointerTy() &&
801 "Non-pointer passed to __builtin_object_size?");
804 CGM.getIntrinsic(Intrinsic::objectsize, {ResType, Ptr->getType()});
807 Value *Min = Builder.getInt1((
Type & 2) != 0);
809 Value *NullIsUnknown = Builder.getTrue();
811 return Builder.CreateCall(F, {Ptr, Min, NullIsUnknown,
Dynamic});
// BitTest: decoded form of the MSVC _bittest* family — what to do to the bit
// (ActionKind), which memory ordering flavor (InterlockingKind), and whether
// it is the 64-bit variant (the trailing true/false in each initializer).
817 enum ActionKind : uint8_t { TestOnly, Complement, Reset, Set };
818 enum InterlockingKind : uint8_t {
827 InterlockingKind Interlocking;
830 static BitTest decodeBitTestBuiltin(
unsigned BuiltinID);
// Table mapping each _bittest* builtin ID to its {Action, Interlocking,
// Is64Bit} triple. 32-bit plain variants first, then 64-bit, then the
// _acq/_rel/_nf interlocked variants.
834 BitTest BitTest::decodeBitTestBuiltin(
unsigned BuiltinID) {
837 case Builtin::BI_bittest:
838 return {TestOnly, Unlocked,
false};
839 case Builtin::BI_bittestandcomplement:
840 return {Complement, Unlocked,
false};
841 case Builtin::BI_bittestandreset:
842 return {Reset, Unlocked,
false};
843 case Builtin::BI_bittestandset:
844 return {Set, Unlocked,
false};
845 case Builtin::BI_interlockedbittestandreset:
846 return {Reset, Sequential,
false};
847 case Builtin::BI_interlockedbittestandset:
848 return {Set, Sequential,
false};
851 case Builtin::BI_bittest64:
852 return {TestOnly, Unlocked,
true};
853 case Builtin::BI_bittestandcomplement64:
854 return {Complement, Unlocked,
true};
855 case Builtin::BI_bittestandreset64:
856 return {Reset, Unlocked,
true};
857 case Builtin::BI_bittestandset64:
858 return {Set, Unlocked,
true};
859 case Builtin::BI_interlockedbittestandreset64:
860 return {Reset, Sequential,
true};
861 case Builtin::BI_interlockedbittestandset64:
862 return {Set, Sequential,
true};
865 case Builtin::BI_interlockedbittestandset_acq:
866 return {Set, Acquire,
false};
867 case Builtin::BI_interlockedbittestandset_rel:
868 return {Set, Release,
false};
869 case Builtin::BI_interlockedbittestandset_nf:
870 return {Set, NoFence,
false};
871 case Builtin::BI_interlockedbittestandreset_acq:
872 return {Reset, Acquire,
false};
873 case Builtin::BI_interlockedbittestandreset_rel:
874 return {Reset, Release,
false};
875 case Builtin::BI_interlockedbittestandreset_nf:
876 return {Reset, NoFence,
false};
878 llvm_unreachable(
"expected only bittest intrinsics");
// Action -> x86 "bt" mnemonic suffix: '' (bt), 'c' (btc), 'r' (btr),
// 's' (bts) — consumed by the inline-asm emitter below.
883 case BitTest::TestOnly:
return '\0';
884 case BitTest::Complement:
return 'c';
885 case BitTest::Reset:
return 'r';
886 case BitTest::Set:
return 's';
888 llvm_unreachable(
"invalid action");
// x86 path for the _bittest* family: build a "bt{c,r,s}{l,q} $2, ($1)"
// inline-asm string (with a lock prefix when interlocked — elided here),
// capture CF via the "={@ccc}" flag-output constraint, and call it as
// i8(iN*, iN) inline asm with side effects.
896 char SizeSuffix = BT.Is64Bit ?
'q' :
'l';
900 raw_svector_ostream AsmOS(
Asm);
901 if (BT.Interlocking != BitTest::Unlocked)
906 AsmOS << SizeSuffix <<
" $2, ($1)";
909 std::string Constraints =
"={@ccc},r,r,~{cc},~{memory}";
911 if (!MachineClobbers.empty()) {
913 Constraints += MachineClobbers;
915 llvm::IntegerType *IntType = llvm::IntegerType::get(
918 llvm::Type *IntPtrType = IntType->getPointerTo();
919 llvm::FunctionType *FTy =
920 llvm::FunctionType::get(CGF.
Int8Ty, {IntPtrType, IntType},
false);
922 llvm::InlineAsm *IA =
923 llvm::InlineAsm::get(FTy,
Asm, Constraints,
true);
924 return CGF.
Builder.CreateCall(IA, {BitBase, BitPos});
// InterlockingKind -> LLVM atomic ordering for the generic (non-x86) path.
927 static llvm::AtomicOrdering
930 case BitTest::Unlocked:
return llvm::AtomicOrdering::NotAtomic;
931 case BitTest::Sequential:
return llvm::AtomicOrdering::SequentiallyConsistent;
932 case BitTest::Acquire:
return llvm::AtomicOrdering::Acquire;
933 case BitTest::Release:
return llvm::AtomicOrdering::Release;
934 case BitTest::NoFence:
return llvm::AtomicOrdering::Monotonic;
936 llvm_unreachable(
"invalid interlocking");
// Portable _bittest* lowering: address the byte containing the bit
// (byteidx = pos >> 3, poslow = pos & 7), build a single-bit mask, then
// either atomicrmw or(/and-with-not for Reset) for interlocked variants, or
// plain load/modify/store for unlocked ones; result is bit (old >> poslow)&1.
949 BitTest BT = BitTest::decodeBitTestBuiltin(BuiltinID);
961 BitPos, llvm::ConstantInt::get(BitPos->getType(), 3),
"bittest.byteidx");
964 ByteIndex,
"bittest.byteaddr"),
968 llvm::ConstantInt::get(CGF.
Int8Ty, 0x7));
// Mask is only needed when the bit is actually modified.
971 Value *Mask =
nullptr;
972 if (BT.Action != BitTest::TestOnly) {
973 Mask = CGF.
Builder.CreateShl(llvm::ConstantInt::get(CGF.
Int8Ty, 1), PosLow,
// Atomic path: Or sets the bit; Reset becomes an And with inverted mask.
980 Value *OldByte =
nullptr;
981 if (Ordering != llvm::AtomicOrdering::NotAtomic) {
984 llvm::AtomicRMWInst::BinOp RMWOp = llvm::AtomicRMWInst::Or;
985 if (BT.Action == BitTest::Reset) {
986 Mask = CGF.
Builder.CreateNot(Mask);
// Non-atomic path: compute the new byte per the decoded action.
994 Value *NewByte =
nullptr;
996 case BitTest::TestOnly:
999 case BitTest::Complement:
1000 NewByte = CGF.
Builder.CreateXor(OldByte, Mask);
1002 case BitTest::Reset:
1003 NewByte = CGF.
Builder.CreateAnd(OldByte, CGF.
Builder.CreateNot(Mask));
1006 NewByte = CGF.
Builder.CreateOr(OldByte, Mask);
// Extract the original value of the tested bit from the old byte.
1015 Value *ShiftedByte = CGF.
Builder.CreateLShr(OldByte, PosLow,
"bittest.shr");
1017 ShiftedByte, llvm::ConstantInt::get(CGF.
Int8Ty, 1),
"bittest.res");
// PowerPC load-reserve builtins (__builtin_ppc_l{b,h,w,d}arx): choose the
// mnemonic/return width per builtin (interior lines elided), emit as inline
// asm "<op> $0, ${1:y}" with side effects, and attach an elementtype
// attribute for the pointer operand.
1026 raw_svector_ostream AsmOS(
Asm);
1027 llvm::IntegerType *RetType = CGF.
Int32Ty;
1029 switch (BuiltinID) {
1030 case clang::PPC::BI__builtin_ppc_ldarx:
1034 case clang::PPC::BI__builtin_ppc_lwarx:
1038 case clang::PPC::BI__builtin_ppc_lharx:
1042 case clang::PPC::BI__builtin_ppc_lbarx:
1047 llvm_unreachable(
"Expected only PowerPC load reserve intrinsics");
1050 AsmOS <<
"$0, ${1:y}";
1054 if (!MachineClobbers.empty()) {
1056 Constraints += MachineClobbers;
1059 llvm::Type *IntPtrType = RetType->getPointerTo();
1060 llvm::FunctionType *FTy =
1061 llvm::FunctionType::get(RetType, {IntPtrType},
false);
1063 llvm::InlineAsm *IA =
1064 llvm::InlineAsm::get(FTy,
Asm, Constraints,
true);
1065 llvm::CallInst *CI = CGF.
Builder.CreateCall(IA, {Addr});
1067 0, Attribute::get(CGF.
getLLVMContext(), Attribute::ElementType, RetType));
// MSVC CRT setjmp flavors (_setjmp / _setjmp3 / _setjmpex). The emitter
// picks the CRT entry point and its extra argument (count 0 for _setjmp3;
// a frame/return address for the others — the two CreateCall fragments at
// 1097/1100 presumably call llvm.frameaddress / llvm.returnaddress; TODO
// confirm), marks the callee returns_twice, and calls it with (buf, arg1).
1072 enum class MSVCSetJmpKind {
1084 llvm::Value *Arg1 =
nullptr;
1085 llvm::Type *Arg1Ty =
nullptr;
1087 bool IsVarArg =
false;
1088 if (SJKind == MSVCSetJmpKind::_setjmp3) {
1091 Arg1 = llvm::ConstantInt::get(CGF.
IntTy, 0);
1094 Name = SJKind == MSVCSetJmpKind::_setjmp ?
"_setjmp" :
"_setjmpex";
1097 Arg1 = CGF.
Builder.CreateCall(
1100 Arg1 = CGF.
Builder.CreateCall(
1102 llvm::ConstantInt::get(CGF.
Int32Ty, 0));
// Declare int name(i8*, Arg1Ty) [returns_twice] and emit the call.
1106 llvm::Type *ArgTypes[2] = {CGF.
Int8PtrTy, Arg1Ty};
1107 llvm::AttributeList ReturnsTwiceAttr = llvm::AttributeList::get(
1109 llvm::Attribute::ReturnsTwice);
1111 llvm::FunctionType::get(CGF.
IntTy, ArgTypes, IsVarArg), Name,
1112 ReturnsTwiceAttr,
true);
1114 llvm::Value *Buf = CGF.
Builder.CreateBitOrPointerCast(
1116 llvm::Value *Args[] = {Buf, Arg1};
1118 CB->setAttributes(ReturnsTwiceAttr);
// Fragment of the target-independent MSVCIntrin enumerator list: canonical
// names for the MSVC _Interlocked* operations and their _acq/_rel/_nf
// ordering variants, used as the common currency by the per-target
// translate*ToMsvcIntrin switches below.
1128 _InterlockedDecrement,
1129 _InterlockedExchange,
1130 _InterlockedExchangeAdd,
1131 _InterlockedExchangeSub,
1132 _InterlockedIncrement,
1135 _InterlockedExchangeAdd_acq,
1136 _InterlockedExchangeAdd_rel,
1137 _InterlockedExchangeAdd_nf,
1138 _InterlockedExchange_acq,
1139 _InterlockedExchange_rel,
1140 _InterlockedExchange_nf,
1141 _InterlockedCompareExchange_acq,
1142 _InterlockedCompareExchange_rel,
1143 _InterlockedCompareExchange_nf,
1144 _InterlockedCompareExchange128,
1145 _InterlockedCompareExchange128_acq,
1146 _InterlockedCompareExchange128_rel,
1147 _InterlockedCompareExchange128_nf,
1151 _InterlockedXor_acq,
1152 _InterlockedXor_rel,
1154 _InterlockedAnd_acq,
1155 _InterlockedAnd_rel,
1157 _InterlockedIncrement_acq,
1158 _InterlockedIncrement_rel,
1159 _InterlockedIncrement_nf,
1160 _InterlockedDecrement_acq,
1161 _InterlockedDecrement_rel,
1162 _InterlockedDecrement_nf,
// ARM builtin ID -> target-independent MSVCIntrin mapping. Width-suffixed
// variants (8/16/<none>/64) of each interlocked op collapse onto one
// enumerator; the _acq/_rel/_nf ordering suffix is preserved. NOTE(review):
// the enclosing function signature and default case are elided from this
// excerpt.
1169 switch (BuiltinID) {
1172 case ARM::BI_BitScanForward:
1173 case ARM::BI_BitScanForward64:
1174 return MSVCIntrin::_BitScanForward;
1175 case ARM::BI_BitScanReverse:
1176 case ARM::BI_BitScanReverse64:
1177 return MSVCIntrin::_BitScanReverse;
1178 case ARM::BI_InterlockedAnd64:
1179 return MSVCIntrin::_InterlockedAnd;
1180 case ARM::BI_InterlockedExchange64:
1181 return MSVCIntrin::_InterlockedExchange;
1182 case ARM::BI_InterlockedExchangeAdd64:
1183 return MSVCIntrin::_InterlockedExchangeAdd;
1184 case ARM::BI_InterlockedExchangeSub64:
1185 return MSVCIntrin::_InterlockedExchangeSub;
1186 case ARM::BI_InterlockedOr64:
1187 return MSVCIntrin::_InterlockedOr;
1188 case ARM::BI_InterlockedXor64:
1189 return MSVCIntrin::_InterlockedXor;
1190 case ARM::BI_InterlockedDecrement64:
1191 return MSVCIntrin::_InterlockedDecrement;
1192 case ARM::BI_InterlockedIncrement64:
1193 return MSVCIntrin::_InterlockedIncrement;
1194 case ARM::BI_InterlockedExchangeAdd8_acq:
1195 case ARM::BI_InterlockedExchangeAdd16_acq:
1196 case ARM::BI_InterlockedExchangeAdd_acq:
1197 case ARM::BI_InterlockedExchangeAdd64_acq:
1198 return MSVCIntrin::_InterlockedExchangeAdd_acq;
1199 case ARM::BI_InterlockedExchangeAdd8_rel:
1200 case ARM::BI_InterlockedExchangeAdd16_rel:
1201 case ARM::BI_InterlockedExchangeAdd_rel:
1202 case ARM::BI_InterlockedExchangeAdd64_rel:
1203 return MSVCIntrin::_InterlockedExchangeAdd_rel;
1204 case ARM::BI_InterlockedExchangeAdd8_nf:
1205 case ARM::BI_InterlockedExchangeAdd16_nf:
1206 case ARM::BI_InterlockedExchangeAdd_nf:
1207 case ARM::BI_InterlockedExchangeAdd64_nf:
1208 return MSVCIntrin::_InterlockedExchangeAdd_nf;
1209 case ARM::BI_InterlockedExchange8_acq:
1210 case ARM::BI_InterlockedExchange16_acq:
1211 case ARM::BI_InterlockedExchange_acq:
1212 case ARM::BI_InterlockedExchange64_acq:
1213 return MSVCIntrin::_InterlockedExchange_acq;
1214 case ARM::BI_InterlockedExchange8_rel:
1215 case ARM::BI_InterlockedExchange16_rel:
1216 case ARM::BI_InterlockedExchange_rel:
1217 case ARM::BI_InterlockedExchange64_rel:
1218 return MSVCIntrin::_InterlockedExchange_rel;
1219 case ARM::BI_InterlockedExchange8_nf:
1220 case ARM::BI_InterlockedExchange16_nf:
1221 case ARM::BI_InterlockedExchange_nf:
1222 case ARM::BI_InterlockedExchange64_nf:
1223 return MSVCIntrin::_InterlockedExchange_nf;
1224 case ARM::BI_InterlockedCompareExchange8_acq:
1225 case ARM::BI_InterlockedCompareExchange16_acq:
1226 case ARM::BI_InterlockedCompareExchange_acq:
1227 case ARM::BI_InterlockedCompareExchange64_acq:
1228 return MSVCIntrin::_InterlockedCompareExchange_acq;
1229 case ARM::BI_InterlockedCompareExchange8_rel:
1230 case ARM::BI_InterlockedCompareExchange16_rel:
1231 case ARM::BI_InterlockedCompareExchange_rel:
1232 case ARM::BI_InterlockedCompareExchange64_rel:
1233 return MSVCIntrin::_InterlockedCompareExchange_rel;
1234 case ARM::BI_InterlockedCompareExchange8_nf:
1235 case ARM::BI_InterlockedCompareExchange16_nf:
1236 case ARM::BI_InterlockedCompareExchange_nf:
1237 case ARM::BI_InterlockedCompareExchange64_nf:
1238 return MSVCIntrin::_InterlockedCompareExchange_nf;
1239 case ARM::BI_InterlockedOr8_acq:
1240 case ARM::BI_InterlockedOr16_acq:
1241 case ARM::BI_InterlockedOr_acq:
1242 case ARM::BI_InterlockedOr64_acq:
1243 return MSVCIntrin::_InterlockedOr_acq;
1244 case ARM::BI_InterlockedOr8_rel:
1245 case ARM::BI_InterlockedOr16_rel:
1246 case ARM::BI_InterlockedOr_rel:
1247 case ARM::BI_InterlockedOr64_rel:
1248 return MSVCIntrin::_InterlockedOr_rel;
1249 case ARM::BI_InterlockedOr8_nf:
1250 case ARM::BI_InterlockedOr16_nf:
1251 case ARM::BI_InterlockedOr_nf:
1252 case ARM::BI_InterlockedOr64_nf:
1253 return MSVCIntrin::_InterlockedOr_nf;
1254 case ARM::BI_InterlockedXor8_acq:
1255 case ARM::BI_InterlockedXor16_acq:
1256 case ARM::BI_InterlockedXor_acq:
1257 case ARM::BI_InterlockedXor64_acq:
1258 return MSVCIntrin::_InterlockedXor_acq;
1259 case ARM::BI_InterlockedXor8_rel:
1260 case ARM::BI_InterlockedXor16_rel:
1261 case ARM::BI_InterlockedXor_rel:
1262 case ARM::BI_InterlockedXor64_rel:
1263 return MSVCIntrin::_InterlockedXor_rel;
1264 case ARM::BI_InterlockedXor8_nf:
1265 case ARM::BI_InterlockedXor16_nf:
1266 case ARM::BI_InterlockedXor_nf:
1267 case ARM::BI_InterlockedXor64_nf:
1268 return MSVCIntrin::_InterlockedXor_nf;
1269 case ARM::BI_InterlockedAnd8_acq:
1270 case ARM::BI_InterlockedAnd16_acq:
1271 case ARM::BI_InterlockedAnd_acq:
1272 case ARM::BI_InterlockedAnd64_acq:
1273 return MSVCIntrin::_InterlockedAnd_acq;
1274 case ARM::BI_InterlockedAnd8_rel:
1275 case ARM::BI_InterlockedAnd16_rel:
1276 case ARM::BI_InterlockedAnd_rel:
1277 case ARM::BI_InterlockedAnd64_rel:
1278 return MSVCIntrin::_InterlockedAnd_rel;
1279 case ARM::BI_InterlockedAnd8_nf:
1280 case ARM::BI_InterlockedAnd16_nf:
1281 case ARM::BI_InterlockedAnd_nf:
1282 case ARM::BI_InterlockedAnd64_nf:
1283 return MSVCIntrin::_InterlockedAnd_nf;
1284 case ARM::BI_InterlockedIncrement16_acq:
1285 case ARM::BI_InterlockedIncrement_acq:
1286 case ARM::BI_InterlockedIncrement64_acq:
1287 return MSVCIntrin::_InterlockedIncrement_acq;
1288 case ARM::BI_InterlockedIncrement16_rel:
1289 case ARM::BI_InterlockedIncrement_rel:
1290 case ARM::BI_InterlockedIncrement64_rel:
1291 return MSVCIntrin::_InterlockedIncrement_rel;
1292 case ARM::BI_InterlockedIncrement16_nf:
1293 case ARM::BI_InterlockedIncrement_nf:
1294 case ARM::BI_InterlockedIncrement64_nf:
1295 return MSVCIntrin::_InterlockedIncrement_nf;
1296 case ARM::BI_InterlockedDecrement16_acq:
1297 case ARM::BI_InterlockedDecrement_acq:
1298 case ARM::BI_InterlockedDecrement64_acq:
1299 return MSVCIntrin::_InterlockedDecrement_acq;
1300 case ARM::BI_InterlockedDecrement16_rel:
1301 case ARM::BI_InterlockedDecrement_rel:
1302 case ARM::BI_InterlockedDecrement64_rel:
1303 return MSVCIntrin::_InterlockedDecrement_rel;
1304 case ARM::BI_InterlockedDecrement16_nf:
1305 case ARM::BI_InterlockedDecrement_nf:
1306 case ARM::BI_InterlockedDecrement64_nf:
1307 return MSVCIntrin::_InterlockedDecrement_nf;
1309 llvm_unreachable(
"must return from switch");
// AArch64 builtin ID -> MSVCIntrin mapping. Mirrors the ARM table above,
// plus the AArch64-only _InterlockedCompareExchange128* variants.
// NOTE(review): function signature and default case elided in this excerpt.
1315 switch (BuiltinID) {
1318 case AArch64::BI_BitScanForward:
1319 case AArch64::BI_BitScanForward64:
1320 return MSVCIntrin::_BitScanForward;
1321 case AArch64::BI_BitScanReverse:
1322 case AArch64::BI_BitScanReverse64:
1323 return MSVCIntrin::_BitScanReverse;
1324 case AArch64::BI_InterlockedAnd64:
1325 return MSVCIntrin::_InterlockedAnd;
1326 case AArch64::BI_InterlockedExchange64:
1327 return MSVCIntrin::_InterlockedExchange;
1328 case AArch64::BI_InterlockedExchangeAdd64:
1329 return MSVCIntrin::_InterlockedExchangeAdd;
1330 case AArch64::BI_InterlockedExchangeSub64:
1331 return MSVCIntrin::_InterlockedExchangeSub;
1332 case AArch64::BI_InterlockedOr64:
1333 return MSVCIntrin::_InterlockedOr;
1334 case AArch64::BI_InterlockedXor64:
1335 return MSVCIntrin::_InterlockedXor;
1336 case AArch64::BI_InterlockedDecrement64:
1337 return MSVCIntrin::_InterlockedDecrement;
1338 case AArch64::BI_InterlockedIncrement64:
1339 return MSVCIntrin::_InterlockedIncrement;
1340 case AArch64::BI_InterlockedExchangeAdd8_acq:
1341 case AArch64::BI_InterlockedExchangeAdd16_acq:
1342 case AArch64::BI_InterlockedExchangeAdd_acq:
1343 case AArch64::BI_InterlockedExchangeAdd64_acq:
1344 return MSVCIntrin::_InterlockedExchangeAdd_acq;
1345 case AArch64::BI_InterlockedExchangeAdd8_rel:
1346 case AArch64::BI_InterlockedExchangeAdd16_rel:
1347 case AArch64::BI_InterlockedExchangeAdd_rel:
1348 case AArch64::BI_InterlockedExchangeAdd64_rel:
1349 return MSVCIntrin::_InterlockedExchangeAdd_rel;
1350 case AArch64::BI_InterlockedExchangeAdd8_nf:
1351 case AArch64::BI_InterlockedExchangeAdd16_nf:
1352 case AArch64::BI_InterlockedExchangeAdd_nf:
1353 case AArch64::BI_InterlockedExchangeAdd64_nf:
1354 return MSVCIntrin::_InterlockedExchangeAdd_nf;
1355 case AArch64::BI_InterlockedExchange8_acq:
1356 case AArch64::BI_InterlockedExchange16_acq:
1357 case AArch64::BI_InterlockedExchange_acq:
1358 case AArch64::BI_InterlockedExchange64_acq:
1359 return MSVCIntrin::_InterlockedExchange_acq;
1360 case AArch64::BI_InterlockedExchange8_rel:
1361 case AArch64::BI_InterlockedExchange16_rel:
1362 case AArch64::BI_InterlockedExchange_rel:
1363 case AArch64::BI_InterlockedExchange64_rel:
1364 return MSVCIntrin::_InterlockedExchange_rel;
1365 case AArch64::BI_InterlockedExchange8_nf:
1366 case AArch64::BI_InterlockedExchange16_nf:
1367 case AArch64::BI_InterlockedExchange_nf:
1368 case AArch64::BI_InterlockedExchange64_nf:
1369 return MSVCIntrin::_InterlockedExchange_nf;
1370 case AArch64::BI_InterlockedCompareExchange8_acq:
1371 case AArch64::BI_InterlockedCompareExchange16_acq:
1372 case AArch64::BI_InterlockedCompareExchange_acq:
1373 case AArch64::BI_InterlockedCompareExchange64_acq:
1374 return MSVCIntrin::_InterlockedCompareExchange_acq;
1375 case AArch64::BI_InterlockedCompareExchange8_rel:
1376 case AArch64::BI_InterlockedCompareExchange16_rel:
1377 case AArch64::BI_InterlockedCompareExchange_rel:
1378 case AArch64::BI_InterlockedCompareExchange64_rel:
1379 return MSVCIntrin::_InterlockedCompareExchange_rel;
1380 case AArch64::BI_InterlockedCompareExchange8_nf:
1381 case AArch64::BI_InterlockedCompareExchange16_nf:
1382 case AArch64::BI_InterlockedCompareExchange_nf:
1383 case AArch64::BI_InterlockedCompareExchange64_nf:
1384 return MSVCIntrin::_InterlockedCompareExchange_nf;
1385 case AArch64::BI_InterlockedCompareExchange128:
1386 return MSVCIntrin::_InterlockedCompareExchange128;
1387 case AArch64::BI_InterlockedCompareExchange128_acq:
1388 return MSVCIntrin::_InterlockedCompareExchange128_acq;
1389 case AArch64::BI_InterlockedCompareExchange128_nf:
1390 return MSVCIntrin::_InterlockedCompareExchange128_nf;
1391 case AArch64::BI_InterlockedCompareExchange128_rel:
1392 return MSVCIntrin::_InterlockedCompareExchange128_rel;
1393 case AArch64::BI_InterlockedOr8_acq:
1394 case AArch64::BI_InterlockedOr16_acq:
1395 case AArch64::BI_InterlockedOr_acq:
1396 case AArch64::BI_InterlockedOr64_acq:
1397 return MSVCIntrin::_InterlockedOr_acq;
1398 case AArch64::BI_InterlockedOr8_rel:
1399 case AArch64::BI_InterlockedOr16_rel:
1400 case AArch64::BI_InterlockedOr_rel:
1401 case AArch64::BI_InterlockedOr64_rel:
1402 return MSVCIntrin::_InterlockedOr_rel;
1403 case AArch64::BI_InterlockedOr8_nf:
1404 case AArch64::BI_InterlockedOr16_nf:
1405 case AArch64::BI_InterlockedOr_nf:
1406 case AArch64::BI_InterlockedOr64_nf:
1407 return MSVCIntrin::_InterlockedOr_nf;
1408 case AArch64::BI_InterlockedXor8_acq:
1409 case AArch64::BI_InterlockedXor16_acq:
1410 case AArch64::BI_InterlockedXor_acq:
1411 case AArch64::BI_InterlockedXor64_acq:
1412 return MSVCIntrin::_InterlockedXor_acq;
1413 case AArch64::BI_InterlockedXor8_rel:
1414 case AArch64::BI_InterlockedXor16_rel:
1415 case AArch64::BI_InterlockedXor_rel:
1416 case AArch64::BI_InterlockedXor64_rel:
1417 return MSVCIntrin::_InterlockedXor_rel;
1418 case AArch64::BI_InterlockedXor8_nf:
1419 case AArch64::BI_InterlockedXor16_nf:
1420 case AArch64::BI_InterlockedXor_nf:
1421 case AArch64::BI_InterlockedXor64_nf:
1422 return MSVCIntrin::_InterlockedXor_nf;
1423 case AArch64::BI_InterlockedAnd8_acq:
1424 case AArch64::BI_InterlockedAnd16_acq:
1425 case AArch64::BI_InterlockedAnd_acq:
1426 case AArch64::BI_InterlockedAnd64_acq:
1427 return MSVCIntrin::_InterlockedAnd_acq;
1428 case AArch64::BI_InterlockedAnd8_rel:
1429 case AArch64::BI_InterlockedAnd16_rel:
1430 case AArch64::BI_InterlockedAnd_rel:
1431 case AArch64::BI_InterlockedAnd64_rel:
1432 return MSVCIntrin::_InterlockedAnd_rel;
1433 case AArch64::BI_InterlockedAnd8_nf:
1434 case AArch64::BI_InterlockedAnd16_nf:
1435 case AArch64::BI_InterlockedAnd_nf:
1436 case AArch64::BI_InterlockedAnd64_nf:
1437 return MSVCIntrin::_InterlockedAnd_nf;
1438 case AArch64::BI_InterlockedIncrement16_acq:
1439 case AArch64::BI_InterlockedIncrement_acq:
1440 case AArch64::BI_InterlockedIncrement64_acq:
1441 return MSVCIntrin::_InterlockedIncrement_acq;
1442 case AArch64::BI_InterlockedIncrement16_rel:
1443 case AArch64::BI_InterlockedIncrement_rel:
1444 case AArch64::BI_InterlockedIncrement64_rel:
1445 return MSVCIntrin::_InterlockedIncrement_rel;
1446 case AArch64::BI_InterlockedIncrement16_nf:
1447 case AArch64::BI_InterlockedIncrement_nf:
1448 case AArch64::BI_InterlockedIncrement64_nf:
1449 return MSVCIntrin::_InterlockedIncrement_nf;
1450 case AArch64::BI_InterlockedDecrement16_acq:
1451 case AArch64::BI_InterlockedDecrement_acq:
1452 case AArch64::BI_InterlockedDecrement64_acq:
1453 return MSVCIntrin::_InterlockedDecrement_acq;
1454 case AArch64::BI_InterlockedDecrement16_rel:
1455 case AArch64::BI_InterlockedDecrement_rel:
1456 case AArch64::BI_InterlockedDecrement64_rel:
1457 return MSVCIntrin::_InterlockedDecrement_rel;
1458 case AArch64::BI_InterlockedDecrement16_nf:
1459 case AArch64::BI_InterlockedDecrement_nf:
1460 case AArch64::BI_InterlockedDecrement64_nf:
1461 return MSVCIntrin::_InterlockedDecrement_nf;
1463 llvm_unreachable(
"must return from switch");
// x86 builtin ID -> MSVCIntrin mapping. Much smaller than the ARM/AArch64
// tables: on x86 only the 64-bit and cmpxchg16b-style variants need
// translation here (no _acq/_rel/_nf variants exist for x86).
// NOTE(review): function signature and default case elided in this excerpt.
1469 switch (BuiltinID) {
1472 case clang::X86::BI_BitScanForward:
1473 case clang::X86::BI_BitScanForward64:
1474 return MSVCIntrin::_BitScanForward;
1475 case clang::X86::BI_BitScanReverse:
1476 case clang::X86::BI_BitScanReverse64:
1477 return MSVCIntrin::_BitScanReverse;
1478 case clang::X86::BI_InterlockedAnd64:
1479 return MSVCIntrin::_InterlockedAnd;
1480 case clang::X86::BI_InterlockedCompareExchange128:
1481 return MSVCIntrin::_InterlockedCompareExchange128;
1482 case clang::X86::BI_InterlockedExchange64:
1483 return MSVCIntrin::_InterlockedExchange;
1484 case clang::X86::BI_InterlockedExchangeAdd64:
1485 return MSVCIntrin::_InterlockedExchangeAdd;
1486 case clang::X86::BI_InterlockedExchangeSub64:
1487 return MSVCIntrin::_InterlockedExchangeSub;
1488 case clang::X86::BI_InterlockedOr64:
1489 return MSVCIntrin::_InterlockedOr;
1490 case clang::X86::BI_InterlockedXor64:
1491 return MSVCIntrin::_InterlockedXor;
1492 case clang::X86::BI_InterlockedDecrement64:
1493 return MSVCIntrin::_InterlockedDecrement;
1494 case clang::X86::BI_InterlockedIncrement64:
1495 return MSVCIntrin::_InterlockedIncrement;
1497 llvm_unreachable(
"must return from switch");
1503 switch (BuiltinID) {
1504 case MSVCIntrin::_BitScanForward:
1505 case MSVCIntrin::_BitScanReverse: {
1506 Address IndexAddress(EmitPointerWithAlignment(E->
getArg(0)));
1509 llvm::Type *ArgType = ArgValue->getType();
1511 llvm::Type *ResultType = ConvertType(E->
getType());
1513 Value *ArgZero = llvm::Constant::getNullValue(ArgType);
1514 Value *ResZero = llvm::Constant::getNullValue(ResultType);
1515 Value *ResOne = llvm::ConstantInt::get(ResultType, 1);
1517 BasicBlock *
Begin = Builder.GetInsertBlock();
1518 BasicBlock *
End = createBasicBlock(
"bitscan_end", this->CurFn);
1519 Builder.SetInsertPoint(
End);
1520 PHINode *Result = Builder.CreatePHI(ResultType, 2,
"bitscan_result");
1522 Builder.SetInsertPoint(
Begin);
1523 Value *IsZero = Builder.CreateICmpEQ(ArgValue, ArgZero);
1524 BasicBlock *NotZero = createBasicBlock(
"bitscan_not_zero", this->CurFn);
1525 Builder.CreateCondBr(IsZero,
End, NotZero);
1526 Result->addIncoming(ResZero,
Begin);
1528 Builder.SetInsertPoint(NotZero);
1530 if (BuiltinID == MSVCIntrin::_BitScanForward) {
1531 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
1532 Value *ZeroCount = Builder.CreateCall(F, {ArgValue, Builder.getTrue()});
1533 ZeroCount = Builder.CreateIntCast(ZeroCount, IndexType,
false);
1534 Builder.CreateStore(ZeroCount, IndexAddress,
false);
1536 unsigned ArgWidth = cast<llvm::IntegerType>(ArgType)->getBitWidth();
1537 Value *ArgTypeLastIndex = llvm::ConstantInt::get(IndexType, ArgWidth - 1);
1539 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
1540 Value *ZeroCount = Builder.CreateCall(F, {ArgValue, Builder.getTrue()});
1541 ZeroCount = Builder.CreateIntCast(ZeroCount, IndexType,
false);
1542 Value *Index = Builder.CreateNSWSub(ArgTypeLastIndex, ZeroCount);
1543 Builder.CreateStore(Index, IndexAddress,
false);
1545 Builder.CreateBr(
End);
1546 Result->addIncoming(ResOne, NotZero);
1548 Builder.SetInsertPoint(
End);
1551 case MSVCIntrin::_InterlockedAnd:
1553 case MSVCIntrin::_InterlockedExchange:
1555 case MSVCIntrin::_InterlockedExchangeAdd:
1557 case MSVCIntrin::_InterlockedExchangeSub:
1559 case MSVCIntrin::_InterlockedOr:
1561 case MSVCIntrin::_InterlockedXor:
1563 case MSVCIntrin::_InterlockedExchangeAdd_acq:
1565 AtomicOrdering::Acquire);
1566 case MSVCIntrin::_InterlockedExchangeAdd_rel:
1568 AtomicOrdering::Release);
1569 case MSVCIntrin::_InterlockedExchangeAdd_nf:
1571 AtomicOrdering::Monotonic);
1572 case MSVCIntrin::_InterlockedExchange_acq:
1574 AtomicOrdering::Acquire);
1575 case MSVCIntrin::_InterlockedExchange_rel:
1577 AtomicOrdering::Release);
1578 case MSVCIntrin::_InterlockedExchange_nf:
1580 AtomicOrdering::Monotonic);
1581 case MSVCIntrin::_InterlockedCompareExchange_acq:
1583 case MSVCIntrin::_InterlockedCompareExchange_rel:
1585 case MSVCIntrin::_InterlockedCompareExchange_nf:
1587 case MSVCIntrin::_InterlockedCompareExchange128:
1589 *
this, E, AtomicOrdering::SequentiallyConsistent);
1590 case MSVCIntrin::_InterlockedCompareExchange128_acq:
1592 case MSVCIntrin::_InterlockedCompareExchange128_rel:
1594 case MSVCIntrin::_InterlockedCompareExchange128_nf:
1596 case MSVCIntrin::_InterlockedOr_acq:
1598 AtomicOrdering::Acquire);
1599 case MSVCIntrin::_InterlockedOr_rel:
1601 AtomicOrdering::Release);
1602 case MSVCIntrin::_InterlockedOr_nf:
1604 AtomicOrdering::Monotonic);
1605 case MSVCIntrin::_InterlockedXor_acq:
1607 AtomicOrdering::Acquire);
1608 case MSVCIntrin::_InterlockedXor_rel:
1610 AtomicOrdering::Release);
1611 case MSVCIntrin::_InterlockedXor_nf:
1613 AtomicOrdering::Monotonic);
1614 case MSVCIntrin::_InterlockedAnd_acq:
1616 AtomicOrdering::Acquire);
1617 case MSVCIntrin::_InterlockedAnd_rel:
1619 AtomicOrdering::Release);
1620 case MSVCIntrin::_InterlockedAnd_nf:
1622 AtomicOrdering::Monotonic);
1623 case MSVCIntrin::_InterlockedIncrement_acq:
1625 case MSVCIntrin::_InterlockedIncrement_rel:
1627 case MSVCIntrin::_InterlockedIncrement_nf:
1629 case MSVCIntrin::_InterlockedDecrement_acq:
1631 case MSVCIntrin::_InterlockedDecrement_rel:
1633 case MSVCIntrin::_InterlockedDecrement_nf:
1636 case MSVCIntrin::_InterlockedDecrement:
1638 case MSVCIntrin::_InterlockedIncrement:
1641 case MSVCIntrin::__fastfail: {
1645 llvm::Triple::ArchType ISA = getTarget().getTriple().getArch();
1646 StringRef
Asm, Constraints;
1649 ErrorUnsupported(E,
"__fastfail call for this architecture");
1651 case llvm::Triple::x86:
1652 case llvm::Triple::x86_64:
1654 Constraints =
"{cx}";
1656 case llvm::Triple::thumb:
1658 Constraints =
"{r0}";
1660 case llvm::Triple::aarch64:
1661 Asm =
"brk #0xF003";
1662 Constraints =
"{w0}";
1664 llvm::FunctionType *FTy = llvm::FunctionType::get(VoidTy, {Int32Ty},
false);
1665 llvm::InlineAsm *IA =
1666 llvm::InlineAsm::get(FTy,
Asm, Constraints,
true);
1667 llvm::AttributeList NoReturnAttr = llvm::AttributeList::get(
1668 getLLVMContext(), llvm::AttributeList::FunctionIndex,
1669 llvm::Attribute::NoReturn);
1670 llvm::CallInst *CI = Builder.CreateCall(IA, EmitScalarExpr(E->
getArg(0)));
1671 CI->setAttributes(NoReturnAttr);
1675 llvm_unreachable(
"Incorrect MSVC intrinsic!");
1681 CallObjCArcUse(llvm::Value *
object) :
object(
object) {}
1692 assert((
Kind == BCK_CLZPassedZero ||
Kind == BCK_CTZPassedZero)
1693 &&
"Unsupported builtin check kind");
1695 Value *ArgValue = EmitScalarExpr(E);
1696 if (!SanOpts.has(SanitizerKind::Builtin) || !getTarget().isCLZForZeroUndef())
1700 Value *Cond = Builder.CreateICmpNE(
1701 ArgValue, llvm::Constant::getNullValue(ArgValue->getType()));
1702 EmitCheck(std::make_pair(Cond, SanitizerKind::Builtin),
1703 SanitizerHandler::InvalidBuiltin,
1705 llvm::ConstantInt::get(Builder.getInt8Ty(),
Kind)},
1712 QualType UnsignedTy = C.getIntTypeForBitwidth(Size * 8,
false);
1713 return C.getCanonicalType(UnsignedTy);
1723 raw_svector_ostream OS(Name);
1724 OS <<
"__os_log_helper";
1728 for (
const auto &Item : Layout.
Items)
1729 OS <<
"_" <<
int(Item.getSizeByte()) <<
"_"
1730 <<
int(Item.getDescriptorByte());
1733 if (llvm::Function *F = CGM.getModule().getFunction(Name))
1743 for (
unsigned int I = 0, E = Layout.
Items.size(); I < E; ++I) {
1744 char Size = Layout.
Items[I].getSizeByte();
1753 ArgTys.emplace_back(ArgTy);
1762 CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, Args);
1763 llvm::FunctionType *FuncTy = CGM.getTypes().GetFunctionType(FI);
1764 llvm::Function *Fn = llvm::Function::Create(
1765 FuncTy, llvm::GlobalValue::LinkOnceODRLinkage, Name, &CGM.getModule());
1767 CGM.SetLLVMFunctionAttributes(
GlobalDecl(), FI, Fn,
false);
1768 CGM.SetLLVMFunctionAttributesForDefinition(
nullptr, Fn);
1769 Fn->setDoesNotThrow();
1772 if (CGM.getCodeGenOpts().OptimizeSize == 2)
1773 Fn->addFnAttr(llvm::Attribute::NoInline);
1776 StartFunction(
GlobalDecl(), ReturnTy, Fn, FI, Args);
1783 Address(Builder.CreateLoad(GetAddrOfLocalVar(Args[0]),
"buf"), Int8Ty,
1786 Builder.CreateConstByteGEP(BufAddr,
Offset++,
"summary"));
1788 Builder.CreateConstByteGEP(BufAddr,
Offset++,
"numArgs"));
1791 for (
const auto &Item : Layout.
Items) {
1792 Builder.CreateStore(
1793 Builder.getInt8(Item.getDescriptorByte()),
1794 Builder.CreateConstByteGEP(BufAddr,
Offset++,
"argDescriptor"));
1795 Builder.CreateStore(
1796 Builder.getInt8(Item.getSizeByte()),
1797 Builder.CreateConstByteGEP(BufAddr,
Offset++,
"argSize"));
1800 if (!Size.getQuantity())
1803 Address Arg = GetAddrOfLocalVar(Args[I]);
1804 Address Addr = Builder.CreateConstByteGEP(BufAddr,
Offset,
"argData");
1806 Builder.CreateElementBitCast(Addr, Arg.
getElementType(),
"argDataCast");
1807 Builder.CreateStore(Builder.CreateLoad(Arg), Addr);
1819 "__builtin_os_log_format takes at least 2 arguments");
1830 for (
const auto &Item : Layout.
Items) {
1831 int Size = Item.getSizeByte();
1835 llvm::Value *ArgVal;
1839 for (
unsigned I = 0, E = Item.getMaskType().size(); I < E; ++I)
1840 Val |= ((uint64_t)Item.getMaskType()[I]) << I * 8;
1841 ArgVal = llvm::Constant::getIntegerValue(Int64Ty,
llvm::APInt(64, Val));
1842 }
else if (
const Expr *TheExpr = Item.getExpr()) {
1843 ArgVal = EmitScalarExpr(TheExpr,
false);
1848 auto LifetimeExtendObject = [&](
const Expr *E) {
1856 if (isa<CallExpr>(E) || isa<ObjCMessageExpr>(E))
1861 if (TheExpr->getType()->isObjCRetainableType() &&
1862 getLangOpts().ObjCAutoRefCount && LifetimeExtendObject(TheExpr)) {
1863 assert(getEvaluationKind(TheExpr->getType()) ==
TEK_Scalar &&
1864 "Only scalar can be a ObjC retainable type");
1865 if (!isa<Constant>(ArgVal)) {
1869 Address Addr = CreateMemTemp(Ty,
"os.log.arg", &Alloca);
1870 ArgVal = EmitARCRetain(Ty, ArgVal);
1871 Builder.CreateStore(ArgVal, Addr);
1872 pushLifetimeExtendedDestroy(Cleanup, Alloca, Ty,
1878 if (CGM.getCodeGenOpts().OptimizationLevel != 0)
1879 pushCleanupAfterFullExpr<CallObjCArcUse>(Cleanup, ArgVal);
1883 ArgVal = Builder.getInt32(Item.getConstValue().getQuantity());
1886 unsigned ArgValSize =
1887 CGM.getDataLayout().getTypeSizeInBits(ArgVal->getType());
1888 llvm::IntegerType *IntTy = llvm::Type::getIntNTy(getLLVMContext(),
1890 ArgVal = Builder.CreateBitOrPointerCast(ArgVal, IntTy);
1893 ArgVal = Builder.CreateZExtOrBitCast(ArgVal, ConvertType(ArgTy));
1898 CGM.getTypes().arrangeBuiltinFunctionCall(Ctx.
VoidTy, Args);
1906 unsigned BuiltinID, WidthAndSignedness Op1Info, WidthAndSignedness Op2Info,
1907 WidthAndSignedness ResultInfo) {
1908 return BuiltinID == Builtin::BI__builtin_mul_overflow &&
1909 Op1Info.Width == Op2Info.Width && Op2Info.Width == ResultInfo.Width &&
1910 !Op1Info.Signed && !Op2Info.Signed && ResultInfo.Signed;
1915 const clang::Expr *Op2, WidthAndSignedness Op2Info,
1917 WidthAndSignedness ResultInfo) {
1919 Builtin::BI__builtin_mul_overflow, Op1Info, Op2Info, ResultInfo) &&
1920 "Cannot specialize this multiply");
1925 llvm::Value *HasOverflow;
1927 CGF, llvm::Intrinsic::umul_with_overflow, V1, V2, HasOverflow);
1932 auto IntMax = llvm::APInt::getSignedMaxValue(ResultInfo.Width);
1933 llvm::Value *IntMaxValue = llvm::ConstantInt::get(Result->getType(), IntMax);
1935 llvm::Value *IntMaxOverflow = CGF.
Builder.CreateICmpUGT(Result, IntMaxValue);
1936 HasOverflow = CGF.
Builder.CreateOr(HasOverflow, IntMaxOverflow);
1948 WidthAndSignedness Op1Info,
1949 WidthAndSignedness Op2Info,
1950 WidthAndSignedness ResultInfo) {
1951 return BuiltinID == Builtin::BI__builtin_mul_overflow &&
1952 std::max(Op1Info.Width, Op2Info.Width) >= ResultInfo.Width &&
1953 Op1Info.Signed != Op2Info.Signed;
1960 WidthAndSignedness Op1Info,
const clang::Expr *Op2,
1961 WidthAndSignedness Op2Info,
1963 WidthAndSignedness ResultInfo) {
1965 Op2Info, ResultInfo) &&
1966 "Not a mixed-sign multipliction we can specialize");
1969 const clang::Expr *SignedOp = Op1Info.Signed ? Op1 : Op2;
1970 const clang::Expr *UnsignedOp = Op1Info.Signed ? Op2 : Op1;
1973 unsigned SignedOpWidth = Op1Info.Signed ? Op1Info.Width : Op2Info.Width;
1974 unsigned UnsignedOpWidth = Op1Info.Signed ? Op2Info.Width : Op1Info.Width;
1977 if (SignedOpWidth < UnsignedOpWidth)
1979 if (UnsignedOpWidth < SignedOpWidth)
1982 llvm::Type *OpTy =
Signed->getType();
1983 llvm::Value *
Zero = llvm::Constant::getNullValue(OpTy);
1986 unsigned OpWidth =
std::max(Op1Info.Width, Op2Info.Width);
1991 llvm::Value *AbsSigned =
1992 CGF.
Builder.CreateSelect(IsNegative, AbsOfNegative,
Signed);
1995 llvm::Value *UnsignedOverflow;
1996 llvm::Value *UnsignedResult =
2000 llvm::Value *Overflow, *Result;
2001 if (ResultInfo.Signed) {
2005 llvm::APInt::getSignedMaxValue(ResultInfo.Width).zext(OpWidth);
2006 llvm::Value *MaxResult =
2007 CGF.
Builder.CreateAdd(llvm::ConstantInt::get(OpTy, IntMax),
2008 CGF.
Builder.CreateZExt(IsNegative, OpTy));
2009 llvm::Value *SignedOverflow =
2010 CGF.
Builder.CreateICmpUGT(UnsignedResult, MaxResult);
2011 Overflow = CGF.
Builder.CreateOr(UnsignedOverflow, SignedOverflow);
2014 llvm::Value *NegativeResult = CGF.
Builder.CreateNeg(UnsignedResult);
2015 llvm::Value *SignedResult =
2016 CGF.
Builder.CreateSelect(IsNegative, NegativeResult, UnsignedResult);
2017 Result = CGF.
Builder.CreateTrunc(SignedResult, ResTy);
2020 llvm::Value *Underflow = CGF.
Builder.CreateAnd(
2021 IsNegative, CGF.
Builder.CreateIsNotNull(UnsignedResult));
2022 Overflow = CGF.
Builder.CreateOr(UnsignedOverflow, Underflow);
2023 if (ResultInfo.Width < OpWidth) {
2025 llvm::APInt::getMaxValue(ResultInfo.Width).zext(OpWidth);
2026 llvm::Value *TruncOverflow = CGF.
Builder.CreateICmpUGT(
2027 UnsignedResult, llvm::ConstantInt::get(OpTy, IntMax));
2028 Overflow = CGF.
Builder.CreateOr(Overflow, TruncOverflow);
2032 Result = CGF.
Builder.CreateSelect(
2033 IsNegative, CGF.
Builder.CreateNeg(UnsignedResult), UnsignedResult);
2035 Result = CGF.
Builder.CreateTrunc(Result, ResTy);
2037 assert(Overflow && Result &&
"Missing overflow or result");
2048 llvm::SmallPtrSetImpl<const Decl *> &Seen) {
2057 if (!Seen.insert(Record).second)
2060 assert(Record->hasDefinition() &&
2061 "Incomplete types should already be diagnosed");
2063 if (Record->isDynamicClass())
2083 llvm::Value *Src = EmitScalarExpr(E->
getArg(0));
2084 llvm::Value *ShiftAmt = EmitScalarExpr(E->
getArg(1));
2088 llvm::Type *Ty = Src->getType();
2089 ShiftAmt = Builder.CreateIntCast(ShiftAmt, Ty,
false);
2092 unsigned IID = IsRotateRight ? Intrinsic::fshr : Intrinsic::fshl;
2093 Function *F = CGM.getIntrinsic(IID, Ty);
2094 return RValue::get(Builder.CreateCall(F, { Src, Src, ShiftAmt }));
2099 switch (BuiltinID) {
2100 #define MUTATE_LDBL(func) \
2101 case Builtin::BI__builtin_##func##l: \
2102 return Builtin::BI__builtin_##func##f128;
2177 !Result.hasSideEffects()) {
2178 if (Result.Val.isInt())
2179 return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
2180 Result.Val.getInt()));
2181 if (Result.Val.isFloat())
2182 return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
2183 Result.Val.getFloat()));
2190 if (getTarget().getTriple().isPPC64() &&
2191 &getTarget().getLongDoubleFormat() == &llvm::APFloat::IEEEquad())
2198 const unsigned BuiltinIDIfNoAsmLabel =
2199 FD->
hasAttr<AsmLabelAttr>() ? 0 : BuiltinID;
2206 if (FD->
hasAttr<ConstAttr>()) {
2207 switch (BuiltinIDIfNoAsmLabel) {
2208 case Builtin::BIceil:
2209 case Builtin::BIceilf:
2210 case Builtin::BIceill:
2211 case Builtin::BI__builtin_ceil:
2212 case Builtin::BI__builtin_ceilf:
2213 case Builtin::BI__builtin_ceilf16:
2214 case Builtin::BI__builtin_ceill:
2215 case Builtin::BI__builtin_ceilf128:
2218 Intrinsic::experimental_constrained_ceil));
2220 case Builtin::BIcopysign:
2221 case Builtin::BIcopysignf:
2222 case Builtin::BIcopysignl:
2223 case Builtin::BI__builtin_copysign:
2224 case Builtin::BI__builtin_copysignf:
2225 case Builtin::BI__builtin_copysignf16:
2226 case Builtin::BI__builtin_copysignl:
2227 case Builtin::BI__builtin_copysignf128:
2230 case Builtin::BIcos:
2231 case Builtin::BIcosf:
2232 case Builtin::BIcosl:
2233 case Builtin::BI__builtin_cos:
2234 case Builtin::BI__builtin_cosf:
2235 case Builtin::BI__builtin_cosf16:
2236 case Builtin::BI__builtin_cosl:
2237 case Builtin::BI__builtin_cosf128:
2240 Intrinsic::experimental_constrained_cos));
2242 case Builtin::BIexp:
2243 case Builtin::BIexpf:
2244 case Builtin::BIexpl:
2245 case Builtin::BI__builtin_exp:
2246 case Builtin::BI__builtin_expf:
2247 case Builtin::BI__builtin_expf16:
2248 case Builtin::BI__builtin_expl:
2249 case Builtin::BI__builtin_expf128:
2252 Intrinsic::experimental_constrained_exp));
2254 case Builtin::BIexp2:
2255 case Builtin::BIexp2f:
2256 case Builtin::BIexp2l:
2257 case Builtin::BI__builtin_exp2:
2258 case Builtin::BI__builtin_exp2f:
2259 case Builtin::BI__builtin_exp2f16:
2260 case Builtin::BI__builtin_exp2l:
2261 case Builtin::BI__builtin_exp2f128:
2264 Intrinsic::experimental_constrained_exp2));
2266 case Builtin::BIfabs:
2267 case Builtin::BIfabsf:
2268 case Builtin::BIfabsl:
2269 case Builtin::BI__builtin_fabs:
2270 case Builtin::BI__builtin_fabsf:
2271 case Builtin::BI__builtin_fabsf16:
2272 case Builtin::BI__builtin_fabsl:
2273 case Builtin::BI__builtin_fabsf128:
2276 case Builtin::BIfloor:
2277 case Builtin::BIfloorf:
2278 case Builtin::BIfloorl:
2279 case Builtin::BI__builtin_floor:
2280 case Builtin::BI__builtin_floorf:
2281 case Builtin::BI__builtin_floorf16:
2282 case Builtin::BI__builtin_floorl:
2283 case Builtin::BI__builtin_floorf128:
2286 Intrinsic::experimental_constrained_floor));
2288 case Builtin::BIfma:
2289 case Builtin::BIfmaf:
2290 case Builtin::BIfmal:
2291 case Builtin::BI__builtin_fma:
2292 case Builtin::BI__builtin_fmaf:
2293 case Builtin::BI__builtin_fmaf16:
2294 case Builtin::BI__builtin_fmal:
2295 case Builtin::BI__builtin_fmaf128:
2298 Intrinsic::experimental_constrained_fma));
2300 case Builtin::BIfmax:
2301 case Builtin::BIfmaxf:
2302 case Builtin::BIfmaxl:
2303 case Builtin::BI__builtin_fmax:
2304 case Builtin::BI__builtin_fmaxf:
2305 case Builtin::BI__builtin_fmaxf16:
2306 case Builtin::BI__builtin_fmaxl:
2307 case Builtin::BI__builtin_fmaxf128:
2310 Intrinsic::experimental_constrained_maxnum));
2312 case Builtin::BIfmin:
2313 case Builtin::BIfminf:
2314 case Builtin::BIfminl:
2315 case Builtin::BI__builtin_fmin:
2316 case Builtin::BI__builtin_fminf:
2317 case Builtin::BI__builtin_fminf16:
2318 case Builtin::BI__builtin_fminl:
2319 case Builtin::BI__builtin_fminf128:
2322 Intrinsic::experimental_constrained_minnum));
2326 case Builtin::BIfmod:
2327 case Builtin::BIfmodf:
2328 case Builtin::BIfmodl:
2329 case Builtin::BI__builtin_fmod:
2330 case Builtin::BI__builtin_fmodf:
2331 case Builtin::BI__builtin_fmodf16:
2332 case Builtin::BI__builtin_fmodl:
2333 case Builtin::BI__builtin_fmodf128: {
2337 return RValue::get(Builder.CreateFRem(Arg1, Arg2,
"fmod"));
2340 case Builtin::BIlog:
2341 case Builtin::BIlogf:
2342 case Builtin::BIlogl:
2343 case Builtin::BI__builtin_log:
2344 case Builtin::BI__builtin_logf:
2345 case Builtin::BI__builtin_logf16:
2346 case Builtin::BI__builtin_logl:
2347 case Builtin::BI__builtin_logf128:
2350 Intrinsic::experimental_constrained_log));
2352 case Builtin::BIlog10:
2353 case Builtin::BIlog10f:
2354 case Builtin::BIlog10l:
2355 case Builtin::BI__builtin_log10:
2356 case Builtin::BI__builtin_log10f:
2357 case Builtin::BI__builtin_log10f16:
2358 case Builtin::BI__builtin_log10l:
2359 case Builtin::BI__builtin_log10f128:
2362 Intrinsic::experimental_constrained_log10));
2364 case Builtin::BIlog2:
2365 case Builtin::BIlog2f:
2366 case Builtin::BIlog2l:
2367 case Builtin::BI__builtin_log2:
2368 case Builtin::BI__builtin_log2f:
2369 case Builtin::BI__builtin_log2f16:
2370 case Builtin::BI__builtin_log2l:
2371 case Builtin::BI__builtin_log2f128:
2374 Intrinsic::experimental_constrained_log2));
2376 case Builtin::BInearbyint:
2377 case Builtin::BInearbyintf:
2378 case Builtin::BInearbyintl:
2379 case Builtin::BI__builtin_nearbyint:
2380 case Builtin::BI__builtin_nearbyintf:
2381 case Builtin::BI__builtin_nearbyintl:
2382 case Builtin::BI__builtin_nearbyintf128:
2385 Intrinsic::experimental_constrained_nearbyint));
2387 case Builtin::BIpow:
2388 case Builtin::BIpowf:
2389 case Builtin::BIpowl:
2390 case Builtin::BI__builtin_pow:
2391 case Builtin::BI__builtin_powf:
2392 case Builtin::BI__builtin_powf16:
2393 case Builtin::BI__builtin_powl:
2394 case Builtin::BI__builtin_powf128:
2397 Intrinsic::experimental_constrained_pow));
2399 case Builtin::BIrint:
2400 case Builtin::BIrintf:
2401 case Builtin::BIrintl:
2402 case Builtin::BI__builtin_rint:
2403 case Builtin::BI__builtin_rintf:
2404 case Builtin::BI__builtin_rintf16:
2405 case Builtin::BI__builtin_rintl:
2406 case Builtin::BI__builtin_rintf128:
2409 Intrinsic::experimental_constrained_rint));
2411 case Builtin::BIround:
2412 case Builtin::BIroundf:
2413 case Builtin::BIroundl:
2414 case Builtin::BI__builtin_round:
2415 case Builtin::BI__builtin_roundf:
2416 case Builtin::BI__builtin_roundf16:
2417 case Builtin::BI__builtin_roundl:
2418 case Builtin::BI__builtin_roundf128:
2421 Intrinsic::experimental_constrained_round));
2423 case Builtin::BIsin:
2424 case Builtin::BIsinf:
2425 case Builtin::BIsinl:
2426 case Builtin::BI__builtin_sin:
2427 case Builtin::BI__builtin_sinf:
2428 case Builtin::BI__builtin_sinf16:
2429 case Builtin::BI__builtin_sinl:
2430 case Builtin::BI__builtin_sinf128:
2433 Intrinsic::experimental_constrained_sin));
2435 case Builtin::BIsqrt:
2436 case Builtin::BIsqrtf:
2437 case Builtin::BIsqrtl:
2438 case Builtin::BI__builtin_sqrt:
2439 case Builtin::BI__builtin_sqrtf:
2440 case Builtin::BI__builtin_sqrtf16:
2441 case Builtin::BI__builtin_sqrtl:
2442 case Builtin::BI__builtin_sqrtf128:
2445 Intrinsic::experimental_constrained_sqrt));
2447 case Builtin::BItrunc:
2448 case Builtin::BItruncf:
2449 case Builtin::BItruncl:
2450 case Builtin::BI__builtin_trunc:
2451 case Builtin::BI__builtin_truncf:
2452 case Builtin::BI__builtin_truncf16:
2453 case Builtin::BI__builtin_truncl:
2454 case Builtin::BI__builtin_truncf128:
2457 Intrinsic::experimental_constrained_trunc));
2459 case Builtin::BIlround:
2460 case Builtin::BIlroundf:
2461 case Builtin::BIlroundl:
2462 case Builtin::BI__builtin_lround:
2463 case Builtin::BI__builtin_lroundf:
2464 case Builtin::BI__builtin_lroundl:
2465 case Builtin::BI__builtin_lroundf128:
2468 Intrinsic::experimental_constrained_lround));
2470 case Builtin::BIllround:
2471 case Builtin::BIllroundf:
2472 case Builtin::BIllroundl:
2473 case Builtin::BI__builtin_llround:
2474 case Builtin::BI__builtin_llroundf:
2475 case Builtin::BI__builtin_llroundl:
2476 case Builtin::BI__builtin_llroundf128:
2479 Intrinsic::experimental_constrained_llround));
2481 case Builtin::BIlrint:
2482 case Builtin::BIlrintf:
2483 case Builtin::BIlrintl:
2484 case Builtin::BI__builtin_lrint:
2485 case Builtin::BI__builtin_lrintf:
2486 case Builtin::BI__builtin_lrintl:
2487 case Builtin::BI__builtin_lrintf128:
2490 Intrinsic::experimental_constrained_lrint));
2492 case Builtin::BIllrint:
2493 case Builtin::BIllrintf:
2494 case Builtin::BIllrintl:
2495 case Builtin::BI__builtin_llrint:
2496 case Builtin::BI__builtin_llrintf:
2497 case Builtin::BI__builtin_llrintl:
2498 case Builtin::BI__builtin_llrintf128:
2501 Intrinsic::experimental_constrained_llrint));
2508 switch (BuiltinIDIfNoAsmLabel) {
2510 case Builtin::BI__builtin___CFStringMakeConstantString:
2511 case Builtin::BI__builtin___NSStringMakeConstantString:
2513 case Builtin::BI__builtin_stdarg_start:
2514 case Builtin::BI__builtin_va_start:
2515 case Builtin::BI__va_start:
2516 case Builtin::BI__builtin_va_end:
2518 EmitVAStartEnd(BuiltinID == Builtin::BI__va_start
2519 ? EmitScalarExpr(E->
getArg(0))
2520 : EmitVAListRef(E->
getArg(0)).getPointer(),
2521 BuiltinID != Builtin::BI__builtin_va_end));
2522 case Builtin::BI__builtin_va_copy: {
2523 Value *DstPtr = EmitVAListRef(E->
getArg(0)).getPointer();
2524 Value *SrcPtr = EmitVAListRef(E->
getArg(1)).getPointer();
2526 llvm::Type *
Type = Int8PtrTy;
2528 DstPtr = Builder.CreateBitCast(DstPtr,
Type);
2529 SrcPtr = Builder.CreateBitCast(SrcPtr,
Type);
2530 return RValue::get(Builder.CreateCall(CGM.getIntrinsic(Intrinsic::vacopy),
2533 case Builtin::BI__builtin_abs:
2534 case Builtin::BI__builtin_labs:
2535 case Builtin::BI__builtin_llabs: {
2539 Value *NegOp = Builder.CreateNSWNeg(ArgValue,
"neg");
2540 Constant *
Zero = llvm::Constant::getNullValue(ArgValue->getType());
2541 Value *CmpResult = Builder.CreateICmpSLT(ArgValue,
Zero,
"abscond");
2542 Value *Result = Builder.CreateSelect(CmpResult, NegOp, ArgValue,
"abs");
2545 case Builtin::BI__builtin_complex: {
2550 case Builtin::BI__builtin_conj:
2551 case Builtin::BI__builtin_conjf:
2552 case Builtin::BI__builtin_conjl:
2553 case Builtin::BIconj:
2554 case Builtin::BIconjf:
2555 case Builtin::BIconjl: {
2557 Value *Real = ComplexVal.first;
2558 Value *Imag = ComplexVal.second;
2559 Imag = Builder.CreateFNeg(Imag,
"neg");
2562 case Builtin::BI__builtin_creal:
2563 case Builtin::BI__builtin_crealf:
2564 case Builtin::BI__builtin_creall:
2565 case Builtin::BIcreal:
2566 case Builtin::BIcrealf:
2567 case Builtin::BIcreall: {
2572 case Builtin::BI__builtin_preserve_access_index: {
2576 if (!getDebugInfo()) {
2577 CGM.Error(E->
getExprLoc(),
"using builtin_preserve_access_index() without -g");
2582 if (IsInPreservedAIRegion) {
2583 CGM.Error(E->
getExprLoc(),
"nested builtin_preserve_access_index() not supported");
2587 IsInPreservedAIRegion =
true;
2589 IsInPreservedAIRegion =
false;
2593 case Builtin::BI__builtin_cimag:
2594 case Builtin::BI__builtin_cimagf:
2595 case Builtin::BI__builtin_cimagl:
2596 case Builtin::BIcimag:
2597 case Builtin::BIcimagf:
2598 case Builtin::BIcimagl: {
2603 case Builtin::BI__builtin_clrsb:
2604 case Builtin::BI__builtin_clrsbl:
2605 case Builtin::BI__builtin_clrsbll: {
2609 llvm::Type *ArgType = ArgValue->getType();
2610 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
2612 llvm::Type *ResultType = ConvertType(E->
getType());
2613 Value *
Zero = llvm::Constant::getNullValue(ArgType);
2614 Value *IsNeg = Builder.CreateICmpSLT(ArgValue,
Zero,
"isneg");
2615 Value *Inverse = Builder.CreateNot(ArgValue,
"not");
2616 Value *Tmp = Builder.CreateSelect(IsNeg, Inverse, ArgValue);
2617 Value *Ctlz = Builder.CreateCall(F, {Tmp, Builder.getFalse()});
2618 Value *Result = Builder.CreateSub(Ctlz, llvm::ConstantInt::get(ArgType, 1));
2619 Result = Builder.CreateIntCast(Result, ResultType,
true,
2623 case Builtin::BI__builtin_ctzs:
2624 case Builtin::BI__builtin_ctz:
2625 case Builtin::BI__builtin_ctzl:
2626 case Builtin::BI__builtin_ctzll: {
2627 Value *ArgValue = EmitCheckedArgForBuiltin(E->
getArg(0), BCK_CTZPassedZero);
2629 llvm::Type *ArgType = ArgValue->getType();
2630 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
2632 llvm::Type *ResultType = ConvertType(E->
getType());
2633 Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
2634 Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
2635 if (Result->getType() != ResultType)
2636 Result = Builder.CreateIntCast(Result, ResultType,
true,
2640 case Builtin::BI__builtin_clzs:
2641 case Builtin::BI__builtin_clz:
2642 case Builtin::BI__builtin_clzl:
2643 case Builtin::BI__builtin_clzll: {
2644 Value *ArgValue = EmitCheckedArgForBuiltin(E->
getArg(0), BCK_CLZPassedZero);
2646 llvm::Type *ArgType = ArgValue->getType();
2647 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
2649 llvm::Type *ResultType = ConvertType(E->
getType());
2650 Value *ZeroUndef = Builder.getInt1(getTarget().isCLZForZeroUndef());
2651 Value *Result = Builder.CreateCall(F, {ArgValue, ZeroUndef});
2652 if (Result->getType() != ResultType)
2653 Result = Builder.CreateIntCast(Result, ResultType,
true,
2657 case Builtin::BI__builtin_ffs:
2658 case Builtin::BI__builtin_ffsl:
2659 case Builtin::BI__builtin_ffsll: {
2663 llvm::Type *ArgType = ArgValue->getType();
2664 Function *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
2666 llvm::Type *ResultType = ConvertType(E->
getType());
2668 Builder.CreateAdd(Builder.CreateCall(F, {ArgValue, Builder.getTrue()}),
2669 llvm::ConstantInt::get(ArgType, 1));
2670 Value *
Zero = llvm::Constant::getNullValue(ArgType);
2671 Value *IsZero = Builder.CreateICmpEQ(ArgValue,
Zero,
"iszero");
2672 Value *Result = Builder.CreateSelect(IsZero,
Zero, Tmp,
"ffs");
2673 if (Result->getType() != ResultType)
2674 Result = Builder.CreateIntCast(Result, ResultType,
true,
2678 case Builtin::BI__builtin_parity:
2679 case Builtin::BI__builtin_parityl:
2680 case Builtin::BI__builtin_parityll: {
2684 llvm::Type *ArgType = ArgValue->getType();
2685 Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
2687 llvm::Type *ResultType = ConvertType(E->
getType());
2688 Value *Tmp = Builder.CreateCall(F, ArgValue);
2689 Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
2690 if (Result->getType() != ResultType)
2691 Result = Builder.CreateIntCast(Result, ResultType,
true,
2695 case Builtin::BI__lzcnt16:
2696 case Builtin::BI__lzcnt:
2697 case Builtin::BI__lzcnt64: {
2700 llvm::Type *ArgType = ArgValue->getType();
2701 Function *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
2703 llvm::Type *ResultType = ConvertType(E->
getType());
2704 Value *Result = Builder.CreateCall(F, {ArgValue, Builder.getFalse()});
2705 if (Result->getType() != ResultType)
2706 Result = Builder.CreateIntCast(Result, ResultType,
true,
2710 case Builtin::BI__popcnt16:
2711 case Builtin::BI__popcnt:
2712 case Builtin::BI__popcnt64:
2713 case Builtin::BI__builtin_popcount:
2714 case Builtin::BI__builtin_popcountl:
2715 case Builtin::BI__builtin_popcountll: {
2718 llvm::Type *ArgType = ArgValue->getType();
2719 Function *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
2721 llvm::Type *ResultType = ConvertType(E->
getType());
2722 Value *Result = Builder.CreateCall(F, ArgValue);
2723 if (Result->getType() != ResultType)
2724 Result = Builder.CreateIntCast(Result, ResultType,
true,
2728 case Builtin::BI__builtin_unpredictable: {
2734 case Builtin::BI__builtin_expect: {
2736 llvm::Type *ArgType = ArgValue->getType();
2738 Value *ExpectedValue = EmitScalarExpr(E->
getArg(1));
2742 if (CGM.getCodeGenOpts().OptimizationLevel == 0)
2745 Function *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
2747 Builder.CreateCall(FnExpect, {ArgValue, ExpectedValue},
"expval");
2750 case Builtin::BI__builtin_expect_with_probability: {
2752 llvm::Type *ArgType = ArgValue->getType();
2754 Value *ExpectedValue = EmitScalarExpr(E->
getArg(1));
2755 llvm::APFloat Probability(0.0);
2757 bool EvalSucceed = ProbArg->
EvaluateAsFloat(Probability, CGM.getContext());
2758 assert(EvalSucceed &&
"probability should be able to evaluate as float");
2760 bool LoseInfo =
false;
2761 Probability.convert(llvm::APFloat::IEEEdouble(),
2762 llvm::RoundingMode::Dynamic, &LoseInfo);
2763 llvm::Type *Ty = ConvertType(ProbArg->
getType());
2764 Constant *Confidence = ConstantFP::get(Ty, Probability);
2768 if (CGM.getCodeGenOpts().OptimizationLevel == 0)
2771 Function *FnExpect =
2772 CGM.getIntrinsic(Intrinsic::expect_with_probability, ArgType);
2773 Value *Result = Builder.CreateCall(
2774 FnExpect, {ArgValue, ExpectedValue, Confidence},
"expval");
2777 case Builtin::BI__builtin_assume_aligned: {
2779 Value *PtrValue = EmitScalarExpr(Ptr);
2780 Value *OffsetValue =
2783 Value *AlignmentValue = EmitScalarExpr(E->
getArg(1));
2784 ConstantInt *AlignmentCI = cast<ConstantInt>(AlignmentValue);
2785 if (AlignmentCI->getValue().ugt(llvm::Value::MaximumAlignment))
2786 AlignmentCI = ConstantInt::get(AlignmentCI->getType(),
2787 llvm::Value::MaximumAlignment);
2789 emitAlignmentAssumption(PtrValue, Ptr,
2791 AlignmentCI, OffsetValue);
2794 case Builtin::BI__assume:
2795 case Builtin::BI__builtin_assume: {
2800 Function *FnAssume = CGM.getIntrinsic(Intrinsic::assume);
2801 return RValue::get(Builder.CreateCall(FnAssume, ArgValue));
2803 case Builtin::BI__arithmetic_fence: {
2807 llvm::FastMathFlags FMF = Builder.getFastMathFlags();
2808 bool isArithmeticFenceEnabled =
2809 FMF.allowReassoc() &&
2810 getContext().getTargetInfo().checkArithmeticFenceSupported();
2813 if (isArithmeticFenceEnabled) {
2816 Value *Real = Builder.CreateArithmeticFence(ComplexVal.first,
2817 ConvertType(ElementType));
2818 Value *Imag = Builder.CreateArithmeticFence(ComplexVal.second,
2819 ConvertType(ElementType));
2823 Value *Real = ComplexVal.first;
2824 Value *Imag = ComplexVal.second;
2828 if (isArithmeticFenceEnabled)
2830 Builder.CreateArithmeticFence(ArgValue, ConvertType(ArgType)));
2833 case Builtin::BI__builtin_bswap16:
2834 case Builtin::BI__builtin_bswap32:
2835 case Builtin::BI__builtin_bswap64:
2836 case Builtin::BI_byteswap_ushort:
2837 case Builtin::BI_byteswap_ulong:
2838 case Builtin::BI_byteswap_uint64: {
2841 case Builtin::BI__builtin_bitreverse8:
2842 case Builtin::BI__builtin_bitreverse16:
2843 case Builtin::BI__builtin_bitreverse32:
2844 case Builtin::BI__builtin_bitreverse64: {
2847 case Builtin::BI__builtin_rotateleft8:
2848 case Builtin::BI__builtin_rotateleft16:
2849 case Builtin::BI__builtin_rotateleft32:
2850 case Builtin::BI__builtin_rotateleft64:
2851 case Builtin::BI_rotl8:
2852 case Builtin::BI_rotl16:
2853 case Builtin::BI_rotl:
2854 case Builtin::BI_lrotl:
2855 case Builtin::BI_rotl64:
2856 return emitRotate(E,
false);
2858 case Builtin::BI__builtin_rotateright8:
2859 case Builtin::BI__builtin_rotateright16:
2860 case Builtin::BI__builtin_rotateright32:
2861 case Builtin::BI__builtin_rotateright64:
2862 case Builtin::BI_rotr8:
2863 case Builtin::BI_rotr16:
2864 case Builtin::BI_rotr:
2865 case Builtin::BI_lrotr:
2866 case Builtin::BI_rotr64:
2867 return emitRotate(E,
true);
2869 case Builtin::BI__builtin_constant_p: {
2870 llvm::Type *ResultType = ConvertType(E->
getType());
2880 return RValue::get(ConstantInt::get(ResultType, 0));
2885 return RValue::get(ConstantInt::get(ResultType, 0));
2887 Value *ArgValue = EmitScalarExpr(Arg);
2891 ArgType = CGM.getContext().getObjCIdType();
2892 ArgValue = Builder.CreateBitCast(ArgValue, ConvertType(ArgType));
2895 CGM.getIntrinsic(Intrinsic::is_constant, ConvertType(ArgType));
2896 Value *Result = Builder.CreateCall(F, ArgValue);
2897 if (Result->getType() != ResultType)
2898 Result = Builder.CreateIntCast(Result, ResultType,
false);
2901 case Builtin::BI__builtin_dynamic_object_size:
2902 case Builtin::BI__builtin_object_size: {
2905 auto *ResType = cast<llvm::IntegerType>(ConvertType(E->
getType()));
2909 bool IsDynamic = BuiltinID == Builtin::BI__builtin_dynamic_object_size;
2911 nullptr, IsDynamic));
2913 case Builtin::BI__builtin_prefetch: {
2917 llvm::ConstantInt::get(Int32Ty, 0);
2919 llvm::ConstantInt::get(Int32Ty, 3);
2920 Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
2922 return RValue::get(Builder.CreateCall(F, {Address, RW, Locality, Data}));
2924 case Builtin::BI__builtin_readcyclecounter: {
2925 Function *F = CGM.getIntrinsic(Intrinsic::readcyclecounter);
2928 case Builtin::BI__builtin___clear_cache: {
2931 Function *F = CGM.getIntrinsic(Intrinsic::clear_cache);
2932 return RValue::get(Builder.CreateCall(F, {Begin, End}));
2934 case Builtin::BI__builtin_trap:
2935 return RValue::get(EmitTrapCall(Intrinsic::trap));
2936 case Builtin::BI__debugbreak:
2937 return RValue::get(EmitTrapCall(Intrinsic::debugtrap));
2938 case Builtin::BI__builtin_unreachable: {
2942 EmitBlock(createBasicBlock(
"unreachable.cont"));
2947 case Builtin::BI__builtin_powi:
2948 case Builtin::BI__builtin_powif:
2949 case Builtin::BI__builtin_powil: {
2950 llvm::Value *Src0 = EmitScalarExpr(E->
getArg(0));
2951 llvm::Value *Src1 = EmitScalarExpr(E->
getArg(1));
2953 if (Builder.getIsFPConstrained()) {
2955 Function *F = CGM.getIntrinsic(Intrinsic::experimental_constrained_powi,
2957 return RValue::get(Builder.CreateConstrainedFPCall(F, { Src0, Src1 }));
2961 { Src0->getType(), Src1->getType() });
2962 return RValue::get(Builder.CreateCall(F, { Src0, Src1 }));
2964 case Builtin::BI__builtin_isgreater:
2965 case Builtin::BI__builtin_isgreaterequal:
2966 case Builtin::BI__builtin_isless:
2967 case Builtin::BI__builtin_islessequal:
2968 case Builtin::BI__builtin_islessgreater:
2969 case Builtin::BI__builtin_isunordered: {
2977 switch (BuiltinID) {
2978 default: llvm_unreachable(
"Unknown ordered comparison");
2979 case Builtin::BI__builtin_isgreater:
2980 LHS = Builder.CreateFCmpOGT(LHS, RHS,
"cmp");
2982 case Builtin::BI__builtin_isgreaterequal:
2983 LHS = Builder.CreateFCmpOGE(LHS, RHS,
"cmp");
2985 case Builtin::BI__builtin_isless:
2986 LHS = Builder.CreateFCmpOLT(LHS, RHS,
"cmp");
2988 case Builtin::BI__builtin_islessequal:
2989 LHS = Builder.CreateFCmpOLE(LHS, RHS,
"cmp");
2991 case Builtin::BI__builtin_islessgreater:
2992 LHS = Builder.CreateFCmpONE(LHS, RHS,
"cmp");
2994 case Builtin::BI__builtin_isunordered:
2995 LHS = Builder.CreateFCmpUNO(LHS, RHS,
"cmp");
3001 case Builtin::BI__builtin_isnan: {
3004 llvm::Type *Ty =
V->getType();
3005 const llvm::fltSemantics &Semantics = Ty->getFltSemantics();
3006 if (!Builder.getIsFPConstrained() ||
3007 Builder.getDefaultConstrainedExcept() == fp::ebIgnore ||
3009 V = Builder.CreateFCmpUNO(
V,
V,
"cmp");
3013 if (
Value *Result = getTargetHooks().testFPKind(
V, BuiltinID, Builder, CGM))
3018 unsigned bitsize = Ty->getScalarSizeInBits();
3019 llvm::IntegerType *IntTy = Builder.getIntNTy(bitsize);
3020 Value *IntV = Builder.CreateBitCast(
V, IntTy);
3021 APInt AndMask = APInt::getSignedMaxValue(bitsize);
3023 Builder.CreateAnd(IntV, llvm::ConstantInt::get(IntTy, AndMask));
3024 APInt ExpMask = APFloat::getInf(Semantics).bitcastToAPInt();
3026 Builder.CreateSub(llvm::ConstantInt::get(IntTy, ExpMask), AbsV);
3028 V = Builder.CreateLShr(
Sub, llvm::ConstantInt::get(IntTy, bitsize - 1));
3030 V = Builder.CreateTrunc(
V, ConvertType(E->
getType()));
3034 case Builtin::BI__builtin_elementwise_abs: {
3039 QT = VecTy->getElementType();
3041 Result = Builder.CreateBinaryIntrinsic(
3043 Builder.getFalse(),
nullptr,
"elt.abs");
3050 case Builtin::BI__builtin_elementwise_ceil:
3053 case Builtin::BI__builtin_elementwise_floor:
3056 case Builtin::BI__builtin_elementwise_roundeven:
3059 case Builtin::BI__builtin_elementwise_trunc:
3063 case Builtin::BI__builtin_elementwise_add_sat:
3064 case Builtin::BI__builtin_elementwise_sub_sat: {
3068 assert(Op0->getType()->isIntOrIntVectorTy() &&
"integer type expected");
3071 Ty = VecTy->getElementType();
3074 if (BuiltinIDIfNoAsmLabel == Builtin::BI__builtin_elementwise_add_sat)
3075 Opc = IsSigned ? llvm::Intrinsic::sadd_sat : llvm::Intrinsic::uadd_sat;
3077 Opc = IsSigned ? llvm::Intrinsic::ssub_sat : llvm::Intrinsic::usub_sat;
3078 Result = Builder.CreateBinaryIntrinsic(Opc, Op0, Op1,
nullptr,
"elt.sat");
3082 case Builtin::BI__builtin_elementwise_max: {
3086 if (Op0->getType()->isIntOrIntVectorTy()) {
3089 Ty = VecTy->getElementType();
3091 ? llvm::Intrinsic::smax
3093 Op0, Op1,
nullptr,
"elt.max");
3095 Result = Builder.CreateMaxNum(Op0, Op1,
"elt.max");
3098 case Builtin::BI__builtin_elementwise_min: {
3102 if (Op0->getType()->isIntOrIntVectorTy()) {
3105 Ty = VecTy->getElementType();
3107 ? llvm::Intrinsic::smin
3109 Op0, Op1,
nullptr,
"elt.min");
3111 Result = Builder.CreateMinNum(Op0, Op1,
"elt.min");
3115 case Builtin::BI__builtin_reduce_max: {
3116 auto GetIntrinsicID = [](
QualType QT) {
3118 QT = VecTy->getElementType();
3119 if (QT->isSignedIntegerType())
3120 return llvm::Intrinsic::vector_reduce_smax;
3121 if (QT->isUnsignedIntegerType())
3122 return llvm::Intrinsic::vector_reduce_umax;
3123 assert(QT->isFloatingType() &&
"must have a float here");
3124 return llvm::Intrinsic::vector_reduce_fmax;
3127 *
this, E, GetIntrinsicID(E->
getArg(0)->
getType()),
"rdx.min"));
3130 case Builtin::BI__builtin_reduce_min: {
3131 auto GetIntrinsicID = [](
QualType QT) {
3133 QT = VecTy->getElementType();
3134 if (QT->isSignedIntegerType())
3135 return llvm::Intrinsic::vector_reduce_smin;
3136 if (QT->isUnsignedIntegerType())
3137 return llvm::Intrinsic::vector_reduce_umin;
3138 assert(QT->isFloatingType() &&
"must have a float here");
3139 return llvm::Intrinsic::vector_reduce_fmin;
3143 *
this, E, GetIntrinsicID(E->
getArg(0)->
getType()),
"rdx.min"));
3146 case Builtin::BI__builtin_reduce_add:
3148 *
this, E, llvm::Intrinsic::vector_reduce_add,
"rdx.add"));
3149 case Builtin::BI__builtin_reduce_mul:
3151 *
this, E, llvm::Intrinsic::vector_reduce_mul,
"rdx.mul"));
3152 case Builtin::BI__builtin_reduce_xor:
3154 *
this, E, llvm::Intrinsic::vector_reduce_xor,
"rdx.xor"));
3155 case Builtin::BI__builtin_reduce_or:
3157 *
this, E, llvm::Intrinsic::vector_reduce_or,
"rdx.or"));
3158 case Builtin::BI__builtin_reduce_and:
3160 *
this, E, llvm::Intrinsic::vector_reduce_and,
"rdx.and"));
3162 case Builtin::BI__builtin_matrix_transpose: {
3165 MatrixBuilder MB(Builder);
3166 Value *Result = MB.CreateMatrixTranspose(MatValue, MatrixTy->getNumRows(),
3167 MatrixTy->getNumColumns());
3171 case Builtin::BI__builtin_matrix_column_major_load: {
3172 MatrixBuilder MB(Builder);
3177 assert(PtrTy &&
"arg0 must be of pointer type");
3183 Value *Result = MB.CreateColumnMajorLoad(
3186 ResultTy->getNumRows(), ResultTy->getNumColumns(),
3191 case Builtin::BI__builtin_matrix_column_major_store: {
3192 MatrixBuilder MB(Builder);
3199 assert(PtrTy &&
"arg1 must be of pointer type");
3204 Value *Result = MB.CreateColumnMajorStore(
3206 Stride, IsVolatile, MatrixTy->getNumRows(), MatrixTy->getNumColumns());
3210 case Builtin::BIfinite:
3211 case Builtin::BI__finite:
3212 case Builtin::BIfinitef:
3213 case Builtin::BI__finitef:
3214 case Builtin::BIfinitel:
3215 case Builtin::BI__finitel:
3216 case Builtin::BI__builtin_isinf:
3217 case Builtin::BI__builtin_isfinite: {
3223 llvm::Type *Ty =
V->getType();
3224 if (!Builder.getIsFPConstrained() ||
3225 Builder.getDefaultConstrainedExcept() == fp::ebIgnore ||
3228 Constant *Infinity = ConstantFP::getInfinity(
V->getType());
3229 CmpInst::Predicate Pred = (BuiltinID == Builtin::BI__builtin_isinf)
3231 : CmpInst::FCMP_ONE;
3232 Value *FCmp = Builder.CreateFCmp(Pred, Fabs, Infinity,
"cmpinf");
3236 if (
Value *Result = getTargetHooks().testFPKind(
V, BuiltinID, Builder, CGM))
3242 unsigned bitsize = Ty->getScalarSizeInBits();
3243 llvm::IntegerType *IntTy = Builder.getIntNTy(bitsize);
3244 Value *IntV = Builder.CreateBitCast(
V, IntTy);
3245 Value *Shl1 = Builder.CreateShl(IntV, 1);
3246 const llvm::fltSemantics &Semantics = Ty->getFltSemantics();
3247 APInt ExpMask = APFloat::getInf(Semantics).bitcastToAPInt();
3248 Value *ExpMaskShl1 = llvm::ConstantInt::get(IntTy, ExpMask.shl(1));
3249 if (BuiltinID == Builtin::BI__builtin_isinf)
3250 V = Builder.CreateICmpEQ(Shl1, ExpMaskShl1);
3252 V = Builder.CreateICmpULT(Shl1, ExpMaskShl1);
3256 case Builtin::BI__builtin_isinf_sign: {
3262 Value *IsInf = Builder.CreateFCmpOEQ(
3263 AbsArg, ConstantFP::getInfinity(Arg->getType()),
"isinf");
3266 llvm::Type *IntTy = ConvertType(E->
getType());
3267 Value *
Zero = Constant::getNullValue(IntTy);
3268 Value *One = ConstantInt::get(IntTy, 1);
3269 Value *NegativeOne = ConstantInt::get(IntTy, -1);
3270 Value *SignResult = Builder.CreateSelect(IsNeg, NegativeOne, One);
3271 Value *Result = Builder.CreateSelect(IsInf, SignResult,
Zero);
3275 case Builtin::BI__builtin_isnormal: {
3280 Value *
Eq = Builder.CreateFCmpOEQ(
V,
V,
"iseq");
3283 Value *IsLessThanInf =
3284 Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(
V->getType()),
"isinf");
3285 APFloat Smallest = APFloat::getSmallestNormalized(
3286 getContext().getFloatTypeSemantics(E->
getArg(0)->
getType()));
3288 Builder.CreateFCmpUGE(Abs, ConstantFP::get(
V->getContext(), Smallest),
3290 V = Builder.CreateAnd(
Eq, IsLessThanInf,
"and");
3291 V = Builder.CreateAnd(
V, IsNormal,
"and");
3295 case Builtin::BI__builtin_flt_rounds: {
3296 Function *F = CGM.getIntrinsic(Intrinsic::flt_rounds);
3298 llvm::Type *ResultType = ConvertType(E->
getType());
3299 Value *Result = Builder.CreateCall(F);
3300 if (Result->getType() != ResultType)
3301 Result = Builder.CreateIntCast(Result, ResultType,
true,
3306 case Builtin::BI__builtin_fpclassify: {
3313 BasicBlock *
Begin = Builder.GetInsertBlock();
3314 BasicBlock *
End = createBasicBlock(
"fpclassify_end", this->CurFn);
3315 Builder.SetInsertPoint(
End);
3318 "fpclassify_result");
3321 Builder.SetInsertPoint(
Begin);
3322 Value *IsZero = Builder.CreateFCmpOEQ(
V, Constant::getNullValue(Ty),
3324 Value *ZeroLiteral = EmitScalarExpr(E->
getArg(4));
3325 BasicBlock *NotZero = createBasicBlock(
"fpclassify_not_zero", this->CurFn);
3326 Builder.CreateCondBr(IsZero,
End, NotZero);
3327 Result->addIncoming(ZeroLiteral,
Begin);
3330 Builder.SetInsertPoint(NotZero);
3331 Value *IsNan = Builder.CreateFCmpUNO(
V,
V,
"cmp");
3333 BasicBlock *NotNan = createBasicBlock(
"fpclassify_not_nan", this->CurFn);
3334 Builder.CreateCondBr(IsNan,
End, NotNan);
3335 Result->addIncoming(NanLiteral, NotZero);
3338 Builder.SetInsertPoint(NotNan);
3341 Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(
V->getType()),
3344 BasicBlock *NotInf = createBasicBlock(
"fpclassify_not_inf", this->CurFn);
3345 Builder.CreateCondBr(IsInf,
End, NotInf);
3346 Result->addIncoming(InfLiteral, NotNan);
3349 Builder.SetInsertPoint(NotInf);
3350 APFloat Smallest = APFloat::getSmallestNormalized(
3351 getContext().getFloatTypeSemantics(E->
getArg(5)->
getType()));
3353 Builder.CreateFCmpUGE(VAbs, ConstantFP::get(
V->getContext(), Smallest),
3355 Value *NormalResult =
3356 Builder.CreateSelect(IsNormal, EmitScalarExpr(E->
getArg(2)),
3357 EmitScalarExpr(E->
getArg(3)));
3358 Builder.CreateBr(
End);
3359 Result->addIncoming(NormalResult, NotInf);
3362 Builder.SetInsertPoint(
End);
3366 case Builtin::BIalloca:
3367 case Builtin::BI_alloca:
3368 case Builtin::BI__builtin_alloca_uninitialized:
3369 case Builtin::BI__builtin_alloca: {
3371 const TargetInfo &TI = getContext().getTargetInfo();
3373 const Align SuitableAlignmentInBytes =
3377 AllocaInst *AI = Builder.CreateAlloca(Builder.getInt8Ty(), Size);
3378 AI->setAlignment(SuitableAlignmentInBytes);
3379 if (BuiltinID != Builtin::BI__builtin_alloca_uninitialized)
3384 case Builtin::BI__builtin_alloca_with_align_uninitialized:
3385 case Builtin::BI__builtin_alloca_with_align: {
3387 Value *AlignmentInBitsValue = EmitScalarExpr(E->
getArg(1));
3388 auto *AlignmentInBitsCI = cast<ConstantInt>(AlignmentInBitsValue);
3389 unsigned AlignmentInBits = AlignmentInBitsCI->getZExtValue();
3390 const Align AlignmentInBytes =
3391 CGM.getContext().toCharUnitsFromBits(AlignmentInBits).getAsAlign();
3392 AllocaInst *AI = Builder.CreateAlloca(Builder.getInt8Ty(), Size);
3393 AI->setAlignment(AlignmentInBytes);
3394 if (BuiltinID != Builtin::BI__builtin_alloca_with_align_uninitialized)
3399 case Builtin::BIbzero:
3400 case Builtin::BI__builtin_bzero: {
3405 Builder.CreateMemSet(Dest, Builder.getInt8(0), SizeVal,
false);
3408 case Builtin::BImemcpy:
3409 case Builtin::BI__builtin_memcpy:
3410 case Builtin::BImempcpy:
3411 case Builtin::BI__builtin_mempcpy: {
3419 Builder.CreateMemCpy(Dest, Src, SizeVal,
false);
3420 if (BuiltinID == Builtin::BImempcpy ||
3421 BuiltinID == Builtin::BI__builtin_mempcpy)
3428 case Builtin::BI__builtin_memcpy_inline: {
3437 Builder.CreateMemCpyInline(Dest, Src, Size);
3441 case Builtin::BI__builtin_char_memchr:
3442 BuiltinID = Builtin::BI__builtin_memchr;
3445 case Builtin::BI__builtin___memcpy_chk: {
3453 if (Size.ugt(DstSize))
3457 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
3458 Builder.CreateMemCpy(Dest, Src, SizeVal,
false);
3462 case Builtin::BI__builtin_objc_memmove_collectable: {
3466 CGM.getObjCRuntime().EmitGCMemmoveCollectable(*
this,
3467 DestAddr, SrcAddr, SizeVal);
3471 case Builtin::BI__builtin___memmove_chk: {
3479 if (Size.ugt(DstSize))
3483 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
3484 Builder.CreateMemMove(Dest, Src, SizeVal,
false);
3488 case Builtin::BImemmove:
3489 case Builtin::BI__builtin_memmove: {
3497 Builder.CreateMemMove(Dest, Src, SizeVal,
false);
3500 case Builtin::BImemset:
3501 case Builtin::BI__builtin_memset: {
3503 Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->
getArg(1)),
3504 Builder.getInt8Ty());
3508 Builder.CreateMemSet(Dest, ByteVal, SizeVal,
false);
3511 case Builtin::BI__builtin___memset_chk: {
3519 if (Size.ugt(DstSize))
3522 Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->
getArg(1)),
3523 Builder.getInt8Ty());
3524 Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
3525 Builder.CreateMemSet(Dest, ByteVal, SizeVal,
false);
3528 case Builtin::BI__builtin_wmemchr: {
3531 if (!getTarget().getTriple().isOSMSVCRT())
3534 llvm::Type *WCharTy = ConvertType(getContext().WCharTy);
3539 BasicBlock *Entry = Builder.GetInsertBlock();
3540 BasicBlock *CmpEq = createBasicBlock(
"wmemchr.eq");
3541 BasicBlock *Next = createBasicBlock(
"wmemchr.next");
3542 BasicBlock *Exit = createBasicBlock(
"wmemchr.exit");
3543 Value *SizeEq0 = Builder.CreateICmpEQ(Size, ConstantInt::get(SizeTy, 0));
3544 Builder.CreateCondBr(SizeEq0, Exit, CmpEq);
3547 PHINode *StrPhi = Builder.CreatePHI(Str->getType(), 2);
3548 StrPhi->addIncoming(Str, Entry);
3549 PHINode *SizePhi = Builder.CreatePHI(SizeTy, 2);
3550 SizePhi->addIncoming(Size, Entry);
3552 getContext().getTypeAlignInChars(getContext().WCharTy);
3553 Value *StrCh = Builder.CreateAlignedLoad(WCharTy, StrPhi, WCharAlign);
3554 Value *FoundChr = Builder.CreateConstInBoundsGEP1_32(WCharTy, StrPhi, 0);
3555 Value *StrEqChr = Builder.CreateICmpEQ(StrCh, Chr);
3556 Builder.CreateCondBr(StrEqChr, Exit, Next);
3559 Value *NextStr = Builder.CreateConstInBoundsGEP1_32(WCharTy, StrPhi, 1);
3560 Value *NextSize = Builder.CreateSub(SizePhi, ConstantInt::get(SizeTy, 1));
3561 Value *NextSizeEq0 =
3562 Builder.CreateICmpEQ(NextSize, ConstantInt::get(SizeTy, 0));
3563 Builder.CreateCondBr(NextSizeEq0, Exit, CmpEq);
3564 StrPhi->addIncoming(NextStr, Next);
3565 SizePhi->addIncoming(NextSize, Next);
3568 PHINode *
Ret = Builder.CreatePHI(Str->getType(), 3);
3569 Ret->addIncoming(llvm::Constant::getNullValue(Str->getType()), Entry);
3570 Ret->addIncoming(llvm::Constant::getNullValue(Str->getType()), Next);
3571 Ret->addIncoming(FoundChr, CmpEq);
3574 case Builtin::BI__builtin_wmemcmp: {
3577 if (!getTarget().getTriple().isOSMSVCRT())
3580 llvm::Type *WCharTy = ConvertType(getContext().WCharTy);
3586 BasicBlock *Entry = Builder.GetInsertBlock();
3587 BasicBlock *CmpGT = createBasicBlock(
"wmemcmp.gt");
3588 BasicBlock *CmpLT = createBasicBlock(
"wmemcmp.lt");
3589 BasicBlock *Next = createBasicBlock(
"wmemcmp.next");
3590 BasicBlock *Exit = createBasicBlock(
"wmemcmp.exit");
3591 Value *SizeEq0 = Builder.CreateICmpEQ(Size, ConstantInt::get(SizeTy, 0));
3592 Builder.CreateCondBr(SizeEq0, Exit, CmpGT);
3595 PHINode *DstPhi = Builder.CreatePHI(Dst->getType(), 2);
3596 DstPhi->addIncoming(Dst, Entry);
3597 PHINode *SrcPhi = Builder.CreatePHI(Src->getType(), 2);
3598 SrcPhi->addIncoming(Src, Entry);
3599 PHINode *SizePhi = Builder.CreatePHI(SizeTy, 2);
3600 SizePhi->addIncoming(Size, Entry);
3602 getContext().getTypeAlignInChars(getContext().WCharTy);
3603 Value *DstCh = Builder.CreateAlignedLoad(WCharTy, DstPhi, WCharAlign);
3604 Value *SrcCh = Builder.CreateAlignedLoad(WCharTy, SrcPhi, WCharAlign);
3605 Value *DstGtSrc = Builder.CreateICmpUGT(DstCh, SrcCh);
3606 Builder.CreateCondBr(DstGtSrc, Exit, CmpLT);
3609 Value *DstLtSrc = Builder.CreateICmpULT(DstCh, SrcCh);
3610 Builder.CreateCondBr(DstLtSrc, Exit, Next);
3613 Value *NextDst = Builder.CreateConstInBoundsGEP1_32(WCharTy, DstPhi, 1);
3614 Value *NextSrc = Builder.CreateConstInBoundsGEP1_32(WCharTy, SrcPhi, 1);
3615 Value *NextSize = Builder.CreateSub(SizePhi, ConstantInt::get(SizeTy, 1));
3616 Value *NextSizeEq0 =
3617 Builder.CreateICmpEQ(NextSize, ConstantInt::get(SizeTy, 0));
3618 Builder.CreateCondBr(NextSizeEq0, Exit, CmpGT);
3619 DstPhi->addIncoming(NextDst, Next);
3620 SrcPhi->addIncoming(NextSrc, Next);
3621 SizePhi->addIncoming(NextSize, Next);
3624 PHINode *
Ret = Builder.CreatePHI(IntTy, 4);
3625 Ret->addIncoming(ConstantInt::get(IntTy, 0), Entry);
3626 Ret->addIncoming(ConstantInt::get(IntTy, 1), CmpGT);
3627 Ret->addIncoming(ConstantInt::get(IntTy, -1), CmpLT);
3628 Ret->addIncoming(ConstantInt::get(IntTy, 0), Next);
3631 case Builtin::BI__builtin_dwarf_cfa: {
3642 Function *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
3644 llvm::ConstantInt::get(Int32Ty,
Offset)));
3646 case Builtin::BI__builtin_return_address: {
3648 getContext().UnsignedIntTy);
3649 Function *F = CGM.getIntrinsic(Intrinsic::returnaddress);
3652 case Builtin::BI_ReturnAddress: {
3653 Function *F = CGM.getIntrinsic(Intrinsic::returnaddress);
3654 return RValue::get(Builder.CreateCall(F, Builder.getInt32(0)));
3656 case Builtin::BI__builtin_frame_address: {
3658 getContext().UnsignedIntTy);
3659 Function *F = CGM.getIntrinsic(Intrinsic::frameaddress, AllocaInt8PtrTy);
3662 case Builtin::BI__builtin_extract_return_addr: {
3664 Value *Result = getTargetHooks().decodeReturnAddress(*
this,
Address);
3667 case Builtin::BI__builtin_frob_return_addr: {
3669 Value *Result = getTargetHooks().encodeReturnAddress(*
this,
Address);
3672 case Builtin::BI__builtin_dwarf_sp_column: {
3673 llvm::IntegerType *Ty
3674 = cast<llvm::IntegerType>(ConvertType(E->
getType()));
3675 int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
3677 CGM.ErrorUnsupported(E,
"__builtin_dwarf_sp_column");
3680 return RValue::get(llvm::ConstantInt::get(Ty, Column,
true));
3682 case Builtin::BI__builtin_init_dwarf_reg_size_table: {
3684 if (getTargetHooks().initDwarfEHRegSizeTable(*
this,
Address))
3685 CGM.ErrorUnsupported(E,
"__builtin_init_dwarf_reg_size_table");
3688 case Builtin::BI__builtin_eh_return: {
3692 llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
3693 assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
3694 "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
3696 CGM.getIntrinsic(IntTy->getBitWidth() == 32 ? Intrinsic::eh_return_i32
3697 : Intrinsic::eh_return_i64);
3698 Builder.CreateCall(F, {Int, Ptr});
3699 Builder.CreateUnreachable();
3702 EmitBlock(createBasicBlock(
"builtin_eh_return.cont"));
3706 case Builtin::BI__builtin_unwind_init: {
3707 Function *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
3710 case Builtin::BI__builtin_extend_pointer: {
3723 Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy,
"extend.cast");
3726 if (IntPtrTy->getBitWidth() == 64)
3730 if (getTargetHooks().extendPointerWithSExt())
3731 return RValue::get(Builder.CreateSExt(Result, Int64Ty,
"extend.sext"));
3733 return RValue::get(Builder.CreateZExt(Result, Int64Ty,
"extend.zext"));
3735 case Builtin::BI__builtin_setjmp: {
3740 Value *FrameAddr = Builder.CreateCall(
3741 CGM.getIntrinsic(Intrinsic::frameaddress, AllocaInt8PtrTy),
3742 ConstantInt::get(Int32Ty, 0));
3743 Builder.CreateStore(FrameAddr, Buf);
3747 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
3748 Address StackSaveSlot = Builder.CreateConstInBoundsGEP(Buf, 2);
3749 Builder.CreateStore(StackAddr, StackSaveSlot);
3752 Function *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
3753 Buf = Builder.CreateElementBitCast(Buf, Int8Ty);
3756 case Builtin::BI__builtin_longjmp: {
3758 Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
3761 Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);
3764 Builder.CreateUnreachable();
3767 EmitBlock(createBasicBlock(
"longjmp.cont"));
3771 case Builtin::BI__builtin_launder: {
3774 Value *Ptr = EmitScalarExpr(Arg);
3776 Ptr = Builder.CreateLaunderInvariantGroup(Ptr);
3780 case Builtin::BI__sync_fetch_and_add:
3781 case Builtin::BI__sync_fetch_and_sub:
3782 case Builtin::BI__sync_fetch_and_or:
3783 case Builtin::BI__sync_fetch_and_and:
3784 case Builtin::BI__sync_fetch_and_xor:
3785 case Builtin::BI__sync_fetch_and_nand:
3786 case Builtin::BI__sync_add_and_fetch:
3787 case Builtin::BI__sync_sub_and_fetch:
3788 case Builtin::BI__sync_and_and_fetch:
3789 case Builtin::BI__sync_or_and_fetch:
3790 case Builtin::BI__sync_xor_and_fetch:
3791 case Builtin::BI__sync_nand_and_fetch:
3792 case Builtin::BI__sync_val_compare_and_swap:
3793 case Builtin::BI__sync_bool_compare_and_swap:
3794 case Builtin::BI__sync_lock_test_and_set:
3795 case Builtin::BI__sync_lock_release:
3796 case Builtin::BI__sync_swap:
3797 llvm_unreachable(
"Shouldn't make it through sema");
3798 case Builtin::BI__sync_fetch_and_add_1:
3799 case Builtin::BI__sync_fetch_and_add_2:
3800 case Builtin::BI__sync_fetch_and_add_4:
3801 case Builtin::BI__sync_fetch_and_add_8:
3802 case Builtin::BI__sync_fetch_and_add_16:
3804 case Builtin::BI__sync_fetch_and_sub_1:
3805 case Builtin::BI__sync_fetch_and_sub_2:
3806 case Builtin::BI__sync_fetch_and_sub_4:
3807 case Builtin::BI__sync_fetch_and_sub_8:
3808 case Builtin::BI__sync_fetch_and_sub_16:
3810 case Builtin::BI__sync_fetch_and_or_1:
3811 case Builtin::BI__sync_fetch_and_or_2:
3812 case Builtin::BI__sync_fetch_and_or_4:
3813 case Builtin::BI__sync_fetch_and_or_8:
3814 case Builtin::BI__sync_fetch_and_or_16:
3816 case Builtin::BI__sync_fetch_and_and_1:
3817 case Builtin::BI__sync_fetch_and_and_2:
3818 case Builtin::BI__sync_fetch_and_and_4:
3819 case Builtin::BI__sync_fetch_and_and_8:
3820 case Builtin::BI__sync_fetch_and_and_16:
3822 case Builtin::BI__sync_fetch_and_xor_1:
3823 case Builtin::BI__sync_fetch_and_xor_2:
3824 case Builtin::BI__sync_fetch_and_xor_4:
3825 case Builtin::BI__sync_fetch_and_xor_8:
3826 case Builtin::BI__sync_fetch_and_xor_16:
3828 case Builtin::BI__sync_fetch_and_nand_1:
3829 case Builtin::BI__sync_fetch_and_nand_2:
3830 case Builtin::BI__sync_fetch_and_nand_4:
3831 case Builtin::BI__sync_fetch_and_nand_8:
3832 case Builtin::BI__sync_fetch_and_nand_16:
3836 case Builtin::BI__sync_fetch_and_min:
3838 case Builtin::BI__sync_fetch_and_max:
3840 case Builtin::BI__sync_fetch_and_umin:
3842 case Builtin::BI__sync_fetch_and_umax:
3845 case Builtin::BI__sync_add_and_fetch_1:
3846 case Builtin::BI__sync_add_and_fetch_2:
3847 case Builtin::BI__sync_add_and_fetch_4:
3848 case Builtin::BI__sync_add_and_fetch_8:
3849 case Builtin::BI__sync_add_and_fetch_16:
3852 case Builtin::BI__sync_sub_and_fetch_1:
3853 case Builtin::BI__sync_sub_and_fetch_2:
3854 case Builtin::BI__sync_sub_and_fetch_4:
3855 case Builtin::BI__sync_sub_and_fetch_8:
3856 case Builtin::BI__sync_sub_and_fetch_16:
3859 case Builtin::BI__sync_and_and_fetch_1:
3860 case Builtin::BI__sync_and_and_fetch_2:
3861 case Builtin::BI__sync_and_and_fetch_4:
3862 case Builtin::BI__sync_and_and_fetch_8:
3863 case Builtin::BI__sync_and_and_fetch_16:
3866 case Builtin::BI__sync_or_and_fetch_1:
3867 case Builtin::BI__sync_or_and_fetch_2:
3868 case Builtin::BI__sync_or_and_fetch_4:
3869 case Builtin::BI__sync_or_and_fetch_8:
3870 case Builtin::BI__sync_or_and_fetch_16:
3872 llvm::Instruction::Or);
3873 case Builtin::BI__sync_xor_and_fetch_1:
3874 case Builtin::BI__sync_xor_and_fetch_2:
3875 case Builtin::BI__sync_xor_and_fetch_4:
3876 case Builtin::BI__sync_xor_and_fetch_8:
3877 case Builtin::BI__sync_xor_and_fetch_16:
3879 llvm::Instruction::Xor);
3880 case Builtin::BI__sync_nand_and_fetch_1:
3881 case Builtin::BI__sync_nand_and_fetch_2:
3882 case Builtin::BI__sync_nand_and_fetch_4:
3883 case Builtin::BI__sync_nand_and_fetch_8:
3884 case Builtin::BI__sync_nand_and_fetch_16:
3888 case Builtin::BI__sync_val_compare_and_swap_1:
3889 case Builtin::BI__sync_val_compare_and_swap_2:
3890 case Builtin::BI__sync_val_compare_and_swap_4:
3891 case Builtin::BI__sync_val_compare_and_swap_8:
3892 case Builtin::BI__sync_val_compare_and_swap_16:
3895 case Builtin::BI__sync_bool_compare_and_swap_1:
3896 case Builtin::BI__sync_bool_compare_and_swap_2:
3897 case Builtin::BI__sync_bool_compare_and_swap_4:
3898 case Builtin::BI__sync_bool_compare_and_swap_8:
3899 case Builtin::BI__sync_bool_compare_and_swap_16:
3902 case Builtin::BI__sync_swap_1:
3903 case Builtin::BI__sync_swap_2:
3904 case Builtin::BI__sync_swap_4:
3905 case Builtin::BI__sync_swap_8:
3906 case Builtin::BI__sync_swap_16:
3909 case Builtin::BI__sync_lock_test_and_set_1:
3910 case Builtin::BI__sync_lock_test_and_set_2:
3911 case Builtin::BI__sync_lock_test_and_set_4:
3912 case Builtin::BI__sync_lock_test_and_set_8:
3913 case Builtin::BI__sync_lock_test_and_set_16:
3916 case Builtin::BI__sync_lock_release_1:
3917 case Builtin::BI__sync_lock_release_2:
3918 case Builtin::BI__sync_lock_release_4:
3919 case Builtin::BI__sync_lock_release_8:
3920 case Builtin::BI__sync_lock_release_16: {
3923 CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
3924 llvm::Type *ITy = llvm::IntegerType::get(getLLVMContext(),
3926 Ptr = Builder.CreateBitCast(Ptr, ITy->getPointerTo());
3927 llvm::StoreInst *
Store =
3928 Builder.CreateAlignedStore(llvm::Constant::getNullValue(ITy), Ptr,
3930 Store->setAtomic(llvm::AtomicOrdering::Release);
3934 case Builtin::BI__sync_synchronize: {
3942 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent);
3946 case Builtin::BI__builtin_nontemporal_load:
3948 case Builtin::BI__builtin_nontemporal_store:
3950 case Builtin::BI__c11_atomic_is_lock_free:
3951 case Builtin::BI__atomic_is_lock_free: {
3955 const char *LibCallName =
"__atomic_is_lock_free";
3958 getContext().getSizeType());
3959 if (BuiltinID == Builtin::BI__atomic_is_lock_free)
3961 getContext().VoidPtrTy);
3964 getContext().VoidPtrTy);
3966 CGM.getTypes().arrangeBuiltinFunctionCall(E->
getType(), Args);
3967 llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(FuncInfo);
3968 llvm::FunctionCallee Func = CGM.CreateRuntimeFunction(FTy, LibCallName);
3973 case Builtin::BI__atomic_test_and_set: {
3981 unsigned AddrSpace = Ptr->getType()->getPointerAddressSpace();
3982 Ptr = Builder.CreateBitCast(Ptr, Int8Ty->getPointerTo(AddrSpace));
3983 Value *NewVal = Builder.getInt8(1);
3985 if (isa<llvm::ConstantInt>(Order)) {
3986 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
3987 AtomicRMWInst *Result =
nullptr;
3991 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
3992 llvm::AtomicOrdering::Monotonic);
3996 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
3997 llvm::AtomicOrdering::Acquire);
4000 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
4001 llvm::AtomicOrdering::Release);
4005 Result = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
4006 llvm::AtomicOrdering::AcquireRelease);
4009 Result = Builder.CreateAtomicRMW(
4010 llvm::AtomicRMWInst::Xchg, Ptr, NewVal,
4011 llvm::AtomicOrdering::SequentiallyConsistent);
4014 Result->setVolatile(Volatile);
4015 return RValue::get(Builder.CreateIsNotNull(Result,
"tobool"));
4018 llvm::BasicBlock *ContBB = createBasicBlock(
"atomic.continue", CurFn);
4020 llvm::BasicBlock *BBs[5] = {
4021 createBasicBlock(
"monotonic", CurFn),
4022 createBasicBlock(
"acquire", CurFn),
4023 createBasicBlock(
"release", CurFn),
4024 createBasicBlock(
"acqrel", CurFn),
4025 createBasicBlock(
"seqcst", CurFn)
4027 llvm::AtomicOrdering Orders[5] = {
4028 llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Acquire,
4029 llvm::AtomicOrdering::Release, llvm::AtomicOrdering::AcquireRelease,
4030 llvm::AtomicOrdering::SequentiallyConsistent};
4032 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(),
false);
4033 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
4035 Builder.SetInsertPoint(ContBB);
4036 PHINode *Result = Builder.CreatePHI(Int8Ty, 5,
"was_set");
4038 for (
unsigned i = 0; i < 5; ++i) {
4039 Builder.SetInsertPoint(BBs[i]);
4040 AtomicRMWInst *RMW = Builder.CreateAtomicRMW(llvm::AtomicRMWInst::Xchg,
4041 Ptr, NewVal, Orders[i]);
4042 RMW->setVolatile(Volatile);
4043 Result->addIncoming(RMW, BBs[i]);
4044 Builder.CreateBr(ContBB);
4047 SI->addCase(Builder.getInt32(0), BBs[0]);
4048 SI->addCase(Builder.getInt32(1), BBs[1]);
4049 SI->addCase(Builder.getInt32(2), BBs[1]);
4050 SI->addCase(Builder.getInt32(3), BBs[2]);
4051 SI->addCase(Builder.getInt32(4), BBs[3]);
4052 SI->addCase(Builder.getInt32(5), BBs[4]);
4054 Builder.SetInsertPoint(ContBB);
4055 return RValue::get(Builder.CreateIsNotNull(Result,
"tobool"));
4058 case Builtin::BI__atomic_clear: {
4064 Ptr = Builder.CreateElementBitCast(Ptr, Int8Ty);
4065 Value *NewVal = Builder.getInt8(0);
4067 if (isa<llvm::ConstantInt>(Order)) {
4068 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
4069 StoreInst *
Store = Builder.CreateStore(NewVal, Ptr, Volatile);
4073 Store->setOrdering(llvm::AtomicOrdering::Monotonic);
4076 Store->setOrdering(llvm::AtomicOrdering::Release);
4079 Store->setOrdering(llvm::AtomicOrdering::SequentiallyConsistent);
4085 llvm::BasicBlock *ContBB = createBasicBlock(
"atomic.continue", CurFn);
4087 llvm::BasicBlock *BBs[3] = {
4088 createBasicBlock(
"monotonic", CurFn),
4089 createBasicBlock(
"release", CurFn),
4090 createBasicBlock(
"seqcst", CurFn)
4092 llvm::AtomicOrdering Orders[3] = {
4093 llvm::AtomicOrdering::Monotonic, llvm::AtomicOrdering::Release,
4094 llvm::AtomicOrdering::SequentiallyConsistent};
4096 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(),
false);
4097 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, BBs[0]);
4099 for (
unsigned i = 0; i < 3; ++i) {
4100 Builder.SetInsertPoint(BBs[i]);
4101 StoreInst *
Store = Builder.CreateStore(NewVal, Ptr, Volatile);
4102 Store->setOrdering(Orders[i]);
4103 Builder.CreateBr(ContBB);
4106 SI->addCase(Builder.getInt32(0), BBs[0]);
4107 SI->addCase(Builder.getInt32(3), BBs[1]);
4108 SI->addCase(Builder.getInt32(5), BBs[2]);
4110 Builder.SetInsertPoint(ContBB);
4114 case Builtin::BI__atomic_thread_fence:
4115 case Builtin::BI__atomic_signal_fence:
4116 case Builtin::BI__c11_atomic_thread_fence:
4117 case Builtin::BI__c11_atomic_signal_fence: {
4119 if (BuiltinID == Builtin::BI__atomic_signal_fence ||
4120 BuiltinID == Builtin::BI__c11_atomic_signal_fence)
4121 SSID = llvm::SyncScope::SingleThread;
4125 if (isa<llvm::ConstantInt>(Order)) {
4126 int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
4133 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
4136 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
4139 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
4142 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
4148 llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
4149 AcquireBB = createBasicBlock(
"acquire", CurFn);
4150 ReleaseBB = createBasicBlock(
"release", CurFn);
4151 AcqRelBB = createBasicBlock(
"acqrel", CurFn);
4152 SeqCstBB = createBasicBlock(
"seqcst", CurFn);
4153 llvm::BasicBlock *ContBB = createBasicBlock(
"atomic.continue", CurFn);
4155 Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(),
false);
4156 llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);
4158 Builder.SetInsertPoint(AcquireBB);
4159 Builder.CreateFence(llvm::AtomicOrdering::Acquire, SSID);
4160 Builder.CreateBr(ContBB);
4161 SI->addCase(Builder.getInt32(1), AcquireBB);
4162 SI->addCase(Builder.getInt32(2), AcquireBB);
4164 Builder.SetInsertPoint(ReleaseBB);
4165 Builder.CreateFence(llvm::AtomicOrdering::Release, SSID);
4166 Builder.CreateBr(ContBB);
4167 SI->addCase(Builder.getInt32(3), ReleaseBB);
4169 Builder.SetInsertPoint(AcqRelBB);
4170 Builder.CreateFence(llvm::AtomicOrdering::AcquireRelease, SSID);
4171 Builder.CreateBr(ContBB);
4172 SI->addCase(Builder.getInt32(4), AcqRelBB);
4174 Builder.SetInsertPoint(SeqCstBB);
4175 Builder.CreateFence(llvm::AtomicOrdering::SequentiallyConsistent, SSID);
4176 Builder.CreateBr(ContBB);
4177 SI->addCase(Builder.getInt32(5), SeqCstBB);
4179 Builder.SetInsertPoint(ContBB);
4183 case Builtin::BI__builtin_signbit:
4184 case Builtin::BI__builtin_signbitf:
4185 case Builtin::BI__builtin_signbitl: {
4190 case Builtin::BI__warn_memset_zero_len:
4192 case Builtin::BI__annotation: {
4196 const auto *Str = cast<StringLiteral>(Arg->IgnoreParenCasts());
4197 assert(Str->getCharByteWidth() == 2);
4198 StringRef WideBytes = Str->getBytes();
4200 if (!convertUTF16ToUTF8String(
4201 makeArrayRef(WideBytes.data(), WideBytes.size()), StrUtf8)) {
4202 CGM.ErrorUnsupported(E,
"non-UTF16 __annotation argument");
4205 Strings.push_back(llvm::MDString::get(getLLVMContext(), StrUtf8));
4210 CGM.getIntrinsic(llvm::Intrinsic::codeview_annotation, {});
4211 MDTuple *StrTuple = MDTuple::get(getLLVMContext(), Strings);
4212 Builder.CreateCall(F, MetadataAsValue::get(getLLVMContext(), StrTuple));
4215 case Builtin::BI__builtin_annotation: {
4216 llvm::Value *AnnVal = EmitScalarExpr(E->
getArg(0));
4217 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
4223 StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
4225 EmitAnnotationCall(F, AnnVal, Str, E->
getExprLoc(),
nullptr));
4227 case Builtin::BI__builtin_addcb:
4228 case Builtin::BI__builtin_addcs:
4229 case Builtin::BI__builtin_addc:
4230 case Builtin::BI__builtin_addcl:
4231 case Builtin::BI__builtin_addcll:
4232 case Builtin::BI__builtin_subcb:
4233 case Builtin::BI__builtin_subcs:
4234 case Builtin::BI__builtin_subc:
4235 case Builtin::BI__builtin_subcl:
4236 case Builtin::BI__builtin_subcll: {
4256 llvm::Value *
X = EmitScalarExpr(E->
getArg(0));
4257 llvm::Value *Y = EmitScalarExpr(E->
getArg(1));
4258 llvm::Value *Carryin = EmitScalarExpr(E->
getArg(2));
4259 Address CarryOutPtr = EmitPointerWithAlignment(E->
getArg(3));
4263 switch (BuiltinID) {
4264 default: llvm_unreachable(
"Unknown multiprecision builtin id.");
4265 case Builtin::BI__builtin_addcb:
4266 case Builtin::BI__builtin_addcs:
4267 case Builtin::BI__builtin_addc:
4268 case Builtin::BI__builtin_addcl:
4269 case Builtin::BI__builtin_addcll:
4270 IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
4272 case Builtin::BI__builtin_subcb:
4273 case Builtin::BI__builtin_subcs:
4274 case Builtin::BI__builtin_subc:
4275 case Builtin::BI__builtin_subcl:
4276 case Builtin::BI__builtin_subcll:
4277 IntrinsicId = llvm::Intrinsic::usub_with_overflow;
4282 llvm::Value *Carry1;
4285 llvm::Value *Carry2;
4287 Sum1, Carryin, Carry2);
4288 llvm::Value *CarryOut = Builder.CreateZExt(Builder.CreateOr(Carry1, Carry2),
4290 Builder.CreateStore(CarryOut, CarryOutPtr);
4294 case Builtin::BI__builtin_add_overflow:
4295 case Builtin::BI__builtin_sub_overflow:
4296 case Builtin::BI__builtin_mul_overflow: {
4304 WidthAndSignedness LeftInfo =
4306 WidthAndSignedness RightInfo =
4308 WidthAndSignedness ResultInfo =
4315 RightInfo, ResultArg, ResultQTy,
4321 *
this, LeftArg, LeftInfo, RightArg, RightInfo, ResultArg, ResultQTy,
4324 WidthAndSignedness EncompassingInfo =
4327 llvm::Type *EncompassingLLVMTy =
4328 llvm::IntegerType::get(CGM.getLLVMContext(), EncompassingInfo.Width);
4330 llvm::Type *ResultLLVMTy = CGM.getTypes().ConvertType(ResultQTy);
4333 switch (BuiltinID) {
4335 llvm_unreachable(
"Unknown overflow builtin id.");
4336 case Builtin::BI__builtin_add_overflow:
4337 IntrinsicId = EncompassingInfo.Signed
4338 ? llvm::Intrinsic::sadd_with_overflow
4339 : llvm::Intrinsic::uadd_with_overflow;
4341 case Builtin::BI__builtin_sub_overflow:
4342 IntrinsicId = EncompassingInfo.Signed
4343 ? llvm::Intrinsic::ssub_with_overflow
4344 : llvm::Intrinsic::usub_with_overflow;
4346 case Builtin::BI__builtin_mul_overflow:
4347 IntrinsicId = EncompassingInfo.Signed
4348 ? llvm::Intrinsic::smul_with_overflow
4349 : llvm::Intrinsic::umul_with_overflow;
4353 llvm::Value *Left = EmitScalarExpr(LeftArg);
4354 llvm::Value *Right = EmitScalarExpr(RightArg);
4355 Address ResultPtr = EmitPointerWithAlignment(ResultArg);
4358 Left = Builder.CreateIntCast(Left, EncompassingLLVMTy, LeftInfo.Signed);
4359 Right = Builder.CreateIntCast(Right, EncompassingLLVMTy, RightInfo.Signed);
4362 llvm::Value *Overflow, *Result;
4365 if (EncompassingInfo.Width > ResultInfo.Width) {
4368 llvm::Value *ResultTrunc = Builder.CreateTrunc(Result, ResultLLVMTy);
4372 llvm::Value *ResultTruncExt = Builder.CreateIntCast(
4373 ResultTrunc, EncompassingLLVMTy, ResultInfo.Signed);
4374 llvm::Value *TruncationOverflow =
4375 Builder.CreateICmpNE(Result, ResultTruncExt);
4377 Overflow = Builder.CreateOr(Overflow, TruncationOverflow);
4378 Result = ResultTrunc;
4384 Builder.CreateStore(EmitToMemory(Result, ResultQTy), ResultPtr, isVolatile);
4389 case Builtin::BI__builtin_uadd_overflow:
4390 case Builtin::BI__builtin_uaddl_overflow:
4391 case Builtin::BI__builtin_uaddll_overflow:
4392 case Builtin::BI__builtin_usub_overflow:
4393 case Builtin::BI__builtin_usubl_overflow:
4394 case Builtin::BI__builtin_usubll_overflow:
4395 case Builtin::BI__builtin_umul_overflow:
4396 case Builtin::BI__builtin_umull_overflow:
4397 case Builtin::BI__builtin_umulll_overflow:
4398 case Builtin::BI__builtin_sadd_overflow:
4399 case Builtin::BI__builtin_saddl_overflow:
4400 case Builtin::BI__builtin_saddll_overflow:
4401 case Builtin::BI__builtin_ssub_overflow:
4402 case Builtin::BI__builtin_ssubl_overflow:
4403 case Builtin::BI__builtin_ssubll_overflow:
4404 case Builtin::BI__builtin_smul_overflow:
4405 case Builtin::BI__builtin_smull_overflow:
4406 case Builtin::BI__builtin_smulll_overflow: {
4411 llvm::Value *
X = EmitScalarExpr(E->
getArg(0));
4412 llvm::Value *Y = EmitScalarExpr(E->
getArg(1));
4417 switch (BuiltinID) {
4418 default: llvm_unreachable(
"Unknown overflow builtin id.");
4419 case Builtin::BI__builtin_uadd_overflow:
4420 case Builtin::BI__builtin_uaddl_overflow:
4421 case Builtin::BI__builtin_uaddll_overflow:
4422 IntrinsicId = llvm::Intrinsic::uadd_with_overflow;
4424 case Builtin::BI__builtin_usub_overflow:
4425 case Builtin::BI__builtin_usubl_overflow:
4426 case Builtin::BI__builtin_usubll_overflow:
4427 IntrinsicId = llvm::Intrinsic::usub_with_overflow;
4429 case Builtin::BI__builtin_umul_overflow:
4430 case Builtin::BI__builtin_umull_overflow:
4431 case Builtin::BI__builtin_umulll_overflow:
4432 IntrinsicId = llvm::Intrinsic::umul_with_overflow;
4434 case Builtin::BI__builtin_sadd_overflow:
4435 case Builtin::BI__builtin_saddl_overflow:
4436 case Builtin::BI__builtin_saddll_overflow:
4437 IntrinsicId = llvm::Intrinsic::sadd_with_overflow;
4439 case Builtin::BI__builtin_ssub_overflow:
4440 case Builtin::BI__builtin_ssubl_overflow:
4441 case Builtin::BI__builtin_ssubll_overflow:
4442 IntrinsicId = llvm::Intrinsic::ssub_with_overflow;
4444 case Builtin::BI__builtin_smul_overflow:
4445 case Builtin::BI__builtin_smull_overflow:
4446 case Builtin::BI__builtin_smulll_overflow:
4447 IntrinsicId = llvm::Intrinsic::smul_with_overflow;
4454 Builder.CreateStore(Sum, SumOutPtr);
4458 case Builtin::BIaddressof:
4459 case Builtin::BI__addressof:
4460 case Builtin::BI__builtin_addressof:
4462 case Builtin::BI__builtin_function_start:
4465 case Builtin::BI__builtin_operator_new:
4466 return EmitBuiltinNewDeleteCall(
4468 case Builtin::BI__builtin_operator_delete:
4469 return EmitBuiltinNewDeleteCall(
4472 case Builtin::BI__builtin_is_aligned:
4473 return EmitBuiltinIsAligned(E);
4474 case Builtin::BI__builtin_align_up:
4475 return EmitBuiltinAlignTo(E,
true);
4476 case Builtin::BI__builtin_align_down:
4477 return EmitBuiltinAlignTo(E,
false);
4479 case Builtin::BI__noop:
4482 case Builtin::BI__builtin_call_with_static_chain: {
4485 return EmitCall(Call->getCallee()->getType(),
4486 EmitCallee(Call->getCallee()), Call, ReturnValue,
4487 EmitScalarExpr(Chain));
4489 case Builtin::BI_InterlockedExchange8:
4490 case Builtin::BI_InterlockedExchange16:
4491 case Builtin::BI_InterlockedExchange:
4492 case Builtin::BI_InterlockedExchangePointer:
4494 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchange, E));
4495 case Builtin::BI_InterlockedCompareExchangePointer:
4496 case Builtin::BI_InterlockedCompareExchangePointer_nf: {
4498 llvm::IntegerType *IntType =
4499 IntegerType::get(getLLVMContext(),
4500 getContext().getTypeSize(E->
getType()));
4501 llvm::Type *IntPtrType = IntType->getPointerTo();
4503 llvm::Value *Destination =
4504 Builder.CreateBitCast(EmitScalarExpr(E->
getArg(0)), IntPtrType);
4506 llvm::Value *Exchange = EmitScalarExpr(E->
getArg(1));
4507 RTy = Exchange->getType();
4508 Exchange = Builder.CreatePtrToInt(Exchange, IntType);
4510 llvm::Value *Comparand =
4511 Builder.CreatePtrToInt(EmitScalarExpr(E->
getArg(2)), IntType);
4514 BuiltinID == Builtin::BI_InterlockedCompareExchangePointer_nf ?
4515 AtomicOrdering::Monotonic : AtomicOrdering::SequentiallyConsistent;
4517 auto Result = Builder.CreateAtomicCmpXchg(Destination, Comparand, Exchange,
4518 Ordering, Ordering);
4519 Result->setVolatile(
true);
4521 return RValue::get(Builder.CreateIntToPtr(Builder.CreateExtractValue(Result,
4525 case Builtin::BI_InterlockedCompareExchange8:
4526 case Builtin::BI_InterlockedCompareExchange16:
4527 case Builtin::BI_InterlockedCompareExchange:
4528 case Builtin::BI_InterlockedCompareExchange64:
4530 case Builtin::BI_InterlockedIncrement16:
4531 case Builtin::BI_InterlockedIncrement:
4533 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedIncrement, E));
4534 case Builtin::BI_InterlockedDecrement16:
4535 case Builtin::BI_InterlockedDecrement:
4537 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedDecrement, E));
4538 case Builtin::BI_InterlockedAnd8:
4539 case Builtin::BI_InterlockedAnd16:
4540 case Builtin::BI_InterlockedAnd:
4541 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedAnd, E));
4542 case Builtin::BI_InterlockedExchangeAdd8:
4543 case Builtin::BI_InterlockedExchangeAdd16:
4544 case Builtin::BI_InterlockedExchangeAdd:
4546 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeAdd, E));
4547 case Builtin::BI_InterlockedExchangeSub8:
4548 case Builtin::BI_InterlockedExchangeSub16:
4549 case Builtin::BI_InterlockedExchangeSub:
4551 EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedExchangeSub, E));
4552 case Builtin::BI_InterlockedOr8:
4553 case Builtin::BI_InterlockedOr16:
4554 case Builtin::BI_InterlockedOr:
4555 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedOr, E));
4556 case Builtin::BI_InterlockedXor8:
4557 case Builtin::BI_InterlockedXor16:
4558 case Builtin::BI_InterlockedXor:
4559 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::_InterlockedXor, E));
4561 case Builtin::BI_bittest64:
4562 case Builtin::BI_bittest:
4563 case Builtin::BI_bittestandcomplement64:
4564 case Builtin::BI_bittestandcomplement:
4565 case Builtin::BI_bittestandreset64:
4566 case Builtin::BI_bittestandreset:
4567 case Builtin::BI_bittestandset64:
4568 case Builtin::BI_bittestandset:
4569 case Builtin::BI_interlockedbittestandreset:
4570 case Builtin::BI_interlockedbittestandreset64:
4571 case Builtin::BI_interlockedbittestandset64:
4572 case Builtin::BI_interlockedbittestandset:
4573 case Builtin::BI_interlockedbittestandset_acq:
4574 case Builtin::BI_interlockedbittestandset_rel:
4575 case Builtin::BI_interlockedbittestandset_nf:
4576 case Builtin::BI_interlockedbittestandreset_acq:
4577 case Builtin::BI_interlockedbittestandreset_rel:
4578 case Builtin::BI_interlockedbittestandreset_nf:
4583 case Builtin::BI__iso_volatile_load8:
4584 case Builtin::BI__iso_volatile_load16:
4585 case Builtin::BI__iso_volatile_load32:
4586 case Builtin::BI__iso_volatile_load64:
4588 case Builtin::BI__iso_volatile_store8:
4589 case Builtin::BI__iso_volatile_store16:
4590 case Builtin::BI__iso_volatile_store32:
4591 case Builtin::BI__iso_volatile_store64:
4594 case Builtin::BI__exception_code:
4595 case Builtin::BI_exception_code:
4597 case Builtin::BI__exception_info:
4598 case Builtin::BI_exception_info:
4600 case Builtin::BI__abnormal_termination:
4601 case Builtin::BI_abnormal_termination:
4603 case Builtin::BI_setjmpex:
4604 if (getTarget().getTriple().isOSMSVCRT() && E->
getNumArgs() == 1 &&
4608 case Builtin::BI_setjmp:
4609 if (getTarget().getTriple().isOSMSVCRT() && E->
getNumArgs() == 1 &&
4611 if (getTarget().getTriple().getArch() == llvm::Triple::x86)
4613 else if (getTarget().getTriple().getArch() == llvm::Triple::aarch64)
4620 case Builtin::BImove:
4621 case Builtin::BImove_if_noexcept:
4622 case Builtin::BIforward:
4623 case Builtin::BIas_const:
4625 case Builtin::BI__GetExceptionInfo: {
4626 if (llvm::GlobalVariable *GV =
4628 return RValue::get(llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy));
4632 case Builtin::BI__fastfail:
4633 return RValue::get(EmitMSVCBuiltinExpr(MSVCIntrin::__fastfail, E));
4635 case Builtin::BI__builtin_coro_size: {
4636 auto & Context = getContext();
4638 auto T = Builder.getIntNTy(Context.
getTypeSize(SizeTy));
4639 Function *F = CGM.getIntrinsic(Intrinsic::coro_size, T);
4643 case Builtin::BI__builtin_coro_id:
4644 return EmitCoroutineIntrinsic(E, Intrinsic::coro_id);
4645 case Builtin::BI__builtin_coro_promise:
4646 return EmitCoroutineIntrinsic(E, Intrinsic::coro_promise);
4647 case Builtin::BI__builtin_coro_resume:
4648 return EmitCoroutineIntrinsic(E, Intrinsic::coro_resume);
4649 case Builtin::BI__builtin_coro_frame:
4650 return EmitCoroutineIntrinsic(E, Intrinsic::coro_frame);
4651 case Builtin::BI__builtin_coro_noop:
4652 return EmitCoroutineIntrinsic(E, Intrinsic::coro_noop);
4653 case Builtin::BI__builtin_coro_free:
4654 return EmitCoroutineIntrinsic(E, Intrinsic::coro_free);
4655 case Builtin::BI__builtin_coro_destroy:
4656 return EmitCoroutineIntrinsic(E, Intrinsic::coro_destroy);
4657 case Builtin::BI__builtin_coro_done:
4658 return EmitCoroutineIntrinsic(E, Intrinsic::coro_done);
4659 case Builtin::BI__builtin_coro_alloc:
4660 return EmitCoroutineIntrinsic(E, Intrinsic::coro_alloc);
4661 case Builtin::BI__builtin_coro_begin:
4662 return EmitCoroutineIntrinsic(E, Intrinsic::coro_begin);
4663 case Builtin::BI__builtin_coro_end:
4664 return EmitCoroutineIntrinsic(E, Intrinsic::coro_end);
4665 case Builtin::BI__builtin_coro_suspend:
4666 return EmitCoroutineIntrinsic(E, Intrinsic::coro_suspend);
4669 case Builtin::BIread_pipe:
4670 case Builtin::BIwrite_pipe: {
4672 *Arg1 = EmitScalarExpr(E->
getArg(1));
4678 unsigned GenericAS =
4680 llvm::Type *I8PTy = llvm::PointerType::get(
4681 llvm::Type::getInt8Ty(getLLVMContext()), GenericAS);
4685 const char *Name = (BuiltinID == Builtin::BIread_pipe) ?
"__read_pipe_2"
4689 llvm::Type *ArgTys[] = {Arg0->getType(), I8PTy, Int32Ty, Int32Ty};
4690 llvm::FunctionType *FTy = llvm::FunctionType::get(
4692 Value *BCast = Builder.CreatePointerCast(Arg1, I8PTy);
4694 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
4695 {Arg0, BCast, PacketSize, PacketAlign}));
4698 "Illegal number of parameters to pipe function");
4699 const char *Name = (BuiltinID == Builtin::BIread_pipe) ?
"__read_pipe_4"
4702 llvm::Type *ArgTys[] = {Arg0->getType(), Arg1->getType(), Int32Ty, I8PTy,
4705 *Arg3 = EmitScalarExpr(E->
getArg(3));
4706 llvm::FunctionType *FTy = llvm::FunctionType::get(
4708 Value *BCast = Builder.CreatePointerCast(Arg3, I8PTy);
4711 if (Arg2->getType() != Int32Ty)
4712 Arg2 = Builder.CreateZExtOrTrunc(Arg2, Int32Ty);
4714 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
4715 {Arg0, Arg1, Arg2, BCast, PacketSize, PacketAlign}));
4720 case Builtin::BIreserve_read_pipe:
4721 case Builtin::BIreserve_write_pipe:
4722 case Builtin::BIwork_group_reserve_read_pipe:
4723 case Builtin::BIwork_group_reserve_write_pipe:
4724 case Builtin::BIsub_group_reserve_read_pipe:
4725 case Builtin::BIsub_group_reserve_write_pipe: {
4728 if (BuiltinID == Builtin::BIreserve_read_pipe)
4729 Name =
"__reserve_read_pipe";
4730 else if (BuiltinID == Builtin::BIreserve_write_pipe)
4731 Name =
"__reserve_write_pipe";
4732 else if (BuiltinID == Builtin::BIwork_group_reserve_read_pipe)
4733 Name =
"__work_group_reserve_read_pipe";
4734 else if (BuiltinID == Builtin::BIwork_group_reserve_write_pipe)
4735 Name =
"__work_group_reserve_write_pipe";
4736 else if (BuiltinID == Builtin::BIsub_group_reserve_read_pipe)
4737 Name =
"__sub_group_reserve_read_pipe";
4739 Name =
"__sub_group_reserve_write_pipe";
4742 *Arg1 = EmitScalarExpr(E->
getArg(1));
4743 llvm::Type *ReservedIDTy = ConvertType(getContext().OCLReserveIDTy);
4749 llvm::Type *ArgTys[] = {Arg0->getType(), Int32Ty, Int32Ty, Int32Ty};
4750 llvm::FunctionType *FTy = llvm::FunctionType::get(
4754 if (Arg1->getType() != Int32Ty)
4755 Arg1 = Builder.CreateZExtOrTrunc(Arg1, Int32Ty);
4756 return RValue::get(EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
4757 {Arg0, Arg1, PacketSize, PacketAlign}));
4761 case Builtin::BIcommit_read_pipe:
4762 case Builtin::BIcommit_write_pipe:
4763 case Builtin::BIwork_group_commit_read_pipe:
4764 case Builtin::BIwork_group_commit_write_pipe:
4765 case Builtin::BIsub_group_commit_read_pipe:
4766 case Builtin::BIsub_group_commit_write_pipe: {
4768 if (BuiltinID == Builtin::BIcommit_read_pipe)
4769 Name =
"__commit_read_pipe";
4770 else if (BuiltinID == Builtin::BIcommit_write_pipe)
4771 Name =
"__commit_write_pipe";
4772 else if (BuiltinID == Builtin::BIwork_group_commit_read_pipe)
4773 Name =
"__work_group_commit_read_pipe";
4774 else if (BuiltinID == Builtin::BIwork_group_commit_write_pipe)
4775 Name =
"__work_group_commit_write_pipe";
4776 else if (BuiltinID == Builtin::BIsub_group_commit_read_pipe)
4777 Name =
"__sub_group_commit_read_pipe";
4779 Name =
"__sub_group_commit_write_pipe";
4782 *Arg1 = EmitScalarExpr(E->
getArg(1));
4788 llvm::Type *ArgTys[] = {Arg0->getType(), Arg1->getType(), Int32Ty, Int32Ty};
4789 llvm::FunctionType *FTy =
4790 llvm::FunctionType::get(llvm::Type::getVoidTy(getLLVMContext()),
4793 return RValue::get(EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
4794 {Arg0, Arg1, PacketSize, PacketAlign}));
4797 case Builtin::BIget_pipe_num_packets:
4798 case Builtin::BIget_pipe_max_packets: {
4799 const char *BaseName;
4801 if (BuiltinID == Builtin::BIget_pipe_num_packets)
4802 BaseName =
"__get_pipe_num_packets";
4804 BaseName =
"__get_pipe_max_packets";
4806 std::string(PipeTy->isReadOnly() ?
"_ro" :
"_wo");
4813 llvm::Type *ArgTys[] = {Arg0->getType(), Int32Ty, Int32Ty};
4814 llvm::FunctionType *FTy = llvm::FunctionType::get(
4817 return RValue::get(EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
4818 {Arg0, PacketSize, PacketAlign}));
4822 case Builtin::BIto_global:
4823 case Builtin::BIto_local:
4824 case Builtin::BIto_private: {
4825 auto Arg0 = EmitScalarExpr(E->
getArg(0));
4826 auto NewArgT = llvm::PointerType::get(Int8Ty,
4828 auto NewRetT = llvm::PointerType::get(Int8Ty,
4829 CGM.getContext().getTargetAddressSpace(
4831 auto FTy = llvm::FunctionType::get(NewRetT, {NewArgT},
false);
4832 llvm::Value *NewArg;
4833 if (Arg0->getType()->getPointerAddressSpace() !=
4834 NewArgT->getPointerAddressSpace())
4835 NewArg = Builder.CreateAddrSpaceCast(Arg0, NewArgT);
4837 NewArg = Builder.CreateBitOrPointerCast(Arg0, NewArgT);
4840 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, NewName), {NewArg});
4841 return RValue::get(Builder.CreateBitOrPointerCast(NewCall,
4847 case Builtin::BIenqueue_kernel: {
4851 llvm::Type *QueueTy = ConvertType(getContext().OCLQueueTy);
4852 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
4855 llvm::Value *Queue = EmitScalarExpr(E->
getArg(0));
4856 llvm::Value *Flags = EmitScalarExpr(E->
getArg(1));
4864 Name =
"__enqueue_kernel_basic";
4865 llvm::Type *ArgTys[] = {QueueTy, Int32Ty, RangeTy, GenericVoidPtrTy,
4867 llvm::FunctionType *FTy = llvm::FunctionType::get(
4871 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(3));
4873 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
4874 llvm::Value *
Block =
4875 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
4877 AttrBuilder B(Builder.getContext());
4879 llvm::AttributeList ByValAttrSet =
4880 llvm::AttributeList::get(CGM.getModule().getContext(), 3
U, B);
4883 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name, ByValAttrSet),
4884 {Queue, Flags, Range, Kernel, Block});
4885 RTCall->setAttributes(ByValAttrSet);
4888 assert(NumArgs >= 5 &&
"Invalid enqueue_kernel signature");
4892 auto CreateArrayForSizeVar = [=](
unsigned First)
4893 -> std::tuple<llvm::Value *, llvm::Value *, llvm::Value *> {
4895 QualType SizeArrayTy = getContext().getConstantArrayType(
4898 auto Tmp = CreateMemTemp(SizeArrayTy,
"block_sizes");
4899 llvm::Value *TmpPtr = Tmp.getPointer();
4900 llvm::Value *TmpSize = EmitLifetimeStart(
4901 CGM.getDataLayout().getTypeAllocSize(Tmp.getElementType()), TmpPtr);
4902 llvm::Value *ElemPtr;
4905 auto *
Zero = llvm::ConstantInt::get(IntTy, 0);
4906 for (
unsigned I =
First; I < NumArgs; ++I) {
4907 auto *Index = llvm::ConstantInt::get(IntTy, I -
First);
4908 auto *GEP = Builder.CreateGEP(Tmp.getElementType(), TmpPtr,
4913 Builder.CreateZExtOrTrunc(EmitScalarExpr(E->
getArg(I)), SizeTy);
4914 Builder.CreateAlignedStore(
4915 V, GEP, CGM.getDataLayout().getPrefTypeAlign(SizeTy));
4917 return std::tie(ElemPtr, TmpSize, TmpPtr);
4923 Name =
"__enqueue_kernel_varargs";
4925 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(3));
4927 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
4928 auto *
Block = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
4929 llvm::Value *ElemPtr, *TmpSize, *TmpPtr;
4930 std::tie(ElemPtr, TmpSize, TmpPtr) = CreateArrayForSizeVar(4);
4934 llvm::Value *
const Args[] = {Queue, Flags,
4936 Block, ConstantInt::get(IntTy, NumArgs - 4),
4938 llvm::Type *
const ArgTys[] = {
4939 QueueTy, IntTy, RangeTy, GenericVoidPtrTy,
4940 GenericVoidPtrTy, IntTy, ElemPtr->getType()};
4942 llvm::FunctionType *FTy = llvm::FunctionType::get(Int32Ty, ArgTys,
false);
4944 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Args));
4946 EmitLifetimeEnd(TmpSize, TmpPtr);
4951 llvm::Type *EventTy = ConvertType(getContext().OCLClkEventTy);
4952 llvm::PointerType *EventPtrTy = EventTy->getPointerTo(
4955 llvm::Value *NumEvents =
4956 Builder.CreateZExtOrTrunc(EmitScalarExpr(E->
getArg(3)), Int32Ty);
4961 llvm::Value *EventWaitList =
nullptr;
4964 EventWaitList = llvm::ConstantPointerNull::get(EventPtrTy);
4967 ? EmitArrayToPointerDecay(E->
getArg(4)).getPointer()
4968 : EmitScalarExpr(E->
getArg(4));
4970 EventWaitList = Builder.CreatePointerCast(EventWaitList, EventPtrTy);
4972 llvm::Value *EventRet =
nullptr;
4975 EventRet = llvm::ConstantPointerNull::get(EventPtrTy);
4978 Builder.CreatePointerCast(EmitScalarExpr(E->
getArg(5)), EventPtrTy);
4982 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(6));
4984 Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
4985 llvm::Value *
Block =
4986 Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
4988 std::vector<llvm::Type *> ArgTys = {
4989 QueueTy, Int32Ty, RangeTy, Int32Ty,
4990 EventPtrTy, EventPtrTy, GenericVoidPtrTy, GenericVoidPtrTy};
4992 std::vector<llvm::Value *> Args = {Queue, Flags, Range,
4993 NumEvents, EventWaitList, EventRet,
4998 Name =
"__enqueue_kernel_basic_events";
4999 llvm::FunctionType *FTy = llvm::FunctionType::get(
5002 EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
5007 Args.push_back(ConstantInt::get(Int32Ty, NumArgs - 7));
5008 ArgTys.push_back(Int32Ty);
5009 Name =
"__enqueue_kernel_events_varargs";
5011 llvm::Value *ElemPtr, *TmpSize, *TmpPtr;
5012 std::tie(ElemPtr, TmpSize, TmpPtr) = CreateArrayForSizeVar(7);
5013 Args.push_back(ElemPtr);
5014 ArgTys.push_back(ElemPtr->getType());
5016 llvm::FunctionType *FTy = llvm::FunctionType::get(
5019 RValue::get(EmitRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name),
5022 EmitLifetimeEnd(TmpSize, TmpPtr);
5029 case Builtin::BIget_kernel_work_group_size: {
5030 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
5033 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(0));
5034 Value *
Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
5035 Value *Arg = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
5037 CGM.CreateRuntimeFunction(
5038 llvm::FunctionType::get(IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
5040 "__get_kernel_work_group_size_impl"),
5043 case Builtin::BIget_kernel_preferred_work_group_size_multiple: {
5044 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
5047 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(0));
5048 Value *
Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
5049 Value *Arg = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
5051 CGM.CreateRuntimeFunction(
5052 llvm::FunctionType::get(IntTy, {GenericVoidPtrTy, GenericVoidPtrTy},
5054 "__get_kernel_preferred_work_group_size_multiple_impl"),
5057 case Builtin::BIget_kernel_max_sub_group_size_for_ndrange:
5058 case Builtin::BIget_kernel_sub_group_count_for_ndrange: {
5059 llvm::Type *GenericVoidPtrTy = Builder.getInt8PtrTy(
5064 CGM.getOpenCLRuntime().emitOpenCLEnqueuedBlock(*
this, E->
getArg(1));
5065 Value *
Kernel = Builder.CreatePointerCast(Info.Kernel, GenericVoidPtrTy);
5066 Value *
Block = Builder.CreatePointerCast(Info.BlockArg, GenericVoidPtrTy);
5068 BuiltinID == Builtin::BIget_kernel_max_sub_group_size_for_ndrange
5069 ?
"__get_kernel_max_sub_group_size_for_ndrange_impl"
5070 :
"__get_kernel_sub_group_count_for_ndrange_impl";
5072 CGM.CreateRuntimeFunction(
5073 llvm::FunctionType::get(
5074 IntTy, {NDRange->getType(), GenericVoidPtrTy, GenericVoidPtrTy},
5077 {NDRange, Kernel, Block}));
5080 case Builtin::BI__builtin_store_half:
5081 case Builtin::BI__builtin_store_halff: {
5084 Value *HalfVal = Builder.CreateFPTrunc(Val, Builder.getHalfTy());
5087 case Builtin::BI__builtin_load_half: {
5090 return RValue::get(Builder.CreateFPExt(HalfVal, Builder.getDoubleTy()));
5092 case Builtin::BI__builtin_load_halff: {
5095 return RValue::get(Builder.CreateFPExt(HalfVal, Builder.getFloatTy()));
5097 case Builtin::BIprintf:
5098 if (getTarget().getTriple().isNVPTX() ||
5099 getTarget().getTriple().isAMDGCN()) {
5100 if (getLangOpts().OpenMPIsDevice)
5101 return EmitOpenMPDevicePrintfCallExpr(E);
5102 if (getTarget().getTriple().isNVPTX())
5103 return EmitNVPTXDevicePrintfCallExpr(E);
5104 if (getTarget().getTriple().isAMDGCN() && getLangOpts().
HIP)
5105 return EmitAMDGPUDevicePrintfCallExpr(E);
5109 case Builtin::BI__builtin_canonicalize:
5110 case Builtin::BI__builtin_canonicalizef:
5111 case Builtin::BI__builtin_canonicalizef16:
5112 case Builtin::BI__builtin_canonicalizel:
5115 case Builtin::BI__builtin_thread_pointer: {
5116 if (!getContext().getTargetInfo().isTLSSupported())
5117 CGM.ErrorUnsupported(E,
"__builtin_thread_pointer");
5121 case Builtin::BI__builtin_os_log_format:
5122 return emitBuiltinOSLogFormat(*E);
5124 case Builtin::BI__xray_customevent: {
5125 if (!ShouldXRayInstrumentFunction())
5128 if (!CGM.getCodeGenOpts().XRayInstrumentationBundle.has(
5132 if (
const auto *XRayAttr = CurFuncDecl->getAttr<XRayInstrumentAttr>())
5133 if (XRayAttr->neverXRayInstrument() && !AlwaysEmitXRayCustomEvents())
5136 Function *F = CGM.getIntrinsic(Intrinsic::xray_customevent);
5137 auto FTy = F->getFunctionType();
5138 auto Arg0 = E->
getArg(0);
5139 auto Arg0Val = EmitScalarExpr(Arg0);
5140 auto Arg0Ty = Arg0->getType();
5141 auto PTy0 = FTy->getParamType(0);
5142 if (PTy0 != Arg0Val->getType()) {
5143 if (Arg0Ty->isArrayType())
5144 Arg0Val = EmitArrayToPointerDecay(Arg0).getPointer();
5146 Arg0Val = Builder.CreatePointerCast(Arg0Val, PTy0);
5148 auto Arg1 = EmitScalarExpr(E->
getArg(1));
5149 auto PTy1 = FTy->getParamType(1);
5150 if (PTy1 != Arg1->getType())
5151 Arg1 = Builder.CreateTruncOrBitCast(Arg1, PTy1);
5152 return RValue::get(Builder.CreateCall(F, {Arg0Val, Arg1}));
5155 case Builtin::BI__xray_typedevent: {
5159 if (!ShouldXRayInstrumentFunction())
5162 if (!CGM.getCodeGenOpts().XRayInstrumentationBundle.has(
5166 if (
const auto *XRayAttr = CurFuncDecl->getAttr<XRayInstrumentAttr>())
5167 if (XRayAttr->neverXRayInstrument() && !AlwaysEmitXRayTypedEvents())
5170 Function *F = CGM.getIntrinsic(Intrinsic::xray_typedevent);
5171 auto FTy = F->getFunctionType();
5172 auto Arg0 = EmitScalarExpr(E->
getArg(0));
5173 auto PTy0 = FTy->getParamType(0);
5174 if (PTy0 != Arg0->getType())
5175 Arg0 = Builder.CreateTruncOrBitCast(Arg0, PTy0);
5176 auto Arg1 = E->
getArg(1);
5177 auto Arg1Val = EmitScalarExpr(Arg1);
5178 auto Arg1Ty = Arg1->getType();
5179 auto PTy1 = FTy->getParamType(1);
5180 if (PTy1 != Arg1Val->getType()) {
5181 if (Arg1Ty->isArrayType())
5182 Arg1Val = EmitArrayToPointerDecay(Arg1).getPointer();
5184 Arg1Val = Builder.CreatePointerCast(Arg1Val, PTy1);
5186 auto Arg2 = EmitScalarExpr(E->
getArg(2));
5187 auto PTy2 = FTy->getParamType(2);
5188 if (PTy2 != Arg2->getType())
5189 Arg2 = Builder.CreateTruncOrBitCast(Arg2, PTy2);
5190 return RValue::get(Builder.CreateCall(F, {Arg0, Arg1Val, Arg2}));
5193 case Builtin::BI__builtin_ms_va_start:
5194 case Builtin::BI__builtin_ms_va_end:
5196 EmitVAStartEnd(EmitMSVAListRef(E->
getArg(0)).getPointer(),
5197 BuiltinID == Builtin::BI__builtin_ms_va_start));
5199 case Builtin::BI__builtin_ms_va_copy: {
5209 llvm::Type *BPP = Int8PtrPtrTy;
5216 Value *ArgPtr = Builder.CreateLoad(SrcAddr,
"ap.val");
5217 return RValue::get(Builder.CreateStore(ArgPtr, DestAddr));
5220 case Builtin::BI__builtin_get_device_side_mangled_name: {
5221 auto Name = CGM.getCUDARuntime().getDeviceSideName(
5223 auto Str = CGM.GetAddrOfConstantCString(Name,
"");
5224 llvm::Constant *Zeros[] = {llvm::ConstantInt::get(SizeTy, 0),
5225 llvm::ConstantInt::get(SizeTy, 0)};
5226 auto *Ptr = llvm::ConstantExpr::getGetElementPtr(Str.getElementType(),
5227 Str.getPointer(), Zeros);
5235 if (getContext().
BuiltinInfo.isLibFunction(BuiltinID))
5237 CGM.getBuiltinLibFunction(FD, BuiltinID));
5241 if (getContext().
BuiltinInfo.isPredefinedLibFunction(BuiltinID))
5243 cast<llvm::Constant>(EmitScalarExpr(E->
getCallee())));
5250 checkTargetFeatures(E, FD);
5252 if (
unsigned VectorWidth = getContext().
BuiltinInfo.getRequiredVectorWidth(BuiltinID))
5253 LargestVectorWidth =
std::max(LargestVectorWidth, VectorWidth);
5256 const char *Name = getContext().BuiltinInfo.getName(BuiltinID);
5259 llvm::Triple::getArchTypePrefix(getTarget().getTriple().getArch());
5260 if (!Prefix.empty()) {
5261 IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix.data(), Name);
5265 if (IntrinsicID == Intrinsic::not_intrinsic)
5266 IntrinsicID = Intrinsic::getIntrinsicForMSBuiltin(Prefix.data(), Name);
5269 if (IntrinsicID != Intrinsic::not_intrinsic) {
5274 unsigned ICEArguments = 0;
5276 getContext().GetBuiltinType(BuiltinID,
Error, &ICEArguments);
5279 Function *F = CGM.getIntrinsic(IntrinsicID);
5280 llvm::FunctionType *FTy = F->getFunctionType();
5282 for (
unsigned i = 0, e = E->
getNumArgs(); i != e; ++i) {
5285 if ((ICEArguments & (1 << i)) == 0) {
5286 ArgValue = EmitScalarExpr(E->
getArg(i));
5290 ArgValue = llvm::ConstantInt::get(
5297 llvm::Type *PTy = FTy->getParamType(i);
5298 if (PTy != ArgValue->getType()) {
5300 if (
auto *PtrTy = dyn_cast<llvm::PointerType>(PTy)) {
5301 if (PtrTy->getAddressSpace() !=
5302 ArgValue->getType()->getPointerAddressSpace()) {
5303 ArgValue = Builder.CreateAddrSpaceCast(
5305 ArgValue->getType()->getPointerTo(PtrTy->getAddressSpace()));
5309 assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
5310 "Must be able to losslessly bit cast to param");
5313 if (PTy->isX86_AMXTy())
5314 ArgValue = Builder.CreateIntrinsic(Intrinsic::x86_cast_vector_to_tile,
5315 {ArgValue->getType()}, {ArgValue});
5317 ArgValue = Builder.CreateBitCast(ArgValue, PTy);
5320 Args.push_back(ArgValue);
5323 Value *
V = Builder.CreateCall(F, Args);
5326 llvm::Type *RetTy = VoidTy;
5328 RetTy = ConvertType(BuiltinRetType);
5330 if (RetTy !=
V->getType()) {
5332 if (
auto *PtrTy = dyn_cast<llvm::PointerType>(RetTy)) {
5333 if (PtrTy->getAddressSpace() !=
V->getType()->getPointerAddressSpace()) {
5334 V = Builder.CreateAddrSpaceCast(
5335 V,
V->getType()->getPointerTo(PtrTy->getAddressSpace()));
5339 assert(
V->getType()->canLosslesslyBitCastTo(RetTy) &&
5340 "Must be able to losslessly bit cast result type");
5343 if (
V->getType()->isX86_AMXTy())
5344 V = Builder.CreateIntrinsic(Intrinsic::x86_cast_tile_to_vector, {RetTy},
5347 V = Builder.CreateBitCast(
V, RetTy);
5364 if (
Value *
V = EmitTargetBuiltinExpr(BuiltinID, E, ReturnValue)) {
5370 ReturnValue.isVolatile());
5372 llvm_unreachable(
"No current target builtin returns complex");
5374 llvm_unreachable(
"Bad evaluation kind in EmitBuiltinExpr");
5377 ErrorUnsupported(E,
"builtin function");
5380 return GetUndefRValue(E->
getType());
5384 unsigned BuiltinID,
const CallExpr *E,
5386 llvm::Triple::ArchType Arch) {
5388 case llvm::Triple::arm:
5389 case llvm::Triple::armeb:
5390 case llvm::Triple::thumb:
5391 case llvm::Triple::thumbeb:
5393 case llvm::Triple::aarch64:
5394 case llvm::Triple::aarch64_32:
5395 case llvm::Triple::aarch64_be:
5397 case llvm::Triple::bpfeb:
5398 case llvm::Triple::bpfel:
5400 case llvm::Triple::x86:
5401 case llvm::Triple::x86_64:
5403 case llvm::Triple::ppc:
5404 case llvm::Triple::ppcle:
5405 case llvm::Triple::ppc64:
5406 case llvm::Triple::ppc64le:
5408 case llvm::Triple::r600:
5409 case llvm::Triple::amdgcn:
5411 case llvm::Triple::systemz:
5413 case llvm::Triple::nvptx:
5414 case llvm::Triple::nvptx64:
5416 case llvm::Triple::wasm32:
5417 case llvm::Triple::wasm64:
5419 case llvm::Triple::hexagon:
5421 case llvm::Triple::riscv32:
5422 case llvm::Triple::riscv64:
5432 if (getContext().
BuiltinInfo.isAuxBuiltinID(BuiltinID)) {
5433 assert(getContext().getAuxTargetInfo() &&
"Missing aux target info");
5435 this, getContext().
BuiltinInfo.getAuxBuiltinID(BuiltinID), E,
5436 ReturnValue, getContext().getAuxTargetInfo()->getTriple().getArch());
5440 getTarget().getTriple().getArch());
5445 bool HasLegalHalfType =
true,
5447 bool AllowBFloatArgsAndRet =
true) {
5448 int IsQuad = TypeFlags.
isQuad();
5452 return llvm::FixedVectorType::get(CGF->
Int8Ty, V1Ty ? 1 : (8 << IsQuad));
5455 return llvm::FixedVectorType::get(CGF->
Int16Ty, V1Ty ? 1 : (4 << IsQuad));
5457 if (AllowBFloatArgsAndRet)
5458 return llvm::FixedVectorType::get(CGF->
BFloatTy, V1Ty ? 1 : (4 << IsQuad));
5460 return llvm::FixedVectorType::get(CGF->
Int16Ty, V1Ty ? 1 : (4 << IsQuad));
5462 if (HasLegalHalfType)
5463 return llvm::FixedVectorType::get(CGF->
HalfTy, V1Ty ? 1 : (4 << IsQuad));
5465 return llvm::FixedVectorType::get(CGF->
Int16Ty, V1Ty ? 1 : (4 << IsQuad));
5467 return llvm::FixedVectorType::get(CGF->
Int32Ty, V1Ty ? 1 : (2 << IsQuad));
5470 return llvm::FixedVectorType::get(CGF->
Int64Ty, V1Ty ? 1 : (1 << IsQuad));
5475 return llvm::FixedVectorType::get(CGF->
Int8Ty, 16);
5477 return llvm::FixedVectorType::get(CGF->
FloatTy, V1Ty ? 1 : (2 << IsQuad));
5479 return llvm::FixedVectorType::get(CGF->
DoubleTy, V1Ty ? 1 : (1 << IsQuad));
5481 llvm_unreachable(
"Unknown vector element type!");
5486 int IsQuad = IntTypeFlags.
isQuad();
5489 return llvm::FixedVectorType::get(CGF->
HalfTy, (4 << IsQuad));
5491 return llvm::FixedVectorType::get(CGF->
FloatTy, (2 << IsQuad));
5493 return llvm::FixedVectorType::get(CGF->
DoubleTy, (1 << IsQuad));
5495 llvm_unreachable(
"Type can't be converted to floating-point!");