1//===--- InterpBuiltin.cpp - Interpreter for the constexpr VM ---*- C++ -*-===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
9#include "Boolean.h"
10#include "EvalEmitter.h"
11#include "Interp.h"
13#include "PrimType.h"
14#include "clang/AST/OSLog.h"
19#include "llvm/ADT/StringExtras.h"
20#include "llvm/Support/ErrorHandling.h"
21#include "llvm/Support/SipHash.h"
22
23namespace clang {
24namespace interp {
25
26LLVM_ATTRIBUTE_UNUSED static bool isNoopBuiltin(unsigned ID) {
27 switch (ID) {
28 case Builtin::BIas_const:
29 case Builtin::BIforward:
30 case Builtin::BIforward_like:
31 case Builtin::BImove:
32 case Builtin::BImove_if_noexcept:
33 case Builtin::BIaddressof:
34 case Builtin::BI__addressof:
35 case Builtin::BI__builtin_addressof:
36 case Builtin::BI__builtin_launder:
37 return true;
38 default:
39 return false;
40 }
41 return false;
42}
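// Editor's note (illustrative, not from the original source): the IDs above are
// treated as pass-throughs by the VM, e.g.
//   constexpr int i = 0;
//   static_assert(__builtin_addressof(i) == &i); // folded with no real call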
43
44static void discard(InterpStack &Stk, PrimType T) {
45 TYPE_SWITCH(T, { Stk.discard<T>(); });
46}
47
48static APSInt popToAPSInt(InterpStack &Stk, PrimType T) {
49 INT_TYPE_SWITCH(T, return Stk.pop<T>().toAPSInt());
50}
51
52static APSInt popToAPSInt(InterpState &S, const Expr *E) {
53 return popToAPSInt(S.Stk, *S.getContext().classify(E->getType()));
54}
55static APSInt popToAPSInt(InterpState &S, QualType T) {
56 return popToAPSInt(S.Stk, *S.getContext().classify(T));
57}
58
59/// Pushes \p Val on the stack as the type given by \p QT.
60static void pushInteger(InterpState &S, const APSInt &Val, QualType QT) {
61 assert(QT->isSignedIntegerOrEnumerationType() ||
62 QT->isUnsignedIntegerOrEnumerationType());
63 OptPrimType T = S.getContext().classify(QT);
64 assert(T);
65
66 unsigned BitWidth = S.getASTContext().getTypeSize(QT);
67
68 if (T == PT_IntAPS) {
69 auto Result = S.allocAP<IntegralAP<true>>(BitWidth);
70 Result.copy(Val);
71 S.Stk.push<IntegralAP<true>>(Result);
72 return;
73 }
74
75 if (T == PT_IntAP) {
76 auto Result = S.allocAP<IntegralAP<false>>(BitWidth);
77 Result.copy(Val);
78 S.Stk.push<IntegralAP<false>>(Result);
79 return;
80 }
81
82 if (QT->isSignedIntegerOrEnumerationType()) {
83 int64_t V = Val.getSExtValue();
84 INT_TYPE_SWITCH(*T, { S.Stk.push<T>(T::from(V, BitWidth)); });
85 } else {
86 assert(QT->isUnsignedIntegerOrEnumerationType());
87 uint64_t V = Val.getZExtValue();
88 INT_TYPE_SWITCH(*T, { S.Stk.push<T>(T::from(V, BitWidth)); });
89 }
90}
91
92template <typename T>
93static void pushInteger(InterpState &S, T Val, QualType QT) {
94 if constexpr (std::is_same_v<T, APInt>)
95 pushInteger(S, APSInt(Val, !std::is_signed_v<T>), QT);
96 else if constexpr (std::is_same_v<T, APSInt>)
97 pushInteger(S, Val, QT);
98 else
99 pushInteger(S,
100 APSInt(APInt(sizeof(T) * 8, static_cast<uint64_t>(Val),
101 std::is_signed_v<T>),
102 !std::is_signed_v<T>),
103 QT);
104}
105
106static void assignInteger(InterpState &S, const Pointer &Dest, PrimType ValueT,
107 const APSInt &Value) {
108
109 if (ValueT == PT_IntAPS) {
110 Dest.deref<IntegralAP<true>>() =
111 S.allocAP<IntegralAP<true>>(Value.getBitWidth());
112 Dest.deref<IntegralAP<true>>().copy(Value);
113 } else if (ValueT == PT_IntAP) {
114 Dest.deref<IntegralAP<false>>() =
115 S.allocAP<IntegralAP<false>>(Value.getBitWidth());
116 Dest.deref<IntegralAP<false>>().copy(Value);
117 } else {
118 INT_TYPE_SWITCH_NO_BOOL(
119 ValueT, { Dest.deref<T>() = T::from(static_cast<T>(Value)); });
120 }
121}
122
123static QualType getElemType(const Pointer &P) {
124 const Descriptor *Desc = P.getFieldDesc();
125 QualType T = Desc->getType();
126 if (Desc->isPrimitive())
127 return T;
128 if (T->isPointerType())
129 return T->getAs<PointerType>()->getPointeeType();
130 if (Desc->isArray())
131 return Desc->getElemQualType();
132 if (const auto *AT = T->getAsArrayTypeUnsafe())
133 return AT->getElementType();
134 return T;
135}
136
137static void diagnoseNonConstexprBuiltin(InterpState &S, CodePtr OpPC,
138 unsigned ID) {
139 if (!S.diagnosing())
140 return;
141
142 auto Loc = S.Current->getSource(OpPC);
143 if (S.getLangOpts().CPlusPlus11)
144 S.CCEDiag(Loc, diag::note_constexpr_invalid_function)
145 << /*isConstexpr=*/0 << /*isConstructor=*/0
146 << S.getASTContext().BuiltinInfo.getQuotedName(ID);
147 else
148 S.CCEDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
149}
150
151static llvm::APSInt convertBoolVectorToInt(const Pointer &Val) {
152 assert(Val.getFieldDesc()->isPrimitiveArray() &&
154 "Not a boolean vector");
155 unsigned NumElems = Val.getNumElems();
156
158 // Each element is one bit, so create an integer with NumElems bits.
158 llvm::APSInt Result(NumElems, 0);
159 for (unsigned I = 0; I != NumElems; ++I) {
160 if (Val.elem<bool>(I))
161 Result.setBit(I);
162 }
163
164 return Result;
165}
166
167static bool interp__builtin_is_constant_evaluated(InterpState &S, CodePtr OpPC,
168 const InterpFrame *Frame,
169 const CallExpr *Call) {
170 unsigned Depth = S.Current->getDepth();
171 auto isStdCall = [](const FunctionDecl *F) -> bool {
172 return F && F->isInStdNamespace() && F->getIdentifier() &&
173 F->getIdentifier()->isStr("is_constant_evaluated");
174 };
175 const InterpFrame *Caller = Frame->Caller;
176 // The current frame is the one for __builtin_is_constant_evaluated.
177 // The one above that, potentially the one for std::is_constant_evaluated().
178 if (S.inConstantContext() && !S.checkingPotentialConstantExpression() &&
179 S.getEvalStatus().Diag &&
180 (Depth == 0 || (Depth == 1 && isStdCall(Frame->getCallee())))) {
181 if (Caller && isStdCall(Frame->getCallee())) {
182 const Expr *E = Caller->getExpr(Caller->getRetPC());
183 S.report(E->getExprLoc(),
184 diag::warn_is_constant_evaluated_always_true_constexpr)
185 << "std::is_constant_evaluated" << E->getSourceRange();
186 } else {
187 S.report(Call->getExprLoc(),
188 diag::warn_is_constant_evaluated_always_true_constexpr)
189 << "__builtin_is_constant_evaluated" << Call->getSourceRange();
190 }
191 }
192
193 S.Stk.push<Boolean>(Boolean::from(S.inConstantContext()));
194 return true;
195}
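// Editor's illustrative example of the diagnostic above (assumes a manifestly
// constant-evaluated context):
//   constexpr int x = __builtin_is_constant_evaluated(); // always true here,
//                                                        // so Clang warns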
196
197// __builtin_assume(int)
198static bool interp__builtin_assume(InterpState &S, CodePtr OpPC,
199 const InterpFrame *Frame,
200 const CallExpr *Call) {
201 assert(Call->getNumArgs() == 1);
202 discard(S.Stk, *S.getContext().classify(Call->getArg(0)));
203 return true;
204}
205
206static bool interp__builtin_strcmp(InterpState &S, CodePtr OpPC,
207 const InterpFrame *Frame,
208 const CallExpr *Call, unsigned ID) {
209 uint64_t Limit = ~static_cast<uint64_t>(0);
210 if (ID == Builtin::BIstrncmp || ID == Builtin::BI__builtin_strncmp ||
211 ID == Builtin::BIwcsncmp || ID == Builtin::BI__builtin_wcsncmp)
212 Limit = popToAPSInt(S.Stk, *S.getContext().classify(Call->getArg(2)))
213 .getZExtValue();
214
215 const Pointer &B = S.Stk.pop<Pointer>();
216 const Pointer &A = S.Stk.pop<Pointer>();
217 if (ID == Builtin::BIstrcmp || ID == Builtin::BIstrncmp ||
218 ID == Builtin::BIwcscmp || ID == Builtin::BIwcsncmp)
219 diagnoseNonConstexprBuiltin(S, OpPC, ID);
220
221 if (Limit == 0) {
222 pushInteger(S, 0, Call->getType());
223 return true;
224 }
225
226 if (!CheckLive(S, OpPC, A, AK_Read) || !CheckLive(S, OpPC, B, AK_Read))
227 return false;
228
229 if (A.isDummy() || B.isDummy())
230 return false;
231 if (!A.isBlockPointer() || !B.isBlockPointer())
232 return false;
233
234 bool IsWide = ID == Builtin::BIwcscmp || ID == Builtin::BIwcsncmp ||
235 ID == Builtin::BI__builtin_wcscmp ||
236 ID == Builtin::BI__builtin_wcsncmp;
237 assert(A.getFieldDesc()->isPrimitiveArray());
238 assert(B.getFieldDesc()->isPrimitiveArray());
239
240 // Different element types shouldn't happen, but with casts they can.
241 if (getElemType(A) != getElemType(B))
242 return false;
243
244 PrimType ElemT = *S.getContext().classify(getElemType(A));
245
246 auto returnResult = [&](int V) -> bool {
247 pushInteger(S, V, Call->getType());
248 return true;
249 };
250
251 unsigned IndexA = A.getIndex();
252 unsigned IndexB = B.getIndex();
253 uint64_t Steps = 0;
254 for (;; ++IndexA, ++IndexB, ++Steps) {
255
256 if (Steps >= Limit)
257 break;
258 const Pointer &PA = A.atIndex(IndexA);
259 const Pointer &PB = B.atIndex(IndexB);
260 if (!CheckRange(S, OpPC, PA, AK_Read) ||
261 !CheckRange(S, OpPC, PB, AK_Read)) {
262 return false;
263 }
264
265 if (IsWide) {
266 INT_TYPE_SWITCH(ElemT, {
267 T CA = PA.deref<T>();
268 T CB = PB.deref<T>();
269 if (CA > CB)
270 return returnResult(1);
271 if (CA < CB)
272 return returnResult(-1);
273 if (CA.isZero() || CB.isZero())
274 return returnResult(0);
275 });
276 continue;
277 }
278
279 uint8_t CA = PA.deref<uint8_t>();
280 uint8_t CB = PB.deref<uint8_t>();
281
282 if (CA > CB)
283 return returnResult(1);
284 if (CA < CB)
285 return returnResult(-1);
286 if (CA == 0 || CB == 0)
287 return returnResult(0);
288 }
289
290 return returnResult(0);
291}
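// Editor's illustrative usage (relies only on the usual negative/zero/positive
// strcmp contract implemented above):
//   static_assert(__builtin_strcmp("abc", "abd") < 0);
//   static_assert(__builtin_strncmp("abc", "abd", 2) == 0); // limit hit first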
292
293static bool interp__builtin_strlen(InterpState &S, CodePtr OpPC,
294 const InterpFrame *Frame,
295 const CallExpr *Call, unsigned ID) {
296 const Pointer &StrPtr = S.Stk.pop<Pointer>();
297
298 if (ID == Builtin::BIstrlen || ID == Builtin::BIwcslen)
299 diagnoseNonConstexprBuiltin(S, OpPC, ID);
300
301 if (!CheckArray(S, OpPC, StrPtr))
302 return false;
303
304 if (!CheckLive(S, OpPC, StrPtr, AK_Read))
305 return false;
306
307 if (!CheckDummy(S, OpPC, StrPtr.block(), AK_Read))
308 return false;
309
310 if (!StrPtr.getFieldDesc()->isPrimitiveArray())
311 return false;
312
313 assert(StrPtr.getFieldDesc()->isPrimitiveArray());
314 unsigned ElemSize = StrPtr.getFieldDesc()->getElemSize();
315
316 if (ID == Builtin::BI__builtin_wcslen || ID == Builtin::BIwcslen) {
317 [[maybe_unused]] const ASTContext &AC = S.getASTContext();
318 assert(ElemSize == AC.getTypeSizeInChars(AC.getWCharType()).getQuantity());
319 }
320
321 size_t Len = 0;
322 for (size_t I = StrPtr.getIndex();; ++I, ++Len) {
323 const Pointer &ElemPtr = StrPtr.atIndex(I);
324
325 if (!CheckRange(S, OpPC, ElemPtr, AK_Read))
326 return false;
327
328 uint32_t Val;
329 switch (ElemSize) {
330 case 1:
331 Val = ElemPtr.deref<uint8_t>();
332 break;
333 case 2:
334 Val = ElemPtr.deref<uint16_t>();
335 break;
336 case 4:
337 Val = ElemPtr.deref<uint32_t>();
338 break;
339 default:
340 llvm_unreachable("Unsupported char size");
341 }
342 if (Val == 0)
343 break;
344 }
345
346 pushInteger(S, Len, Call->getType());
347
348 return true;
349}
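// Editor's illustrative usage:
//   static_assert(__builtin_strlen("hello") == 5);
//   static_assert(__builtin_wcslen(L"hi") == 2);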
350
351static bool interp__builtin_nan(InterpState &S, CodePtr OpPC,
352 const InterpFrame *Frame, const CallExpr *Call,
353 bool Signaling) {
354 const Pointer &Arg = S.Stk.pop<Pointer>();
355
356 if (!CheckLoad(S, OpPC, Arg))
357 return false;
358
359 assert(Arg.getFieldDesc()->isPrimitiveArray());
360
361 // Convert the given string to an integer using StringRef's API.
362 llvm::APInt Fill;
363 std::string Str;
364 assert(Arg.getNumElems() >= 1);
365 for (unsigned I = 0;; ++I) {
366 const Pointer &Elem = Arg.atIndex(I);
367
368 if (!CheckLoad(S, OpPC, Elem))
369 return false;
370
371 if (Elem.deref<int8_t>() == 0)
372 break;
373
374 Str += Elem.deref<char>();
375 }
376
377 // Treat empty strings as if they were zero.
378 if (Str.empty())
379 Fill = llvm::APInt(32, 0);
380 else if (StringRef(Str).getAsInteger(0, Fill))
381 return false;
382
383 const llvm::fltSemantics &TargetSemantics =
384 S.getASTContext().getFloatTypeSemantics(
385 Call->getDirectCallee()->getReturnType());
386
387 Floating Result = S.allocFloat(TargetSemantics);
388 if (S.getASTContext().getTargetInfo().isNan2008()) {
389 if (Signaling)
390 Result.copy(
391 llvm::APFloat::getSNaN(TargetSemantics, /*Negative=*/false, &Fill));
392 else
393 Result.copy(
394 llvm::APFloat::getQNaN(TargetSemantics, /*Negative=*/false, &Fill));
395 } else {
396 // Prior to IEEE 754-2008, architectures were allowed to choose whether
397 // the first bit of their significand was set for qNaN or sNaN. MIPS chose
398 // a different encoding to what became a standard in 2008, and for pre-
399 // 2008 revisions, MIPS interpreted sNaN-2008 as qNaN and qNaN-2008 as
400 // sNaN. This is now known as "legacy NaN" encoding.
401 if (Signaling)
402 Result.copy(
403 llvm::APFloat::getQNaN(TargetSemantics, /*Negative=*/false, &Fill));
404 else
405 Result.copy(
406 llvm::APFloat::getSNaN(TargetSemantics, /*Negative=*/false, &Fill));
407 }
408
409 S.Stk.push<Floating>(Result);
410 return true;
411}
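// Editor's illustrative usage (the string argument encodes the NaN payload;
// __builtin_nans produces the signaling variant handled above):
//   static_assert(__builtin_isnan(__builtin_nan("")));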
412
413static bool interp__builtin_inf(InterpState &S, CodePtr OpPC,
414 const InterpFrame *Frame,
415 const CallExpr *Call) {
416 const llvm::fltSemantics &TargetSemantics =
417 S.getASTContext().getFloatTypeSemantics(
418 Call->getDirectCallee()->getReturnType());
419
420 Floating Result = S.allocFloat(TargetSemantics);
421 Result.copy(APFloat::getInf(TargetSemantics));
422 S.Stk.push<Floating>(Result);
423 return true;
424}
425
426static bool interp__builtin_copysign(InterpState &S, CodePtr OpPC,
427 const InterpFrame *Frame) {
428 const Floating &Arg2 = S.Stk.pop<Floating>();
429 const Floating &Arg1 = S.Stk.pop<Floating>();
430 Floating Result = S.allocFloat(Arg1.getSemantics());
431
432 APFloat Copy = Arg1.getAPFloat();
433 Copy.copySign(Arg2.getAPFloat());
434 Result.copy(Copy);
435 S.Stk.push<Floating>(Result);
436
437 return true;
438}
439
440static bool interp__builtin_fmin(InterpState &S, CodePtr OpPC,
441 const InterpFrame *Frame, bool IsNumBuiltin) {
442 const Floating &RHS = S.Stk.pop<Floating>();
443 const Floating &LHS = S.Stk.pop<Floating>();
444 Floating Result = S.allocFloat(LHS.getSemantics());
445
446 if (IsNumBuiltin)
447 Result.copy(llvm::minimumnum(LHS.getAPFloat(), RHS.getAPFloat()));
448 else
449 Result.copy(minnum(LHS.getAPFloat(), RHS.getAPFloat()));
450 S.Stk.push<Floating>(Result);
451 return true;
452}
453
454static bool interp__builtin_fmax(InterpState &S, CodePtr OpPC,
455 const InterpFrame *Frame, bool IsNumBuiltin) {
456 const Floating &RHS = S.Stk.pop<Floating>();
457 const Floating &LHS = S.Stk.pop<Floating>();
458 Floating Result = S.allocFloat(LHS.getSemantics());
459
460 if (IsNumBuiltin)
461 Result.copy(llvm::maximumnum(LHS.getAPFloat(), RHS.getAPFloat()));
462 else
463 Result.copy(maxnum(LHS.getAPFloat(), RHS.getAPFloat()));
464 S.Stk.push<Floating>(Result);
465 return true;
466}
467
468/// Defined as __builtin_isnan(...), to accommodate the fact that it can
469/// take a float, double, long double, etc.
470/// But for us, that's all a Floating anyway.
471static bool interp__builtin_isnan(InterpState &S, CodePtr OpPC,
472 const InterpFrame *Frame,
473 const CallExpr *Call) {
474 const Floating &Arg = S.Stk.pop<Floating>();
475
476 pushInteger(S, Arg.isNan(), Call->getType());
477 return true;
478}
479
480static bool interp__builtin_issignaling(InterpState &S, CodePtr OpPC,
481 const InterpFrame *Frame,
482 const CallExpr *Call) {
483 const Floating &Arg = S.Stk.pop<Floating>();
484
485 pushInteger(S, Arg.isSignaling(), Call->getType());
486 return true;
487}
488
489static bool interp__builtin_isinf(InterpState &S, CodePtr OpPC,
490 const InterpFrame *Frame, bool CheckSign,
491 const CallExpr *Call) {
492 const Floating &Arg = S.Stk.pop<Floating>();
493 APFloat F = Arg.getAPFloat();
494 bool IsInf = F.isInfinity();
495
496 if (CheckSign)
497 pushInteger(S, IsInf ? (F.isNegative() ? -1 : 1) : 0, Call->getType());
498 else
499 pushInteger(S, IsInf, Call->getType());
500 return true;
501}
502
503static bool interp__builtin_isfinite(InterpState &S, CodePtr OpPC,
504 const InterpFrame *Frame,
505 const CallExpr *Call) {
506 const Floating &Arg = S.Stk.pop<Floating>();
507
508 pushInteger(S, Arg.isFinite(), Call->getType());
509 return true;
510}
511
512static bool interp__builtin_isnormal(InterpState &S, CodePtr OpPC,
513 const InterpFrame *Frame,
514 const CallExpr *Call) {
515 const Floating &Arg = S.Stk.pop<Floating>();
516
517 pushInteger(S, Arg.isNormal(), Call->getType());
518 return true;
519}
520
521static bool interp__builtin_issubnormal(InterpState &S, CodePtr OpPC,
522 const InterpFrame *Frame,
523 const CallExpr *Call) {
524 const Floating &Arg = S.Stk.pop<Floating>();
525
526 pushInteger(S, Arg.isDenormal(), Call->getType());
527 return true;
528}
529
530static bool interp__builtin_iszero(InterpState &S, CodePtr OpPC,
531 const InterpFrame *Frame,
532 const CallExpr *Call) {
533 const Floating &Arg = S.Stk.pop<Floating>();
534
535 pushInteger(S, Arg.isZero(), Call->getType());
536 return true;
537}
538
539static bool interp__builtin_signbit(InterpState &S, CodePtr OpPC,
540 const InterpFrame *Frame,
541 const CallExpr *Call) {
542 const Floating &Arg = S.Stk.pop<Floating>();
543
544 pushInteger(S, Arg.isNegative(), Call->getType());
545 return true;
546}
547
548static bool interp_floating_comparison(InterpState &S, CodePtr OpPC,
549 const CallExpr *Call, unsigned ID) {
550 const Floating &RHS = S.Stk.pop<Floating>();
551 const Floating &LHS = S.Stk.pop<Floating>();
552
553 pushInteger(
554 S,
555 [&] {
556 switch (ID) {
557 case Builtin::BI__builtin_isgreater:
558 return LHS > RHS;
559 case Builtin::BI__builtin_isgreaterequal:
560 return LHS >= RHS;
561 case Builtin::BI__builtin_isless:
562 return LHS < RHS;
563 case Builtin::BI__builtin_islessequal:
564 return LHS <= RHS;
565 case Builtin::BI__builtin_islessgreater: {
566 ComparisonCategoryResult Cmp = LHS.compare(RHS);
567 return Cmp == ComparisonCategoryResult::Less ||
568 Cmp == ComparisonCategoryResult::Greater;
569 }
570 case Builtin::BI__builtin_isunordered:
571 return LHS.compare(RHS) == ComparisonCategoryResult::Unordered;
572 default:
573 llvm_unreachable("Unexpected builtin ID: Should be a floating point "
574 "comparison function");
575 }
576 }(),
577 Call->getType());
578 return true;
579}
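// Editor's illustrative usage of the unordered-aware comparisons above:
//   static_assert(__builtin_isless(1.0, 2.0));
//   static_assert(__builtin_isunordered(__builtin_nan(""), 1.0));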
580
581/// First parameter to __builtin_isfpclass is the floating value, the
582/// second one is an integral value.
583static bool interp__builtin_isfpclass(InterpState &S, CodePtr OpPC,
584 const InterpFrame *Frame,
585 const CallExpr *Call) {
586 APSInt FPClassArg = popToAPSInt(S, Call->getArg(1));
587 const Floating &F = S.Stk.pop<Floating>();
588
589 int32_t Result = static_cast<int32_t>(
590 (F.classify() & std::move(FPClassArg)).getZExtValue());
591 pushInteger(S, Result, Call->getType());
592
593 return true;
594}
595
596/// Five int values followed by one floating value.
597/// __builtin_fpclassify(int, int, int, int, int, float)
598static bool interp__builtin_fpclassify(InterpState &S, CodePtr OpPC,
599 const InterpFrame *Frame,
600 const CallExpr *Call) {
601 const Floating &Val = S.Stk.pop<Floating>();
602
603 PrimType IntT = *S.getContext().classify(Call->getArg(0));
604 APSInt Values[5];
605 for (unsigned I = 0; I != 5; ++I)
606 Values[4 - I] = popToAPSInt(S.Stk, IntT);
607
608 unsigned Index;
609 switch (Val.getCategory()) {
610 case APFloat::fcNaN:
611 Index = 0;
612 break;
613 case APFloat::fcInfinity:
614 Index = 1;
615 break;
616 case APFloat::fcNormal:
617 Index = Val.isDenormal() ? 3 : 2;
618 break;
619 case APFloat::fcZero:
620 Index = 4;
621 break;
622 }
623
624 // The last argument is first on the stack.
625 assert(Index <= 4);
626
627 pushInteger(S, Values[Index], Call->getType());
628 return true;
629}
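// Editor's illustrative usage: the five integer arguments are selected for
// NaN, infinity, normal, subnormal and zero inputs, in that order.
//   static_assert(__builtin_fpclassify(0, 1, 2, 3, 4, 1.0) == 2); // normal
//   static_assert(__builtin_fpclassify(0, 1, 2, 3, 4, 0.0) == 4); // zero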
630
631static inline Floating abs(InterpState &S, const Floating &In) {
632 if (!In.isNegative())
633 return In;
634
635 Floating Output = S.allocFloat(In.getSemantics());
636 APFloat New = In.getAPFloat();
637 New.changeSign();
638 Output.copy(New);
639 return Output;
640}
641
642// The C standard says "fabs raises no floating-point exceptions,
643// even if x is a signaling NaN. The returned value is independent of
644// the current rounding direction mode." Therefore constant folding can
645// proceed without regard to the floating point settings.
646// Reference, WG14 N2478 F.10.4.3
647static bool interp__builtin_fabs(InterpState &S, CodePtr OpPC,
648 const InterpFrame *Frame) {
649 const Floating &Val = S.Stk.pop<Floating>();
650 S.Stk.push<Floating>(abs(S, Val));
651 return true;
652}
653
654static bool interp__builtin_abs(InterpState &S, CodePtr OpPC,
655 const InterpFrame *Frame,
656 const CallExpr *Call) {
657 APSInt Val = popToAPSInt(S, Call->getArg(0));
658 if (Val ==
659 APSInt(APInt::getSignedMinValue(Val.getBitWidth()), /*IsUnsigned=*/false))
660 return false;
661 if (Val.isNegative())
662 Val.negate();
663 pushInteger(S, Val, Call->getType());
664 return true;
665}
666
667static bool interp__builtin_popcount(InterpState &S, CodePtr OpPC,
668 const InterpFrame *Frame,
669 const CallExpr *Call) {
670 APSInt Val;
671 if (Call->getArg(0)->getType()->isExtVectorBoolType()) {
672 const Pointer &Arg = S.Stk.pop<Pointer>();
673 Val = convertBoolVectorToInt(Arg);
674 } else {
675 Val = popToAPSInt(S, Call->getArg(0));
676 }
677 pushInteger(S, Val.popcount(), Call->getType());
678 return true;
679}
680
681static bool interp__builtin_parity(InterpState &S, CodePtr OpPC,
682 const InterpFrame *Frame,
683 const CallExpr *Call) {
684 APSInt Val = popToAPSInt(S, Call->getArg(0));
685 pushInteger(S, Val.popcount() % 2, Call->getType());
686 return true;
687}
688
689static bool interp__builtin_clrsb(InterpState &S, CodePtr OpPC,
690 const InterpFrame *Frame,
691 const CallExpr *Call) {
692 APSInt Val = popToAPSInt(S, Call->getArg(0));
693 pushInteger(S, Val.getBitWidth() - Val.getSignificantBits(), Call->getType());
694 return true;
695}
696
697static bool interp__builtin_bitreverse(InterpState &S, CodePtr OpPC,
698 const InterpFrame *Frame,
699 const CallExpr *Call) {
700 APSInt Val = popToAPSInt(S, Call->getArg(0));
701 pushInteger(S, Val.reverseBits(), Call->getType());
702 return true;
703}
704
705static bool interp__builtin_classify_type(InterpState &S, CodePtr OpPC,
706 const InterpFrame *Frame,
707 const CallExpr *Call) {
708 // This is an unevaluated call, so there are no arguments on the stack.
709 assert(Call->getNumArgs() == 1);
710 const Expr *Arg = Call->getArg(0);
711
712 GCCTypeClass ResultClass =
713 EvaluateBuiltinClassifyType(Arg->getType(), S.getLangOpts());
714 int32_t ReturnVal = static_cast<int32_t>(ResultClass);
715 pushInteger(S, ReturnVal, Call->getType());
716 return true;
717}
718
719// __builtin_expect(long, long)
720// __builtin_expect_with_probability(long, long, double)
721static bool interp__builtin_expect(InterpState &S, CodePtr OpPC,
722 const InterpFrame *Frame,
723 const CallExpr *Call) {
724 // The return value is simply the value of the first parameter.
725 // We ignore the probability.
726 unsigned NumArgs = Call->getNumArgs();
727 assert(NumArgs == 2 || NumArgs == 3);
728
729 PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
730 if (NumArgs == 3)
731 S.Stk.discard<Floating>();
732 discard(S.Stk, ArgT);
733
734 APSInt Val = popToAPSInt(S.Stk, ArgT);
735 pushInteger(S, Val, Call->getType());
736 return true;
737}
738
739static bool interp__builtin_ffs(InterpState &S, CodePtr OpPC,
740 const InterpFrame *Frame,
741 const CallExpr *Call) {
742 APSInt Value = popToAPSInt(S, Call->getArg(0));
743
744 uint64_t N = Value.countr_zero();
745 pushInteger(S, N == Value.getBitWidth() ? 0 : N + 1, Call->getType());
746 return true;
747}
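// Editor's illustrative usage (ffs is the 1-based index of the lowest set bit,
// or 0 for a zero argument):
//   static_assert(__builtin_ffs(0) == 0);
//   static_assert(__builtin_ffs(8) == 4);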
748
749static bool interp__builtin_addressof(InterpState &S, CodePtr OpPC,
750 const InterpFrame *Frame,
751 const CallExpr *Call) {
752#ifndef NDEBUG
753 assert(Call->getArg(0)->isLValue());
754 PrimType PtrT = S.getContext().classify(Call->getArg(0)).value_or(PT_Ptr);
755 assert(PtrT == PT_Ptr &&
756 "Unsupported pointer type passed to __builtin_addressof()");
757#endif
758 return true;
759}
760
762 const InterpFrame *Frame,
763 const CallExpr *Call) {
764 return Call->getDirectCallee()->isConstexpr();
765}
766
767static bool interp__builtin_eh_return_data_regno(InterpState &S, CodePtr OpPC,
768 const InterpFrame *Frame,
769 const CallExpr *Call) {
770 APSInt Arg = popToAPSInt(S, Call->getArg(0));
771
772 int Result = S.getASTContext().getTargetInfo().getEHDataRegisterNumber(
773 Arg.getZExtValue());
774 pushInteger(S, Result, Call->getType());
775 return true;
776}
777
778// Two integral values followed by a pointer (lhs, rhs, resultOut)
779static bool interp__builtin_overflowop(InterpState &S, CodePtr OpPC,
780 const CallExpr *Call,
781 unsigned BuiltinOp) {
782 const Pointer &ResultPtr = S.Stk.pop<Pointer>();
783 if (ResultPtr.isDummy())
784 return false;
785
786 PrimType RHST = *S.getContext().classify(Call->getArg(1)->getType());
787 PrimType LHST = *S.getContext().classify(Call->getArg(0)->getType());
788 APSInt RHS = popToAPSInt(S.Stk, RHST);
789 APSInt LHS = popToAPSInt(S.Stk, LHST);
790 QualType ResultType = Call->getArg(2)->getType()->getPointeeType();
791 PrimType ResultT = *S.getContext().classify(ResultType);
792 bool Overflow;
793
794 APSInt Result;
795 if (BuiltinOp == Builtin::BI__builtin_add_overflow ||
796 BuiltinOp == Builtin::BI__builtin_sub_overflow ||
797 BuiltinOp == Builtin::BI__builtin_mul_overflow) {
798 bool IsSigned = LHS.isSigned() || RHS.isSigned() ||
799 ResultType->isSignedIntegerOrEnumerationType();
800 bool AllSigned = LHS.isSigned() && RHS.isSigned() &&
801 ResultType->isSignedIntegerOrEnumerationType();
802 uint64_t LHSSize = LHS.getBitWidth();
803 uint64_t RHSSize = RHS.getBitWidth();
804 uint64_t ResultSize = S.getASTContext().getTypeSize(ResultType);
805 uint64_t MaxBits = std::max(std::max(LHSSize, RHSSize), ResultSize);
806
807 // Add an additional bit if the signedness isn't uniformly agreed to. We
808 // could do this ONLY if there is a signed and an unsigned that both have
809 // MaxBits, but the code to check that is pretty nasty. The issue will be
810 // caught in the shrink-to-result later anyway.
811 if (IsSigned && !AllSigned)
812 ++MaxBits;
813
814 LHS = APSInt(LHS.extOrTrunc(MaxBits), !IsSigned);
815 RHS = APSInt(RHS.extOrTrunc(MaxBits), !IsSigned);
816 Result = APSInt(MaxBits, !IsSigned);
817 }
818
819 // Find largest int.
820 switch (BuiltinOp) {
821 default:
822 llvm_unreachable("Invalid value for BuiltinOp");
823 case Builtin::BI__builtin_add_overflow:
824 case Builtin::BI__builtin_sadd_overflow:
825 case Builtin::BI__builtin_saddl_overflow:
826 case Builtin::BI__builtin_saddll_overflow:
827 case Builtin::BI__builtin_uadd_overflow:
828 case Builtin::BI__builtin_uaddl_overflow:
829 case Builtin::BI__builtin_uaddll_overflow:
830 Result = LHS.isSigned() ? LHS.sadd_ov(RHS, Overflow)
831 : LHS.uadd_ov(RHS, Overflow);
832 break;
833 case Builtin::BI__builtin_sub_overflow:
834 case Builtin::BI__builtin_ssub_overflow:
835 case Builtin::BI__builtin_ssubl_overflow:
836 case Builtin::BI__builtin_ssubll_overflow:
837 case Builtin::BI__builtin_usub_overflow:
838 case Builtin::BI__builtin_usubl_overflow:
839 case Builtin::BI__builtin_usubll_overflow:
840 Result = LHS.isSigned() ? LHS.ssub_ov(RHS, Overflow)
841 : LHS.usub_ov(RHS, Overflow);
842 break;
843 case Builtin::BI__builtin_mul_overflow:
844 case Builtin::BI__builtin_smul_overflow:
845 case Builtin::BI__builtin_smull_overflow:
846 case Builtin::BI__builtin_smulll_overflow:
847 case Builtin::BI__builtin_umul_overflow:
848 case Builtin::BI__builtin_umull_overflow:
849 case Builtin::BI__builtin_umulll_overflow:
850 Result = LHS.isSigned() ? LHS.smul_ov(RHS, Overflow)
851 : LHS.umul_ov(RHS, Overflow);
852 break;
853 }
854
855 // In the case where multiple sizes are allowed, truncate and see if
856 // the values are the same.
857 if (BuiltinOp == Builtin::BI__builtin_add_overflow ||
858 BuiltinOp == Builtin::BI__builtin_sub_overflow ||
859 BuiltinOp == Builtin::BI__builtin_mul_overflow) {
860 // APSInt doesn't have a TruncOrSelf, so we use extOrTrunc instead,
861 // since it will give us the behavior of a TruncOrSelf in the case where
862 // its parameter <= its size. We previously set Result to be at least the
863 // type-size of the result, so getTypeSize(ResultType) <= the result's bit width.
864 APSInt Temp = Result.extOrTrunc(S.getASTContext().getTypeSize(ResultType));
865 Temp.setIsSigned(ResultType->isSignedIntegerOrEnumerationType());
866
867 if (!APSInt::isSameValue(Temp, Result))
868 Overflow = true;
869 Result = std::move(Temp);
870 }
871
872 // Write Result to ResultPtr and put Overflow on the stack.
873 assignInteger(S, ResultPtr, ResultT, Result);
874 if (ResultPtr.canBeInitialized())
875 ResultPtr.initialize();
876
877 assert(Call->getDirectCallee()->getReturnType()->isBooleanType());
878 S.Stk.push<Boolean>(Overflow);
879 return true;
880}
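// Editor's illustrative usage of the generic overflow builtins handled above:
//   constexpr bool overflows() {
//     int R = 0;
//     return __builtin_add_overflow(2147483647, 1, &R); // true, R wraps
//   }
//   static_assert(overflows());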
881
882/// Three integral values followed by a pointer (lhs, rhs, carry, carryOut).
883static bool interp__builtin_carryop(InterpState &S, CodePtr OpPC,
884 const InterpFrame *Frame,
885 const CallExpr *Call, unsigned BuiltinOp) {
886 const Pointer &CarryOutPtr = S.Stk.pop<Pointer>();
887 PrimType LHST = *S.getContext().classify(Call->getArg(0)->getType());
888 PrimType RHST = *S.getContext().classify(Call->getArg(1)->getType());
889 APSInt CarryIn = popToAPSInt(S.Stk, LHST);
890 APSInt RHS = popToAPSInt(S.Stk, RHST);
891 APSInt LHS = popToAPSInt(S.Stk, LHST);
892
893 if (CarryOutPtr.isDummy())
894 return false;
895
896 APSInt CarryOut;
897
898 APSInt Result;
899 // Copy the number of bits and sign.
900 Result = LHS;
901 CarryOut = LHS;
902
903 bool FirstOverflowed = false;
904 bool SecondOverflowed = false;
905 switch (BuiltinOp) {
906 default:
907 llvm_unreachable("Invalid value for BuiltinOp");
908 case Builtin::BI__builtin_addcb:
909 case Builtin::BI__builtin_addcs:
910 case Builtin::BI__builtin_addc:
911 case Builtin::BI__builtin_addcl:
912 case Builtin::BI__builtin_addcll:
913 Result =
914 LHS.uadd_ov(RHS, FirstOverflowed).uadd_ov(CarryIn, SecondOverflowed);
915 break;
916 case Builtin::BI__builtin_subcb:
917 case Builtin::BI__builtin_subcs:
918 case Builtin::BI__builtin_subc:
919 case Builtin::BI__builtin_subcl:
920 case Builtin::BI__builtin_subcll:
921 Result =
922 LHS.usub_ov(RHS, FirstOverflowed).usub_ov(CarryIn, SecondOverflowed);
923 break;
924 }
925 // It is possible for both overflows to happen but CGBuiltin uses an OR so
926 // this is consistent.
927 CarryOut = (uint64_t)(FirstOverflowed | SecondOverflowed);
928
929 QualType CarryOutType = Call->getArg(3)->getType()->getPointeeType();
930 PrimType CarryOutT = *S.getContext().classify(CarryOutType);
931 assignInteger(S, CarryOutPtr, CarryOutT, CarryOut);
932 CarryOutPtr.initialize();
933
934 assert(Call->getType() == Call->getArg(0)->getType());
935 pushInteger(S, Result, Call->getType());
936 return true;
937}
938
939static bool interp__builtin_clz(InterpState &S, CodePtr OpPC,
940 const InterpFrame *Frame, const CallExpr *Call,
941 unsigned BuiltinOp) {
942
943 std::optional<APSInt> Fallback;
944 if (BuiltinOp == Builtin::BI__builtin_clzg && Call->getNumArgs() == 2)
945 Fallback = popToAPSInt(S, Call->getArg(1));
946
947 APSInt Val;
948 if (Call->getArg(0)->getType()->isExtVectorBoolType()) {
949 const Pointer &Arg = S.Stk.pop<Pointer>();
950 Val = convertBoolVectorToInt(Arg);
951 } else {
952 Val = popToAPSInt(S, Call->getArg(0));
953 }
954
955 // When the argument is 0, the result of GCC builtins is undefined, whereas
956 // for Microsoft intrinsics, the result is the bit-width of the argument.
957 bool ZeroIsUndefined = BuiltinOp != Builtin::BI__lzcnt16 &&
958 BuiltinOp != Builtin::BI__lzcnt &&
959 BuiltinOp != Builtin::BI__lzcnt64;
960
961 if (Val == 0) {
962 if (Fallback) {
963 pushInteger(S, *Fallback, Call->getType());
964 return true;
965 }
966
967 if (ZeroIsUndefined)
968 return false;
969 }
970
971 pushInteger(S, Val.countl_zero(), Call->getType());
972 return true;
973}
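// Editor's illustrative usage (assumes 32-bit unsigned int; __builtin_clzg
// takes an optional fallback that is returned for a zero input):
//   static_assert(__builtin_clz(1u) == 31);
//   static_assert(__builtin_clzg(0u, 32) == 32);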
974
975static bool interp__builtin_ctz(InterpState &S, CodePtr OpPC,
976 const InterpFrame *Frame, const CallExpr *Call,
977 unsigned BuiltinID) {
978 std::optional<APSInt> Fallback;
979 if (BuiltinID == Builtin::BI__builtin_ctzg && Call->getNumArgs() == 2)
980 Fallback = popToAPSInt(S, Call->getArg(1));
981
982 APSInt Val;
983 if (Call->getArg(0)->getType()->isExtVectorBoolType()) {
984 const Pointer &Arg = S.Stk.pop<Pointer>();
985 Val = convertBoolVectorToInt(Arg);
986 } else {
987 Val = popToAPSInt(S, Call->getArg(0));
988 }
989
990 if (Val == 0) {
991 if (Fallback) {
992 pushInteger(S, *Fallback, Call->getType());
993 return true;
994 }
995 return false;
996 }
997
998 pushInteger(S, Val.countr_zero(), Call->getType());
999 return true;
1000}
1001
1002static bool interp__builtin_bswap(InterpState &S, CodePtr OpPC,
1003 const InterpFrame *Frame,
1004 const CallExpr *Call) {
1005 const APSInt &Val = popToAPSInt(S, Call->getArg(0));
1006 assert(Val.getActiveBits() <= 64);
1007
1008 pushInteger(S, Val.byteSwap(), Call->getType());
1009 return true;
1010}
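// Editor's illustrative usage:
//   static_assert(__builtin_bswap32(0x12345678u) == 0x78563412u);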
1011
1012/// bool __atomic_always_lock_free(size_t, void const volatile*)
1013/// bool __atomic_is_lock_free(size_t, void const volatile*)
1014static bool interp__builtin_atomic_lock_free(InterpState &S, CodePtr OpPC,
1015 const InterpFrame *Frame,
1016 const CallExpr *Call,
1017 unsigned BuiltinOp) {
1018 auto returnBool = [&S](bool Value) -> bool {
1019 S.Stk.push<Boolean>(Value);
1020 return true;
1021 };
1022
1023 const Pointer &Ptr = S.Stk.pop<Pointer>();
1024 const APSInt &SizeVal = popToAPSInt(S, Call->getArg(0));
1025
1026 // For __atomic_is_lock_free(sizeof(_Atomic(T))), if the size is a power
1027 // of two less than or equal to the maximum inline atomic width, we know it
1028 // is lock-free. If the size isn't a power of two, or greater than the
1029 // maximum alignment where we promote atomics, we know it is not lock-free
1030 // (at least not in the sense of atomic_is_lock_free). Otherwise,
1031 // the answer can only be determined at runtime; for example, 16-byte
1032 // atomics have lock-free implementations on some, but not all,
1033 // x86-64 processors.
1034
1035 // Check power-of-two.
1036 CharUnits Size = CharUnits::fromQuantity(SizeVal.getZExtValue());
1037 if (Size.isPowerOfTwo()) {
1038 // Check against inlining width.
1039 unsigned InlineWidthBits =
1040 S.getASTContext().getTargetInfo().getMaxAtomicInlineWidth();
1041 if (Size <= S.getASTContext().toCharUnitsFromBits(InlineWidthBits)) {
1042
1043 // OK, we will inline appropriately-aligned operations of this size,
1044 // and _Atomic(T) is appropriately-aligned.
1045 if (Size == CharUnits::One())
1046 return returnBool(true);
1047
1048 // Same for null pointers.
1049 assert(BuiltinOp != Builtin::BI__c11_atomic_is_lock_free);
1050 if (Ptr.isZero())
1051 return returnBool(true);
1052
1053 if (Ptr.isIntegralPointer()) {
1054 uint64_t IntVal = Ptr.getIntegerRepresentation();
1055 if (APSInt(APInt(64, IntVal, false), true).isAligned(Size.getAsAlign()))
1056 return returnBool(true);
1057 }
1058
1059 const Expr *PtrArg = Call->getArg(1);
1060 // Otherwise, check the type's alignment against Size.
1061 if (const auto *ICE = dyn_cast<ImplicitCastExpr>(PtrArg)) {
1062 // Drop the potential implicit-cast to 'const volatile void*', getting
1063 // the underlying type.
1064 if (ICE->getCastKind() == CK_BitCast)
1065 PtrArg = ICE->getSubExpr();
1066 }
1067
1068 if (const auto *PtrTy = PtrArg->getType()->getAs<PointerType>()) {
1069 QualType PointeeType = PtrTy->getPointeeType();
1070 if (!PointeeType->isIncompleteType() &&
1071 S.getASTContext().getTypeAlignInChars(PointeeType) >= Size) {
1072 // OK, we will inline operations on this object.
1073 return returnBool(true);
1074 }
1075 }
1076 }
1077 }
1078
1079 if (BuiltinOp == Builtin::BI__atomic_always_lock_free)
1080 return returnBool(false);
1081
1082 return false;
1083}
1084
1085/// bool __c11_atomic_is_lock_free(size_t)
1086static bool interp__builtin_c11_atomic_is_lock_free(InterpState &S,
1087 CodePtr OpPC,
1088 const InterpFrame *Frame,
1089 const CallExpr *Call) {
1090 const APSInt &SizeVal = popToAPSInt(S, Call->getArg(0));
1091
1092 CharUnits Size = CharUnits::fromQuantity(SizeVal.getZExtValue());
1093 if (Size.isPowerOfTwo()) {
1094 // Check against inlining width.
1095 unsigned InlineWidthBits =
1096 S.getASTContext().getTargetInfo().getMaxAtomicInlineWidth();
1097 if (Size <= S.getASTContext().toCharUnitsFromBits(InlineWidthBits)) {
1098 S.Stk.push<Boolean>(true);
1099 return true;
1100 }
1101 }
1102
1103 return false; // returnBool(false);
1104}
1105
1106/// __builtin_complex(Float A, float B);
1107static bool interp__builtin_complex(InterpState &S, CodePtr OpPC,
1108 const InterpFrame *Frame,
1109 const CallExpr *Call) {
1110 const Floating &Arg2 = S.Stk.pop<Floating>();
1111 const Floating &Arg1 = S.Stk.pop<Floating>();
1112 Pointer &Result = S.Stk.peek<Pointer>();
1113
1114 Result.elem<Floating>(0) = Arg1;
1115 Result.elem<Floating>(1) = Arg2;
1116 Result.initializeAllElements();
1117
1118 return true;
1119}
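// Editor's illustrative usage:
//   constexpr _Complex double Z = __builtin_complex(1.0, 2.0);
//   static_assert(__real__ Z == 1.0 && __imag__ Z == 2.0);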
1120
1121/// __builtin_is_aligned()
1122/// __builtin_align_up()
1123/// __builtin_align_down()
1124/// The first parameter is either an integer or a pointer.
1125/// The second parameter is the requested alignment as an integer.
1126static bool interp__builtin_is_aligned_up_down(InterpState &S, CodePtr OpPC,
1127 const InterpFrame *Frame,
1128 const CallExpr *Call,
1129 unsigned BuiltinOp) {
1130 const APSInt &Alignment = popToAPSInt(S, Call->getArg(1));
1131
1132 if (Alignment < 0 || !Alignment.isPowerOf2()) {
1133 S.FFDiag(Call, diag::note_constexpr_invalid_alignment) << Alignment;
1134 return false;
1135 }
1136 unsigned SrcWidth = S.getASTContext().getIntWidth(Call->getArg(0)->getType());
1137 APSInt MaxValue(APInt::getOneBitSet(SrcWidth, SrcWidth - 1));
1138 if (APSInt::compareValues(Alignment, MaxValue) > 0) {
1139 S.FFDiag(Call, diag::note_constexpr_alignment_too_big)
1140 << MaxValue << Call->getArg(0)->getType() << Alignment;
1141 return false;
1142 }
1143
1144 // The first parameter is either an integer or a pointer.
1145 PrimType FirstArgT = *S.Ctx.classify(Call->getArg(0));
1146
1147 if (isIntegralType(FirstArgT)) {
1148 const APSInt &Src = popToAPSInt(S.Stk, FirstArgT);
1149 APInt AlignMinusOne = Alignment.extOrTrunc(Src.getBitWidth()) - 1;
1150 if (BuiltinOp == Builtin::BI__builtin_align_up) {
1151 APSInt AlignedVal =
1152 APSInt((Src + AlignMinusOne) & ~AlignMinusOne, Src.isUnsigned());
1153 pushInteger(S, AlignedVal, Call->getType());
1154 } else if (BuiltinOp == Builtin::BI__builtin_align_down) {
1155 APSInt AlignedVal = APSInt(Src & ~AlignMinusOne, Src.isUnsigned());
1156 pushInteger(S, AlignedVal, Call->getType());
1157 } else {
1158 assert(*S.Ctx.classify(Call->getType()) == PT_Bool);
1159 S.Stk.push<Boolean>((Src & AlignMinusOne) == 0);
1160 }
1161 return true;
1162 }
1163 assert(FirstArgT == PT_Ptr);
1164 const Pointer &Ptr = S.Stk.pop<Pointer>();
1165 if (!Ptr.isBlockPointer())
1166 return false;
1167
1168 unsigned PtrOffset = Ptr.getIndex();
1169 CharUnits BaseAlignment =
1170 S.getASTContext().getDeclAlign(Ptr.getDeclDesc()->asValueDecl());
1171 CharUnits PtrAlign =
1172 BaseAlignment.alignmentAtOffset(CharUnits::fromQuantity(PtrOffset));
1173
1174 if (BuiltinOp == Builtin::BI__builtin_is_aligned) {
1175 if (PtrAlign.getQuantity() >= Alignment) {
1176 S.Stk.push<Boolean>(true);
1177 return true;
1178 }
1179 // If the alignment is not known to be sufficient, some cases could still
1180 // be aligned at run time. However, if the requested alignment is less or
1181 // equal to the base alignment and the offset is not aligned, we know that
1182 // the run-time value can never be aligned.
1183 if (BaseAlignment.getQuantity() >= Alignment &&
1184 PtrAlign.getQuantity() < Alignment) {
1185 S.Stk.push<Boolean>(false);
1186 return true;
1187 }
1188
1189 S.FFDiag(Call->getArg(0), diag::note_constexpr_alignment_compute)
1190 << Alignment;
1191 return false;
1192 }
1193
1194 assert(BuiltinOp == Builtin::BI__builtin_align_down ||
1195 BuiltinOp == Builtin::BI__builtin_align_up);
1196
1197 // For align_up/align_down, we can return the same value if the alignment
1198 // is known to be greater or equal to the requested value.
1199 if (PtrAlign.getQuantity() >= Alignment) {
1200 S.Stk.push<Pointer>(Ptr);
1201 return true;
1202 }
1203
1204 // The alignment could be greater than the minimum at run-time, so we cannot
1205 // infer much about the resulting pointer value. One case is possible:
1206 // For `_Alignas(32) char buf[N]; __builtin_align_down(&buf[idx], 32)` we
1207 // can infer the correct index if the requested alignment is smaller than
1208 // the base alignment so we can perform the computation on the offset.
1209 if (BaseAlignment.getQuantity() >= Alignment) {
1210 assert(Alignment.getBitWidth() <= 64 &&
1211 "Cannot handle > 64-bit address-space");
1212 uint64_t Alignment64 = Alignment.getZExtValue();
1213 CharUnits NewOffset =
1214 CharUnits::fromQuantity(BuiltinOp == Builtin::BI__builtin_align_down
1215 ? llvm::alignDown(PtrOffset, Alignment64)
1216 : llvm::alignTo(PtrOffset, Alignment64));
1217
1218 S.Stk.push<Pointer>(Ptr.atIndex(NewOffset.getQuantity()));
1219 return true;
1220 }
1221
1222 // Otherwise, we cannot constant-evaluate the result.
1223 S.FFDiag(Call->getArg(0), diag::note_constexpr_alignment_adjust) << Alignment;
1224 return false;
1225}
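// Editor's illustrative usage of the integer path above:
//   static_assert(__builtin_is_aligned(8, 4));
//   static_assert(__builtin_align_up(7, 4) == 8);
//   static_assert(__builtin_align_down(7, 4) == 4);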
1226
1227/// __builtin_assume_aligned(Ptr, Alignment[, ExtraOffset])
1228static bool interp__builtin_assume_aligned(InterpState &S, CodePtr OpPC,
1229 const InterpFrame *Frame,
1230 const CallExpr *Call) {
1231 assert(Call->getNumArgs() == 2 || Call->getNumArgs() == 3);
1232
1233 std::optional<APSInt> ExtraOffset;
1234 if (Call->getNumArgs() == 3)
1235 ExtraOffset = popToAPSInt(S.Stk, *S.Ctx.classify(Call->getArg(2)));
1236
1237 APSInt Alignment = popToAPSInt(S.Stk, *S.Ctx.classify(Call->getArg(1)));
1238 const Pointer &Ptr = S.Stk.pop<Pointer>();
1239
1240 CharUnits Align = CharUnits::fromQuantity(Alignment.getZExtValue());
1241
1242 // If there is a base object, then it must have the correct alignment.
1243 if (Ptr.isBlockPointer()) {
1244 CharUnits BaseAlignment;
1245 if (const auto *VD = Ptr.getDeclDesc()->asValueDecl())
1246 BaseAlignment = S.getASTContext().getDeclAlign(VD);
1247 else if (const auto *E = Ptr.getDeclDesc()->asExpr())
1248 BaseAlignment = GetAlignOfExpr(S.getASTContext(), E, UETT_AlignOf);
1249
1250 if (BaseAlignment < Align) {
1251 S.CCEDiag(Call->getArg(0),
1252 diag::note_constexpr_baa_insufficient_alignment)
1253 << 0 << BaseAlignment.getQuantity() << Align.getQuantity();
1254 return false;
1255 }
1256 }
1257
1258 APValue AV = Ptr.toAPValue(S.getASTContext());
1259 CharUnits AVOffset = AV.getLValueOffset();
1260 if (ExtraOffset)
1261 AVOffset -= CharUnits::fromQuantity(ExtraOffset->getZExtValue());
1262 if (AVOffset.alignTo(Align) != AVOffset) {
1263 if (Ptr.isBlockPointer())
1264 S.CCEDiag(Call->getArg(0),
1265 diag::note_constexpr_baa_insufficient_alignment)
1266 << 1 << AVOffset.getQuantity() << Align.getQuantity();
1267 else
1268 S.CCEDiag(Call->getArg(0),
1269 diag::note_constexpr_baa_value_insufficient_alignment)
1270 << AVOffset.getQuantity() << Align.getQuantity();
1271 return false;
1272 }
1273
1274 S.Stk.push<Pointer>(Ptr);
1275 return true;
1276}
1277
1278/// (CarryIn, LHS, RHS, Result)
1279static bool interp__builtin_ia32_addcarry_subborrow(InterpState &S,
1280 CodePtr OpPC,
1281 const InterpFrame *Frame,
1282 const CallExpr *Call,
1283 unsigned BuiltinOp) {
1284 if (Call->getNumArgs() != 4 || !Call->getArg(0)->getType()->isIntegerType() ||
1285 !Call->getArg(1)->getType()->isIntegerType() ||
1286 !Call->getArg(2)->getType()->isIntegerType())
1287 return false;
1288
1289 const Pointer &CarryOutPtr = S.Stk.pop<Pointer>();
1290
1291 APSInt RHS = popToAPSInt(S, Call->getArg(2));
1292 APSInt LHS = popToAPSInt(S, Call->getArg(1));
1293 APSInt CarryIn = popToAPSInt(S, Call->getArg(0));
1294
1295 bool IsAdd = BuiltinOp == clang::X86::BI__builtin_ia32_addcarryx_u32 ||
1296 BuiltinOp == clang::X86::BI__builtin_ia32_addcarryx_u64;
1297
1298 unsigned BitWidth = LHS.getBitWidth();
1299 unsigned CarryInBit = CarryIn.ugt(0) ? 1 : 0;
1300 APInt ExResult =
1301 IsAdd ? (LHS.zext(BitWidth + 1) + (RHS.zext(BitWidth + 1) + CarryInBit))
1302 : (LHS.zext(BitWidth + 1) - (RHS.zext(BitWidth + 1) + CarryInBit));
1303
1304 APInt Result = ExResult.extractBits(BitWidth, 0);
1305 APSInt CarryOut =
1306 APSInt(ExResult.extractBits(1, BitWidth), /*IsUnsigned=*/true);
1307
1308 QualType CarryOutType = Call->getArg(3)->getType()->getPointeeType();
1309 PrimType CarryOutT = *S.getContext().classify(CarryOutType);
1310 assignInteger(S, CarryOutPtr, CarryOutT, APSInt(std::move(Result), true));
1311
1312 pushInteger(S, CarryOut, Call->getType());
1313
1314 return true;
1315}
1316
1317static bool interp__builtin_os_log_format_buffer_size(InterpState &S,
1318 CodePtr OpPC,
1319 const InterpFrame *Frame,
1320 const CallExpr *Call) {
1321 analyze_os_log::OSLogBufferLayout Layout;
1322 analyze_os_log::computeOSLogBufferLayout(S.getASTContext(), Call, Layout);
1323 pushInteger(S, Layout.size().getQuantity(), Call->getType());
1324 return true;
1325}
1326
1327static bool
1328interp__builtin_ptrauth_string_discriminator(InterpState &S, CodePtr OpPC,
1329 const InterpFrame *Frame,
1330 const CallExpr *Call) {
1331 const auto &Ptr = S.Stk.pop<Pointer>();
1332 assert(Ptr.getFieldDesc()->isPrimitiveArray());
1333
1334 // This should be created for a StringLiteral, so it should always hold at least
1335 // one array element.
1336 assert(Ptr.getFieldDesc()->getNumElems() >= 1);
1337 StringRef R(&Ptr.deref<char>(), Ptr.getFieldDesc()->getNumElems() - 1);
1338 uint64_t Result = getPointerAuthStableSipHash(R);
1339 pushInteger(S, Result, Call->getType());
1340 return true;
1341}
1342
1343static bool interp__builtin_operator_new(InterpState &S, CodePtr OpPC,
1344 const InterpFrame *Frame,
1345 const CallExpr *Call) {
1346 // A call to __operator_new is only valid within std::allocate<>::allocate.
1347 // Walk up the call stack to find the appropriate caller and get the
1348 // element type from it.
1349 auto [NewCall, ElemType] = S.getStdAllocatorCaller("allocate");
1350
1351 if (ElemType.isNull()) {
1352 S.FFDiag(Call, S.getLangOpts().CPlusPlus20
1353 ? diag::note_constexpr_new_untyped
1354 : diag::note_constexpr_new);
1355 return false;
1356 }
1357 assert(NewCall);
1358
1359 if (ElemType->isIncompleteType() || ElemType->isFunctionType()) {
1360 S.FFDiag(Call, diag::note_constexpr_new_not_complete_object_type)
1361 << (ElemType->isIncompleteType() ? 0 : 1) << ElemType;
1362 return false;
1363 }
1364
1365 // We only care about the first parameter (the size), so discard all the
1366 // others.
1367 {
1368 unsigned NumArgs = Call->getNumArgs();
1369 assert(NumArgs >= 1);
1370
1371 // The std::nothrow_t arg never gets put on the stack.
1372 if (Call->getArg(NumArgs - 1)->getType()->isNothrowT())
1373 --NumArgs;
1374 auto Args = ArrayRef(Call->getArgs(), Call->getNumArgs());
1375 // First arg is needed.
1376 Args = Args.drop_front();
1377
1378 // Discard the rest.
1379 for (const Expr *Arg : Args)
1380 discard(S.Stk, *S.getContext().classify(Arg));
1381 }
1382
1383 APSInt Bytes = popToAPSInt(S, Call->getArg(0));
1384 CharUnits ElemSize = S.getASTContext().getTypeSizeInChars(ElemType);
1385 assert(!ElemSize.isZero());
1386 // Divide the number of bytes by sizeof(ElemType), so we get the number of
1387 // elements we should allocate.
1388 APInt NumElems, Remainder;
1389 APInt ElemSizeAP(Bytes.getBitWidth(), ElemSize.getQuantity());
1390 APInt::udivrem(Bytes, ElemSizeAP, NumElems, Remainder);
1391 if (Remainder != 0) {
1392 // This likely indicates a bug in the implementation of 'std::allocator'.
1393 S.FFDiag(Call, diag::note_constexpr_operator_new_bad_size)
1394 << Bytes << APSInt(ElemSizeAP, true) << ElemType;
1395 return false;
1396 }
1397
1398 // NB: The same check we're using in CheckArraySize()
1399 if (NumElems.getActiveBits() >
1400 ConstantArrayType::getMaxSizeBits(S.getASTContext()) ||
1401 NumElems.ugt(Descriptor::MaxArrayElemBytes / ElemSize.getQuantity())) {
1402 // FIXME: NoThrow check?
1403 const SourceInfo &Loc = S.Current->getSource(OpPC);
1404 S.FFDiag(Loc, diag::note_constexpr_new_too_large)
1405 << NumElems.getZExtValue();
1406 return false;
1407 }
1408
1409 if (!CheckArraySize(S, OpPC, NumElems.getZExtValue()))
1410 return false;
1411
1412 bool IsArray = NumElems.ugt(1);
1413 OptPrimType ElemT = S.getContext().classify(ElemType);
1414 DynamicAllocator &Allocator = S.getAllocator();
1415 if (ElemT) {
1416 Block *B =
1417 Allocator.allocate(NewCall, *ElemT, NumElems.getZExtValue(),
1419 assert(B);
1420 S.Stk.push<Pointer>(Pointer(B).atIndex(0));
1421 return true;
1422 }
1423
1424 assert(!ElemT);
1425
1426 // Composite arrays
1427 if (IsArray) {
1428 const Descriptor *Desc =
1429 S.P.createDescriptor(NewCall, ElemType.getTypePtr(), std::nullopt);
1430 Block *B =
1431 Allocator.allocate(Desc, NumElems.getZExtValue(), S.Ctx.getEvalID(),
1433 assert(B);
1434 S.Stk.push<Pointer>(Pointer(B).atIndex(0));
1435 return true;
1436 }
1437
1438 // Records. Still allocate them as single-element arrays.
1439 QualType AllocType = S.getASTContext().getConstantArrayType(
1440 ElemType, NumElems, nullptr, ArraySizeModifier::Normal, 0);
1441
1442 const Descriptor *Desc = S.P.createDescriptor(NewCall, AllocType.getTypePtr(),
1444 Block *B = Allocator.allocate(Desc, S.getContext().getEvalID(),
1446 assert(B);
1447 S.Stk.push<Pointer>(Pointer(B).atIndex(0).narrow());
1448 return true;
1449}
1450
1451static bool interp__builtin_operator_delete(InterpState &S, CodePtr OpPC,
1452 const InterpFrame *Frame,
1453 const CallExpr *Call) {
1454 const Expr *Source = nullptr;
1455 const Block *BlockToDelete = nullptr;
1456
1457 if (S.checkingPotentialConstantExpression()) {
1458 S.Stk.discard<Pointer>();
1459 return false;
1460 }
1461
1462 // This is permitted only within a call to std::allocator<T>::deallocate.
1463 if (!S.getStdAllocatorCaller("deallocate")) {
1464 S.FFDiag(Call);
1465 S.Stk.discard<Pointer>();
1466 return true;
1467 }
1468
1469 {
1470 const Pointer &Ptr = S.Stk.pop<Pointer>();
1471
1472 if (Ptr.isZero()) {
1473 S.CCEDiag(Call, diag::note_constexpr_deallocate_null);
1474 return true;
1475 }
1476
1477 Source = Ptr.getDeclDesc()->asExpr();
1478 BlockToDelete = Ptr.block();
1479
1480 if (!BlockToDelete->isDynamic()) {
1481 S.FFDiag(Call, diag::note_constexpr_delete_not_heap_alloc)
1482 << Ptr.toDiagnosticString(S.getASTContext());
1483 if (const auto *D = Ptr.getFieldDesc()->asDecl())
1484 S.Note(D->getLocation(), diag::note_declared_at);
1485 }
1486 }
1487 assert(BlockToDelete);
1488
1489 DynamicAllocator &Allocator = S.getAllocator();
1490 const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
1491 std::optional<DynamicAllocator::Form> AllocForm =
1492 Allocator.getAllocationForm(Source);
1493
1494 if (!Allocator.deallocate(Source, BlockToDelete, S)) {
1495 // Nothing has been deallocated, this must be a double-delete.
1496 const SourceInfo &Loc = S.Current->getSource(OpPC);
1497 S.FFDiag(Loc, diag::note_constexpr_double_delete);
1498 return false;
1499 }
1500 assert(AllocForm);
1501
1502 return CheckNewDeleteForms(
1503 S, OpPC, *AllocForm, DynamicAllocator::Form::Operator, BlockDesc, Source);
1504}
1505
1506static bool interp__builtin_arithmetic_fence(InterpState &S, CodePtr OpPC,
1507 const InterpFrame *Frame,
1508 const CallExpr *Call) {
1509 const Floating &Arg0 = S.Stk.pop<Floating>();
1510 S.Stk.push<Floating>(Arg0);
1511 return true;
1512}
1513
1514static bool interp__builtin_vector_reduce(InterpState &S, CodePtr OpPC,
1515 const CallExpr *Call, unsigned ID) {
1516 const Pointer &Arg = S.Stk.pop<Pointer>();
1517 assert(Arg.getFieldDesc()->isPrimitiveArray());
1518
1519 QualType ElemType = Arg.getFieldDesc()->getElemQualType();
1520 assert(Call->getType() == ElemType);
1521 PrimType ElemT = *S.getContext().classify(ElemType);
1522 unsigned NumElems = Arg.getNumElems();
1523
1524 INT_TYPE_SWITCH_NO_BOOL(ElemT, {
1525 T Result = Arg.elem<T>(0);
1526 unsigned BitWidth = Result.bitWidth();
1527 for (unsigned I = 1; I != NumElems; ++I) {
1528 T Elem = Arg.elem<T>(I);
1529 T PrevResult = Result;
1530
1531 if (ID == Builtin::BI__builtin_reduce_add) {
1532 if (T::add(Result, Elem, BitWidth, &Result)) {
1533 unsigned OverflowBits = BitWidth + 1;
1534 (void)handleOverflow(S, OpPC,
1535 (PrevResult.toAPSInt(OverflowBits) +
1536 Elem.toAPSInt(OverflowBits)));
1537 return false;
1538 }
1539 } else if (ID == Builtin::BI__builtin_reduce_mul) {
1540 if (T::mul(Result, Elem, BitWidth, &Result)) {
1541 unsigned OverflowBits = BitWidth * 2;
1542 (void)handleOverflow(S, OpPC,
1543 (PrevResult.toAPSInt(OverflowBits) *
1544 Elem.toAPSInt(OverflowBits)));
1545 return false;
1546 }
1547
1548 } else if (ID == Builtin::BI__builtin_reduce_and) {
1549 (void)T::bitAnd(Result, Elem, BitWidth, &Result);
1550 } else if (ID == Builtin::BI__builtin_reduce_or) {
1551 (void)T::bitOr(Result, Elem, BitWidth, &Result);
1552 } else if (ID == Builtin::BI__builtin_reduce_xor) {
1553 (void)T::bitXor(Result, Elem, BitWidth, &Result);
1554 } else if (ID == Builtin::BI__builtin_reduce_min) {
1555 if (Elem < Result)
1556 Result = Elem;
1557 } else if (ID == Builtin::BI__builtin_reduce_max) {
1558 if (Elem > Result)
1559 Result = Elem;
1560 } else {
1561 llvm_unreachable("Unhandled vector reduce builtin");
1562 }
1563 }
1564 pushInteger(S, Result.toAPSInt(), Call->getType());
1565 });
1566
1567 return true;
1568}
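// Editor's illustrative usage (assumes Clang's vector_size extension):
//   typedef int v4si __attribute__((vector_size(16)));
//   constexpr v4si V = {1, 2, 3, 4};
//   static_assert(__builtin_reduce_add(V) == 10);
//   static_assert(__builtin_reduce_max(V) == 4);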
1569
1570static bool interp__builtin_elementwise_abs(InterpState &S, CodePtr OpPC,
1571 const InterpFrame *Frame,
1572 const CallExpr *Call,
1573 unsigned BuiltinID) {
1574 assert(Call->getNumArgs() == 1);
1575 QualType Ty = Call->getArg(0)->getType();
1576 if (Ty->isIntegerType()) {
1577 APSInt Val = popToAPSInt(S, Call->getArg(0));
1578 pushInteger(S, Val.abs(), Call->getType());
1579 return true;
1580 }
1581
1582 if (Ty->isFloatingType()) {
1583 Floating Val = S.Stk.pop<Floating>();
1584 Floating Result = abs(S, Val);
1585 S.Stk.push<Floating>(Result);
1586 return true;
1587 }
1588
1589 // Otherwise, the argument must be a vector.
1590 assert(Call->getArg(0)->getType()->isVectorType());
1591 const Pointer &Arg = S.Stk.pop<Pointer>();
1592 assert(Arg.getFieldDesc()->isPrimitiveArray());
1593 const Pointer &Dst = S.Stk.peek<Pointer>();
1594 assert(Dst.getFieldDesc()->isPrimitiveArray());
1595 assert(Arg.getFieldDesc()->getNumElems() ==
1596 Dst.getFieldDesc()->getNumElems());
1597
1598 QualType ElemType = Arg.getFieldDesc()->getElemQualType();
1599 PrimType ElemT = *S.getContext().classify(ElemType);
1600 unsigned NumElems = Arg.getNumElems();
1601 // We can have either a vector of integers or a vector of floating-point values.
1602 for (unsigned I = 0; I != NumElems; ++I) {
1603 if (ElemType->isIntegerType()) {
1605 Dst.elem<T>(I) = T::from(static_cast<T>(
1606 APSInt(Arg.elem<T>(I).toAPSInt().abs(),
1608 });
1609 } else {
1610 Floating Val = Arg.elem<Floating>(I);
1611 Dst.elem<Floating>(I) = abs(S, Val);
1612 }
1613 }
1614 Dst.initializeAllElements();
1615
1616 return true;
1617}
1618
1619/// Can be called with an integer or vector as the first and only parameter.
1620static bool interp__builtin_elementwise_popcount(InterpState &S, CodePtr OpPC,
1621 const InterpFrame *Frame,
1622 const CallExpr *Call,
1623 unsigned BuiltinID) {
1624 assert(Call->getNumArgs() == 1);
1625 if (Call->getArg(0)->getType()->isIntegerType()) {
1626 APSInt Val = popToAPSInt(S, Call->getArg(0));
1627
1628 if (BuiltinID == Builtin::BI__builtin_elementwise_popcount) {
1629 pushInteger(S, Val.popcount(), Call->getType());
1630 } else {
1631 pushInteger(S, Val.reverseBits(), Call->getType());
1632 }
1633 return true;
1634 }
1635 // Otherwise, the argument must be a vector.
1636 assert(Call->getArg(0)->getType()->isVectorType());
1637 const Pointer &Arg = S.Stk.pop<Pointer>();
1638 assert(Arg.getFieldDesc()->isPrimitiveArray());
1639 const Pointer &Dst = S.Stk.peek<Pointer>();
1640 assert(Dst.getFieldDesc()->isPrimitiveArray());
1641 assert(Arg.getFieldDesc()->getNumElems() ==
1642 Dst.getFieldDesc()->getNumElems());
1643
1644 QualType ElemType = Arg.getFieldDesc()->getElemQualType();
1645 PrimType ElemT = *S.getContext().classify(ElemType);
1646 unsigned NumElems = Arg.getNumElems();
1647
1648 // FIXME: Reading from uninitialized vector elements?
1649 for (unsigned I = 0; I != NumElems; ++I) {
1650 INT_TYPE_SWITCH_NO_BOOL(ElemT, {
1651 if (BuiltinID == Builtin::BI__builtin_elementwise_popcount) {
1652 Dst.elem<T>(I) = T::from(Arg.elem<T>(I).toAPSInt().popcount());
1653 } else {
1654 Dst.elem<T>(I) =
1655 T::from(Arg.elem<T>(I).toAPSInt().reverseBits().getZExtValue());
1656 }
1657 });
1658 }
1659 Dst.initializeAllElements();
1660
1661 return true;
1662}
1663
1664/// Can be called with an integer or vector as the first and only parameter.
1665static bool interp__builtin_elementwise_countzeroes(InterpState &S,
1666 CodePtr OpPC,
1667 const InterpFrame *Frame,
1668 const CallExpr *Call,
1669 unsigned BuiltinID) {
1670 const bool HasZeroArg = Call->getNumArgs() == 2;
1671 const bool IsCTTZ = BuiltinID == Builtin::BI__builtin_elementwise_ctzg;
1672 assert(Call->getNumArgs() == 1 || HasZeroArg);
1673 if (Call->getArg(0)->getType()->isIntegerType()) {
1674 PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
1675 APSInt Val = popToAPSInt(S.Stk, ArgT);
1676 std::optional<APSInt> ZeroVal;
1677 if (HasZeroArg) {
1678 ZeroVal = Val;
1679 Val = popToAPSInt(S.Stk, ArgT);
1680 }
1681
1682 if (Val.isZero()) {
1683 if (ZeroVal) {
1684 pushInteger(S, *ZeroVal, Call->getType());
1685 return true;
1686 }
1687 // If we haven't been provided the second argument, the result is
1688 // undefined
1689 S.FFDiag(S.Current->getSource(OpPC),
1690 diag::note_constexpr_countzeroes_zero)
1691 << /*IsTrailing=*/IsCTTZ;
1692 return false;
1693 }
1694
1695 if (BuiltinID == Builtin::BI__builtin_elementwise_clzg) {
1696 pushInteger(S, Val.countLeadingZeros(), Call->getType());
1697 } else {
1698 pushInteger(S, Val.countTrailingZeros(), Call->getType());
1699 }
1700 return true;
1701 }
1702 // Otherwise, the argument must be a vector.
1703 const ASTContext &ASTCtx = S.getASTContext();
1704 Pointer ZeroArg;
1705 if (HasZeroArg) {
1706 assert(Call->getArg(1)->getType()->isVectorType() &&
1707 ASTCtx.hasSameUnqualifiedType(Call->getArg(0)->getType(),
1708 Call->getArg(1)->getType()));
1709 (void)ASTCtx;
1710 ZeroArg = S.Stk.pop<Pointer>();
1711 assert(ZeroArg.getFieldDesc()->isPrimitiveArray());
1712 }
1713 assert(Call->getArg(0)->getType()->isVectorType());
1714 const Pointer &Arg = S.Stk.pop<Pointer>();
1715 assert(Arg.getFieldDesc()->isPrimitiveArray());
1716 const Pointer &Dst = S.Stk.peek<Pointer>();
1717 assert(Dst.getFieldDesc()->isPrimitiveArray());
1718 assert(Arg.getFieldDesc()->getNumElems() ==
1719 Dst.getFieldDesc()->getNumElems());
1720
1721 QualType ElemType = Arg.getFieldDesc()->getElemQualType();
1722 PrimType ElemT = *S.getContext().classify(ElemType);
1723 unsigned NumElems = Arg.getNumElems();
1724
1725 // FIXME: Reading from uninitialized vector elements?
1726 for (unsigned I = 0; I != NumElems; ++I) {
1727 INT_TYPE_SWITCH_NO_BOOL(ElemT, {
1728 APInt EltVal = Arg.atIndex(I).deref<T>().toAPSInt();
1729 if (EltVal.isZero()) {
1730 if (HasZeroArg) {
1731 Dst.atIndex(I).deref<T>() = ZeroArg.atIndex(I).deref<T>();
1732 } else {
1733 // If we haven't been provided the second argument, the result is
1734 // undefined
1735 S.FFDiag(S.Current->getSource(OpPC),
1736 diag::note_constexpr_countzeroes_zero)
1737 << /*IsTrailing=*/IsCTTZ;
1738 return false;
1739 }
1740 } else if (IsCTTZ) {
1741 Dst.atIndex(I).deref<T>() = T::from(EltVal.countTrailingZeros());
1742 } else {
1743 Dst.atIndex(I).deref<T>() = T::from(EltVal.countLeadingZeros());
1744 }
1745 Dst.atIndex(I).initialize();
1746 });
1747 }
1748
1749 return true;
1750}
1751
1752static bool interp__builtin_memcpy(InterpState &S, CodePtr OpPC,
1753 const InterpFrame *Frame,
1754 const CallExpr *Call, unsigned ID) {
1755 assert(Call->getNumArgs() == 3);
1756 const ASTContext &ASTCtx = S.getASTContext();
1757 APSInt Size = popToAPSInt(S, Call->getArg(2));
1758 const Pointer SrcPtr = S.Stk.pop<Pointer>();
1759 const Pointer DestPtr = S.Stk.pop<Pointer>();
1760
1761 assert(!Size.isSigned() && "memcpy and friends take an unsigned size");
1762
1763 if (ID == Builtin::BImemcpy || ID == Builtin::BImemmove)
1764 diagnoseNonConstexprBuiltin(S, OpPC, ID);
1765
1766 bool Move =
1767 (ID == Builtin::BI__builtin_memmove || ID == Builtin::BImemmove ||
1768 ID == Builtin::BI__builtin_wmemmove || ID == Builtin::BIwmemmove);
1769 bool WChar = ID == Builtin::BIwmemcpy || ID == Builtin::BIwmemmove ||
1770 ID == Builtin::BI__builtin_wmemcpy ||
1771 ID == Builtin::BI__builtin_wmemmove;
1772
1773 // If the size is zero, we treat this as always being a valid no-op.
1774 if (Size.isZero()) {
1775 S.Stk.push<Pointer>(DestPtr);
1776 return true;
1777 }
1778
1779 if (SrcPtr.isZero() || DestPtr.isZero()) {
1780 Pointer DiagPtr = (SrcPtr.isZero() ? SrcPtr : DestPtr);
1781 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_memcpy_null)
1782 << /*IsMove=*/Move << /*IsWchar=*/WChar << !SrcPtr.isZero()
1783 << DiagPtr.toDiagnosticString(ASTCtx);
1784 return false;
1785 }
1786
1787 // Diagnose integral src/dest pointers specially.
1788 if (SrcPtr.isIntegralPointer() || DestPtr.isIntegralPointer()) {
1789 std::string DiagVal = "(void *)";
1790 DiagVal += SrcPtr.isIntegralPointer()
1791 ? std::to_string(SrcPtr.getIntegerRepresentation())
1792 : std::to_string(DestPtr.getIntegerRepresentation());
1793 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_memcpy_null)
1794 << Move << WChar << DestPtr.isIntegralPointer() << DiagVal;
1795 return false;
1796 }
1797
1798 // Can't read from dummy pointers.
1799 if (DestPtr.isDummy() || SrcPtr.isDummy())
1800 return false;
1801
1802 if (DestPtr.getType()->isIncompleteType()) {
1803 S.FFDiag(S.Current->getSource(OpPC),
1804 diag::note_constexpr_memcpy_incomplete_type)
1805 << Move << DestPtr.getType();
1806 return false;
1807 }
1808 if (SrcPtr.getType()->isIncompleteType()) {
1809 S.FFDiag(S.Current->getSource(OpPC),
1810 diag::note_constexpr_memcpy_incomplete_type)
1811 << Move << SrcPtr.getType();
1812 return false;
1813 }
1814
1815 QualType DestElemType = getElemType(DestPtr);
1816 if (DestElemType->isIncompleteType()) {
1817 S.FFDiag(S.Current->getSource(OpPC),
1818 diag::note_constexpr_memcpy_incomplete_type)
1819 << Move << DestElemType;
1820 return false;
1821 }
1822
1823 size_t RemainingDestElems;
1824 if (DestPtr.getFieldDesc()->isArray()) {
1825 RemainingDestElems = DestPtr.isUnknownSizeArray()
1826 ? 0
1827 : (DestPtr.getNumElems() - DestPtr.getIndex());
1828 } else {
1829 RemainingDestElems = 1;
1830 }
1831 unsigned DestElemSize = ASTCtx.getTypeSizeInChars(DestElemType).getQuantity();
1832
1833 if (WChar) {
1834 uint64_t WCharSize =
1835 ASTCtx.getTypeSizeInChars(ASTCtx.getWCharType()).getQuantity();
1836 Size *= APSInt(APInt(Size.getBitWidth(), WCharSize, /*IsSigned=*/false),
1837 /*IsUnsigned=*/true);
1838 }
1839
1840 if (Size.urem(DestElemSize) != 0) {
1841 S.FFDiag(S.Current->getSource(OpPC),
1842 diag::note_constexpr_memcpy_unsupported)
1843 << Move << WChar << 0 << DestElemType << Size << DestElemSize;
1844 return false;
1845 }
1846
1847 QualType SrcElemType = getElemType(SrcPtr);
1848 size_t RemainingSrcElems;
1849 if (SrcPtr.getFieldDesc()->isArray()) {
1850 RemainingSrcElems = SrcPtr.isUnknownSizeArray()
1851 ? 0
1852 : (SrcPtr.getNumElems() - SrcPtr.getIndex());
1853 } else {
1854 RemainingSrcElems = 1;
1855 }
1856 unsigned SrcElemSize = ASTCtx.getTypeSizeInChars(SrcElemType).getQuantity();
1857
1858 if (!ASTCtx.hasSameUnqualifiedType(DestElemType, SrcElemType)) {
1859 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_memcpy_type_pun)
1860 << Move << SrcElemType << DestElemType;
1861 return false;
1862 }
1863
1864 if (!DestElemType.isTriviallyCopyableType(ASTCtx)) {
1865 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_memcpy_nontrivial)
1866 << Move << DestElemType;
1867 return false;
1868 }
1869
1870 // Check if we have enough elements to read from and write to.
1871 size_t RemainingDestBytes = RemainingDestElems * DestElemSize;
1872 size_t RemainingSrcBytes = RemainingSrcElems * SrcElemSize;
1873 if (Size.ugt(RemainingDestBytes) || Size.ugt(RemainingSrcBytes)) {
1874 APInt N = Size.udiv(DestElemSize);
1875 S.FFDiag(S.Current->getSource(OpPC),
1876 diag::note_constexpr_memcpy_unsupported)
1877 << Move << WChar << (Size.ugt(RemainingSrcBytes) ? 1 : 2)
1878 << DestElemType << toString(N, 10, /*Signed=*/false);
1879 return false;
1880 }
1881
1882 // Check for overlapping memory regions.
1883 if (!Move && Pointer::pointToSameBlock(SrcPtr, DestPtr)) {
1884 // Remove base casts.
1885 Pointer SrcP = SrcPtr;
1886 while (SrcP.isBaseClass())
1887 SrcP = SrcP.getBase();
1888
1889 Pointer DestP = DestPtr;
1890 while (DestP.isBaseClass())
1891 DestP = DestP.getBase();
1892
1893 unsigned SrcIndex = SrcP.expand().getIndex() * SrcP.elemSize();
1894 unsigned DstIndex = DestP.expand().getIndex() * DestP.elemSize();
1895 unsigned N = Size.getZExtValue();
1896
1897 if ((SrcIndex <= DstIndex && (SrcIndex + N) > DstIndex) ||
1898 (DstIndex <= SrcIndex && (DstIndex + N) > SrcIndex)) {
1899 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_memcpy_overlap)
1900 << /*IsWChar=*/false;
1901 return false;
1902 }
1903 }
1904
1905 assert(Size.getZExtValue() % DestElemSize == 0);
1906 if (!DoMemcpy(S, OpPC, SrcPtr, DestPtr, Bytes(Size.getZExtValue()).toBits()))
1907 return false;
1908
1909 S.Stk.push<Pointer>(DestPtr);
1910 return true;
1911}
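// Illustrative sketch (not part of the original file): the kind of constexpr
// call the handler above is expected to fold, assuming the element types
// match and the size is a multiple of the element size:
//
//   constexpr int copyThird() {
//     int Src[4] = {1, 2, 3, 4};
//     int Dst[4] = {};
//     __builtin_memcpy(Dst, Src, sizeof(Src));
//     return Dst[2]; // 3
//   }
//   static_assert(copyThird() == 3);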
1912
1913/// Determine if T is a character type for which we guarantee that
1914/// sizeof(T) == 1.
1915static bool isOneByteCharacterType(QualType T) {
1916 return T->isCharType() || T->isChar8Type();
1917}
1918
1919static bool interp__builtin_memcmp(InterpState &S, CodePtr OpPC,
1920 const InterpFrame *Frame,
1921 const CallExpr *Call, unsigned ID) {
1922 assert(Call->getNumArgs() == 3);
1923 const APSInt &Size = popToAPSInt(S, Call->getArg(2));
1924 const Pointer &PtrB = S.Stk.pop<Pointer>();
1925 const Pointer &PtrA = S.Stk.pop<Pointer>();
1926
1927 if (ID == Builtin::BImemcmp || ID == Builtin::BIbcmp ||
1928 ID == Builtin::BIwmemcmp)
1929 diagnoseNonConstexprBuiltin(S, OpPC, ID);
1930
1931 if (Size.isZero()) {
1932 pushInteger(S, 0, Call->getType());
1933 return true;
1934 }
1935
1936 bool IsWide =
1937 (ID == Builtin::BIwmemcmp || ID == Builtin::BI__builtin_wmemcmp);
1938
1939 const ASTContext &ASTCtx = S.getASTContext();
1940 QualType ElemTypeA = getElemType(PtrA);
1941 QualType ElemTypeB = getElemType(PtrB);
1942 // FIXME: This is an arbitrary limitation that the current constant
1943 // interpreter had. We could remove this.
1944 if (!IsWide && (!isOneByteCharacterType(ElemTypeA) ||
1945 !isOneByteCharacterType(ElemTypeB))) {
1946 S.FFDiag(S.Current->getSource(OpPC),
1947 diag::note_constexpr_memcmp_unsupported)
1948 << ASTCtx.BuiltinInfo.getQuotedName(ID) << PtrA.getType()
1949 << PtrB.getType();
1950 return false;
1951 }
1952
1953 if (PtrA.isDummy() || PtrB.isDummy())
1954 return false;
1955
1956 // Now, read both pointers to a buffer and compare those.
1957 BitcastBuffer BufferA(
1958 Bits(ASTCtx.getTypeSize(ElemTypeA) * PtrA.getNumElems()));
1959 readPointerToBuffer(S.getContext(), PtrA, BufferA, false);
1960 // FIXME: The swapping here is UNDOING something we do when reading the
1961 // data into the buffer.
1962 if (ASTCtx.getTargetInfo().isBigEndian())
1963 swapBytes(BufferA.Data.get(), BufferA.byteSize().getQuantity());
1964
1965 BitcastBuffer BufferB(
1966 Bits(ASTCtx.getTypeSize(ElemTypeB) * PtrB.getNumElems()));
1967 readPointerToBuffer(S.getContext(), PtrB, BufferB, false);
1968 // FIXME: The swapping here is UNDOING something we do when reading the
1969 // data into the buffer.
1970 if (ASTCtx.getTargetInfo().isBigEndian())
1971 swapBytes(BufferB.Data.get(), BufferB.byteSize().getQuantity());
1972
1973 size_t MinBufferSize = std::min(BufferA.byteSize().getQuantity(),
1974 BufferB.byteSize().getQuantity());
1975
1976 unsigned ElemSize = 1;
1977 if (IsWide)
1978 ElemSize = ASTCtx.getTypeSizeInChars(ASTCtx.getWCharType()).getQuantity();
1979 // The Size given for the wide variants is in wide-char units. Convert it
1980 // to bytes.
1981 size_t ByteSize = Size.getZExtValue() * ElemSize;
1982 size_t CmpSize = std::min(MinBufferSize, ByteSize);
1983
1984 for (size_t I = 0; I != CmpSize; I += ElemSize) {
1985 if (IsWide) {
1986 INT_TYPE_SWITCH(*S.getContext().classify(ASTCtx.getWCharType()), {
1987 T A = *reinterpret_cast<T *>(BufferA.Data.get() + I);
1988 T B = *reinterpret_cast<T *>(BufferB.Data.get() + I);
1989 if (A < B) {
1990 pushInteger(S, -1, Call->getType());
1991 return true;
1992 }
1993 if (A > B) {
1994 pushInteger(S, 1, Call->getType());
1995 return true;
1996 }
1997 });
1998 } else {
1999 std::byte A = BufferA.Data[I];
2000 std::byte B = BufferB.Data[I];
2001
2002 if (A < B) {
2003 pushInteger(S, -1, Call->getType());
2004 return true;
2005 }
2006 if (A > B) {
2007 pushInteger(S, 1, Call->getType());
2008 return true;
2009 }
2010 }
2011 }
2012
2013 // We compared CmpSize bytes above. If the limiting factor was the Size
2014 // passed, we're done and the result is equality (0).
2015 if (ByteSize <= CmpSize) {
2016 pushInteger(S, 0, Call->getType());
2017 return true;
2018 }
2019
2020 // However, if we read all the available bytes but were instructed to read
2021 // even more, diagnose this as a "read of dereferenced one-past-the-end
2022 // pointer". This is what would happen if we called CheckLoad() on every array
2023 // element.
2024 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_past_end)
2025 << AK_Read << S.Current->getRange(OpPC);
2026 return false;
2027}
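// Illustrative sketch (not part of the original file): __builtin_memcmp over
// one-byte character arrays folds byte-wise; asking for more bytes than the
// arrays hold is diagnosed as a one-past-the-end read.
//
//   constexpr char A[3] = {'a', 'b', 'c'};
//   constexpr char B[3] = {'a', 'b', 'd'};
//   static_assert(__builtin_memcmp(A, B, 2) == 0);
//   static_assert(__builtin_memcmp(A, B, 3) < 0); // 'c' < 'd'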
2028
2029// __builtin_memchr(ptr, int, int)
2030// __builtin_strchr(ptr, int)
2031static bool interp__builtin_memchr(InterpState &S, CodePtr OpPC,
2032 const CallExpr *Call, unsigned ID) {
2033 if (ID == Builtin::BImemchr || ID == Builtin::BIwcschr ||
2034 ID == Builtin::BIstrchr || ID == Builtin::BIwmemchr)
2035 diagnoseNonConstexprBuiltin(S, OpPC, ID);
2036
2037 std::optional<APSInt> MaxLength;
2038 if (Call->getNumArgs() == 3)
2039 MaxLength = popToAPSInt(S, Call->getArg(2));
2040
2041 APSInt Desired = popToAPSInt(S, Call->getArg(1));
2042 const Pointer &Ptr = S.Stk.pop<Pointer>();
2043
2044 if (MaxLength && MaxLength->isZero()) {
2045 S.Stk.push<Pointer>();
2046 return true;
2047 }
2048
2049 if (Ptr.isDummy()) {
2050 if (Ptr.getType()->isIncompleteType())
2051 S.FFDiag(S.Current->getSource(OpPC),
2052 diag::note_constexpr_ltor_incomplete_type)
2053 << Ptr.getType();
2054 return false;
2055 }
2056
2057 // Null is only okay if the given size is 0.
2058 if (Ptr.isZero()) {
2059 S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_null)
2060 << AK_Read;
2061 return false;
2062 }
2063
2064 QualType ElemTy = Ptr.getFieldDesc()->isArray()
2065 ? Ptr.getFieldDesc()->getElemQualType()
2066 : Ptr.getFieldDesc()->getType();
2067 bool IsRawByte = ID == Builtin::BImemchr || ID == Builtin::BI__builtin_memchr;
2068
2069 // Give up on byte-oriented matching against multibyte elements.
2070 if (IsRawByte && !isOneByteCharacterType(ElemTy)) {
2071 S.FFDiag(S.Current->getSource(OpPC),
2072 diag::note_constexpr_memchr_unsupported)
2073 << S.getASTContext().BuiltinInfo.getQuotedName(ID) << ElemTy;
2074 return false;
2075 }
2076
2077 if (ID == Builtin::BIstrchr || ID == Builtin::BI__builtin_strchr) {
2078 // strchr compares directly to the passed integer, and therefore
2079 // always fails if given an int that is not a char.
2080 if (Desired !=
2081 Desired.trunc(S.getASTContext().getCharWidth()).getSExtValue()) {
2082 S.Stk.push<Pointer>();
2083 return true;
2084 }
2085 }
2086
2087 uint64_t DesiredVal;
2088 if (ID == Builtin::BIwmemchr || ID == Builtin::BI__builtin_wmemchr ||
2089 ID == Builtin::BIwcschr || ID == Builtin::BI__builtin_wcschr) {
2090 // wcschr and wmemchr are given a wchar_t to look for. Just use it.
2091 DesiredVal = Desired.getZExtValue();
2092 } else {
2093 DesiredVal = Desired.trunc(S.getASTContext().getCharWidth()).getZExtValue();
2094 }
2095
2096 bool StopAtZero =
2097 (ID == Builtin::BIstrchr || ID == Builtin::BI__builtin_strchr ||
2098 ID == Builtin::BIwcschr || ID == Builtin::BI__builtin_wcschr);
2099
2100 PrimType ElemT =
2101 IsRawByte ? PT_Sint8 : *S.getContext().classify(getElemType(Ptr));
2102
2103 size_t Index = Ptr.getIndex();
2104 size_t Step = 0;
2105 for (;;) {
2106 const Pointer &ElemPtr =
2107 (Index + Step) > 0 ? Ptr.atIndex(Index + Step) : Ptr;
2108
2109 if (!CheckLoad(S, OpPC, ElemPtr))
2110 return false;
2111
2112 uint64_t V;
2113 INT_TYPE_SWITCH_NO_BOOL(
2114 ElemT, { V = static_cast<uint64_t>(ElemPtr.deref<T>().toUnsigned()); });
2115
2116 if (V == DesiredVal) {
2117 S.Stk.push<Pointer>(ElemPtr);
2118 return true;
2119 }
2120
2121 if (StopAtZero && V == 0)
2122 break;
2123
2124 ++Step;
2125 if (MaxLength && Step == MaxLength->getZExtValue())
2126 break;
2127 }
2128
2129 S.Stk.push<Pointer>();
2130 return true;
2131}
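// Illustrative sketch (not part of the original file): strchr-style lookups
// stop at the terminating NUL, while memchr honours the explicit length.
//
//   constexpr const char Str[] = "hello";
//   static_assert(__builtin_strchr(Str, 'l') == Str + 2);
//   static_assert(__builtin_strchr(Str, 'x') == nullptr);
//   static_assert(__builtin_memchr(Str, 'o', 5) == Str + 4);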
2132
2133static std::optional<unsigned> computeFullDescSize(const ASTContext &ASTCtx,
2134 const Descriptor *Desc) {
2135 if (Desc->isPrimitive())
2136 return ASTCtx.getTypeSizeInChars(Desc->getType()).getQuantity();
2137 if (Desc->isArray())
2138 return ASTCtx.getTypeSizeInChars(Desc->getElemQualType()).getQuantity() *
2139 Desc->getNumElems();
2140 if (Desc->isRecord()) {
2141 // Can't use Descriptor::getType() as that may return a pointer type. Look
2142 // at the decl directly.
2143 return ASTCtx
2144 .getTypeSizeInChars(
2145 ASTCtx.getCanonicalTagType(Desc->ElemRecord->getDecl()))
2146 .getQuantity();
2147 }
2148
2149 return std::nullopt;
2150}
2151
2152/// Compute the byte offset of \p Ptr in the full declaration.
2153static unsigned computePointerOffset(const ASTContext &ASTCtx,
2154 const Pointer &Ptr) {
2155 unsigned Result = 0;
2156
2157 Pointer P = Ptr;
2158 while (P.isField() || P.isArrayElement()) {
2159 P = P.expand();
2160 const Descriptor *D = P.getFieldDesc();
2161
2162 if (P.isArrayElement()) {
2163 unsigned ElemSize =
2164 ASTCtx.getTypeSizeInChars(D->getElemQualType()).getQuantity();
2165 if (P.isOnePastEnd())
2166 Result += ElemSize * P.getNumElems();
2167 else
2168 Result += ElemSize * P.getIndex();
2169 P = P.expand().getArray();
2170 } else if (P.isBaseClass()) {
2171 const auto *RD = cast<CXXRecordDecl>(D->asDecl());
2172 bool IsVirtual = Ptr.isVirtualBaseClass();
2173 P = P.getBase();
2174 const Record *BaseRecord = P.getRecord();
2175
2176 const ASTRecordLayout &Layout =
2177 ASTCtx.getASTRecordLayout(cast<CXXRecordDecl>(BaseRecord->getDecl()));
2178 if (IsVirtual)
2179 Result += Layout.getVBaseClassOffset(RD).getQuantity();
2180 else
2181 Result += Layout.getBaseClassOffset(RD).getQuantity();
2182 } else if (P.isField()) {
2183 const FieldDecl *FD = P.getField();
2184 const ASTRecordLayout &Layout =
2185 ASTCtx.getASTRecordLayout(FD->getParent());
2186 unsigned FieldIndex = FD->getFieldIndex();
2187 uint64_t FieldOffset =
2188 ASTCtx.toCharUnitsFromBits(Layout.getFieldOffset(FieldIndex))
2189 .getQuantity();
2190 Result += FieldOffset;
2191 P = P.getBase();
2192 } else
2193 llvm_unreachable("Unhandled descriptor type");
2194 }
2195
2196 return Result;
2197}
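// Illustrative (not part of the original file), assuming 4-byte int and no
// surprising padding: for
//   struct S { int a; int b[4]; } s;
// a pointer to s.b[2] yields offsetof(S, b) + 2 * sizeof(int) = 4 + 8 = 12.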
2198
2199/// Does Ptr point to the last subobject?
2200static bool pointsToLastObject(const Pointer &Ptr) {
2201 Pointer P = Ptr;
2202 while (!P.isRoot()) {
2203
2204 if (P.isArrayElement()) {
2205 P = P.expand().getArray();
2206 continue;
2207 }
2208 if (P.isBaseClass()) {
2209 if (P.getRecord()->getNumFields() > 0)
2210 return false;
2211 P = P.getBase();
2212 continue;
2213 }
2214
2215 Pointer Base = P.getBase();
2216 if (const Record *R = Base.getRecord()) {
2217 assert(P.getField());
2218 if (P.getField()->getFieldIndex() != R->getNumFields() - 1)
2219 return false;
2220 }
2221 P = Base;
2222 }
2223
2224 return true;
2225}
2226
2227/// Does Ptr point to the last object AND to a flexible array member?
2228static bool isUserWritingOffTheEnd(const ASTContext &Ctx, const Pointer &Ptr) {
2229 auto isFlexibleArrayMember = [&](const Descriptor *FieldDesc) {
2230 using FAMKind = LangOptions::StrictFlexArraysLevelKind;
2231 FAMKind StrictFlexArraysLevel =
2232 Ctx.getLangOpts().getStrictFlexArraysLevel();
2233
2234 if (StrictFlexArraysLevel == FAMKind::Default)
2235 return true;
2236
2237 unsigned NumElems = FieldDesc->getNumElems();
2238 if (NumElems == 0 && StrictFlexArraysLevel != FAMKind::IncompleteOnly)
2239 return true;
2240
2241 if (NumElems == 1 && StrictFlexArraysLevel == FAMKind::OneZeroOrIncomplete)
2242 return true;
2243 return false;
2244 };
2245
2246 const Descriptor *FieldDesc = Ptr.getFieldDesc();
2247 if (!FieldDesc->isArray())
2248 return false;
2249
2250 return Ptr.isDummy() && pointsToLastObject(Ptr) &&
2251 isFlexibleArrayMember(FieldDesc);
2252}
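// Illustrative (not part of the original file): under -fstrict-flex-arrays=1
// (OneZeroOrIncomplete) the trailing one-element array below still counts as
// a flexible array member, so the size of the containing allocation, not the
// declared array bound, limits what can be reported for it.
//
//   struct Packet { int Len; char Data[1]; };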
2253
2254static bool interp__builtin_object_size(InterpState &S, CodePtr OpPC,
2255 const InterpFrame *Frame,
2256 const CallExpr *Call) {
2257 const ASTContext &ASTCtx = S.getASTContext();
2258 // From the GCC docs:
2259 // Kind is an integer constant from 0 to 3. If the least significant bit is
2260 // clear, objects are whole variables. If it is set, the closest surrounding
2261 // subobject is considered the object a pointer points to. The second bit
2262 // determines if maximum or minimum of remaining bytes is computed.
2263 unsigned Kind = popToAPSInt(S, Call->getArg(1)).getZExtValue();
2264 assert(Kind <= 3 && "unexpected kind");
2265 bool UseFieldDesc = (Kind & 1u);
2266 bool ReportMinimum = (Kind & 2u);
2267 const Pointer &Ptr = S.Stk.pop<Pointer>();
2268
2269 if (Call->getArg(0)->HasSideEffects(ASTCtx)) {
2270 // "If there are any side effects in them, it returns (size_t) -1
2271 // for type 0 or 1 and (size_t) 0 for type 2 or 3."
2272 pushInteger(S, Kind <= 1 ? -1 : 0, Call->getType());
2273 return true;
2274 }
2275
2276 if (Ptr.isZero() || !Ptr.isBlockPointer())
2277 return false;
2278
2279 // We can't load through pointers.
2280 if (Ptr.isDummy() && Ptr.getType()->isPointerType())
2281 return false;
2282
2283 bool DetermineForCompleteObject = Ptr.getFieldDesc() == Ptr.getDeclDesc();
2284 const Descriptor *DeclDesc = Ptr.getDeclDesc();
2285 assert(DeclDesc);
2286
2287 if (!UseFieldDesc || DetermineForCompleteObject) {
2288 // Lower bound, so we can't fall back to this.
2289 if (ReportMinimum && !DetermineForCompleteObject)
2290 return false;
2291
2292 // Can't read beyond the pointer decl desc.
2293 if (!UseFieldDesc && !ReportMinimum && DeclDesc->getType()->isPointerType())
2294 return false;
2295 } else {
2296 if (isUserWritingOffTheEnd(ASTCtx, Ptr.expand())) {
2297 // If we cannot determine the size of the initial allocation, then we
2298 // can't give an accurate upper bound. However, we are still able to give
2299 // conservative lower bounds for Type=3.
2300 if (Kind == 1)
2301 return false;
2302 }
2303 }
2304
2305 const Descriptor *Desc = UseFieldDesc ? Ptr.getFieldDesc() : DeclDesc;
2306 assert(Desc);
2307
2308 std::optional<unsigned> FullSize = computeFullDescSize(ASTCtx, Desc);
2309 if (!FullSize)
2310 return false;
2311
2312 unsigned ByteOffset;
2313 if (UseFieldDesc) {
2314 if (Ptr.isBaseClass())
2315 ByteOffset = computePointerOffset(ASTCtx, Ptr.getBase()) -
2316 computePointerOffset(ASTCtx, Ptr);
2317 else {
2318 if (Ptr.inArray())
2319 ByteOffset =
2320 computePointerOffset(ASTCtx, Ptr) -
2321 computePointerOffset(ASTCtx, Ptr.expand().atIndex(0).narrow());
2322 else
2323 ByteOffset = 0;
2324 }
2325 } else
2326 ByteOffset = computePointerOffset(ASTCtx, Ptr);
2327
2328 assert(ByteOffset <= *FullSize);
2329 unsigned Result = *FullSize - ByteOffset;
2330
2331 pushInteger(S, Result, Call->getType());
2332 return true;
2333}
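// Illustrative sketch (not part of the original file), assuming 4-byte int
// and no unusual padding:
//
//   struct Buf { char Payload[8]; int Trailer; } B;          // sizeof == 12
//   __builtin_object_size(&B.Payload[2], 0); // 10: rest of the whole variable
//   __builtin_object_size(&B.Payload[2], 1); // 6: rest of the closest subobject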
2334
2335static bool interp__builtin_is_within_lifetime(InterpState &S, CodePtr OpPC,
2336 const CallExpr *Call) {
2337
2338 if (!S.inConstantContext())
2339 return false;
2340
2341 const Pointer &Ptr = S.Stk.pop<Pointer>();
2342
2343 auto Error = [&](int Diag) {
2344 bool CalledFromStd = false;
2345 const auto *Callee = S.Current->getCallee();
2346 if (Callee && Callee->isInStdNamespace()) {
2347 const IdentifierInfo *Identifier = Callee->getIdentifier();
2348 CalledFromStd = Identifier && Identifier->isStr("is_within_lifetime");
2349 }
2350 S.CCEDiag(CalledFromStd
2352 : S.Current->getSource(OpPC),
2353 diag::err_invalid_is_within_lifetime)
2354 << (CalledFromStd ? "std::is_within_lifetime"
2355 : "__builtin_is_within_lifetime")
2356 << Diag;
2357 return false;
2358 };
2359
2360 if (Ptr.isZero())
2361 return Error(0);
2362 if (Ptr.isOnePastEnd())
2363 return Error(1);
2364
2365 bool Result = Ptr.getLifetime() != Lifetime::Ended;
2366 if (!Ptr.isActive()) {
2367 Result = false;
2368 } else {
2369 if (!CheckLive(S, OpPC, Ptr, AK_Read))
2370 return false;
2371 if (!CheckMutable(S, OpPC, Ptr))
2372 return false;
2373 if (!CheckDummy(S, OpPC, Ptr.block(), AK_Read))
2374 return false;
2375 }
2376
2377 // Check if we're currently running an initializer.
2378 if (llvm::is_contained(S.InitializingBlocks, Ptr.block()))
2379 return Error(2);
2380 if (S.EvaluatingDecl && Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl)
2381 return Error(2);
2382
2383 pushInteger(S, Result, Call->getType());
2384 return true;
2385}
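// Illustrative sketch (not part of the original file): the builtin reports
// whether the pointee is within its lifetime during constant evaluation,
// e.g. which member of a union is currently active.
//
//   constexpr bool ChecksUnion() {
//     union { int A; float B; } U{1};
//     return __builtin_is_within_lifetime(&U.A) &&  // active member
//            !__builtin_is_within_lifetime(&U.B);   // inactive member
//   }
//   static_assert(ChecksUnion());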
2386
2388 InterpState &S, CodePtr OpPC, const CallExpr *Call,
2389 llvm::function_ref<APInt(const APSInt &)> Fn) {
2390 assert(Call->getNumArgs() == 1);
2391 assert(Call->getType()->isIntegerType());
2392
2393 // Single integer case.
2394 if (!Call->getArg(0)->getType()->isVectorType()) {
2395 APSInt Src = popToAPSInt(S, Call->getArg(0));
2396 APInt Result = Fn(Src);
2397 pushInteger(S, APSInt(std::move(Result), !Src.isSigned()), Call->getType());
2398 return true;
2399 }
2400
2401 // TODO: Add vector integer handling.
2402 return false;
2403}
2404
2406 InterpState &S, CodePtr OpPC, const CallExpr *Call,
2407 llvm::function_ref<APInt(const APSInt &, const APSInt &)> Fn) {
2408 assert(Call->getNumArgs() == 2);
2409
2410 // Single integer case.
2411 if (!Call->getArg(0)->getType()->isVectorType()) {
2412 assert(!Call->getArg(1)->getType()->isVectorType());
2413 APSInt RHS = popToAPSInt(S, Call->getArg(1));
2414 APSInt LHS = popToAPSInt(S, Call->getArg(0));
2415 APInt Result = Fn(LHS, RHS);
2416 pushInteger(S, APSInt(std::move(Result), !LHS.isSigned()), Call->getType());
2417 return true;
2418 }
2419
2420 const auto *VT = Call->getArg(0)->getType()->castAs<VectorType>();
2421 assert(VT->getElementType()->isIntegralOrEnumerationType());
2422 PrimType ElemT = *S.getContext().classify(VT->getElementType());
2423 unsigned NumElems = VT->getNumElements();
2424 bool DestUnsigned = Call->getType()->isUnsignedIntegerOrEnumerationType();
2425
2426 // Vector + Scalar case.
2427 if (!Call->getArg(1)->getType()->isVectorType()) {
2428 assert(Call->getArg(1)->getType()->isIntegralOrEnumerationType());
2429
2430 APSInt RHS = popToAPSInt(S, Call->getArg(1));
2431 const Pointer &LHS = S.Stk.pop<Pointer>();
2432 const Pointer &Dst = S.Stk.peek<Pointer>();
2433
2434 for (unsigned I = 0; I != NumElems; ++I) {
2436 Dst.elem<T>(I) = static_cast<T>(
2437 APSInt(Fn(LHS.elem<T>(I).toAPSInt(), RHS), DestUnsigned));
2438 });
2439 }
2440 Dst.initializeAllElements();
2441 return true;
2442 }
2443
2444 // Vector case.
2445 assert(Call->getArg(0)->getType()->isVectorType() &&
2446 Call->getArg(1)->getType()->isVectorType());
2447 assert(VT->getElementType() ==
2448 Call->getArg(1)->getType()->castAs<VectorType>()->getElementType());
2449 assert(VT->getNumElements() ==
2450 Call->getArg(1)->getType()->castAs<VectorType>()->getNumElements());
2451 assert(VT->getElementType()->isIntegralOrEnumerationType());
2452
2453 const Pointer &RHS = S.Stk.pop<Pointer>();
2454 const Pointer &LHS = S.Stk.pop<Pointer>();
2455 const Pointer &Dst = S.Stk.peek<Pointer>();
2456 for (unsigned I = 0; I != NumElems; ++I) {
2458 APSInt Elem1 = LHS.elem<T>(I).toAPSInt();
2459 APSInt Elem2 = RHS.elem<T>(I).toAPSInt();
2460 Dst.elem<T>(I) = static_cast<T>(APSInt(Fn(Elem1, Elem2), DestUnsigned));
2461 });
2462 }
2463 Dst.initializeAllElements();
2464
2465 return true;
2466}
2467
2468static bool
2469interp__builtin_x86_pack(InterpState &S, CodePtr OpPC, const CallExpr *E,
2470 llvm::function_ref<APInt(const APSInt &)> PackFn) {
2471 const auto *VT0 = E->getArg(0)->getType()->castAs<VectorType>();
2472 [[maybe_unused]] const auto *VT1 =
2473 E->getArg(1)->getType()->castAs<VectorType>();
2474 assert(VT0 && VT1 && "pack builtin VT0 and VT1 must be VectorType");
2475 assert(VT0->getElementType() == VT1->getElementType() &&
2476 VT0->getNumElements() == VT1->getNumElements() &&
2477 "pack builtin VT0 and VT1 ElementType must be same");
2478
2479 const Pointer &RHS = S.Stk.pop<Pointer>();
2480 const Pointer &LHS = S.Stk.pop<Pointer>();
2481 const Pointer &Dst = S.Stk.peek<Pointer>();
2482
2483 const ASTContext &ASTCtx = S.getASTContext();
2484 const unsigned SrcBits = ASTCtx.getIntWidth(VT0->getElementType());
2485 const unsigned LHSVecLen = VT0->getNumElements();
2486 const unsigned SrcPerLane = 128 / SrcBits;
2487 const unsigned Lanes = LHSVecLen * SrcBits / 128;
2488
2489 PrimType SrcT = *S.getContext().classify(VT0->getElementType());
2490 PrimType DstT = *S.getContext().classify(getElemType(Dst));
2491 const bool IsUnsigned = getElemType(Dst)->isUnsignedIntegerType();
2492
2493 for (unsigned Lane = 0; Lane != Lanes; ++Lane) {
2494 const unsigned BaseSrc = Lane * SrcPerLane;
2495 const unsigned BaseDst = Lane * (2 * SrcPerLane);
2496
2497 for (unsigned I = 0; I != SrcPerLane; ++I) {
2499 APSInt A = LHS.elem<T>(BaseSrc + I).toAPSInt();
2500 APSInt B = RHS.elem<T>(BaseSrc + I).toAPSInt();
2501
2502 assignInteger(S, Dst.atIndex(BaseDst + I), DstT,
2503 APSInt(PackFn(A), IsUnsigned));
2504 assignInteger(S, Dst.atIndex(BaseDst + SrcPerLane + I), DstT,
2505 APSInt(PackFn(B), IsUnsigned));
2506 });
2507 }
2508 }
2509
2510 Dst.initializeAllElements();
2511 return true;
2512}
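// Illustrative (not part of the original file): packssdw saturates each
// 32-bit source lane to 16 bits and interleaves LHS/RHS per 128-bit lane,
// e.g. for
//   LHS = {70000, -70000, 1, -1}, RHS = {2, 3, 4, 5}
//   _mm_packs_epi32(LHS, RHS) == {32767, -32768, 1, -1, 2, 3, 4, 5}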
2513
2515 const CallExpr *Call,
2516 unsigned BuiltinID) {
2517 assert(Call->getNumArgs() == 2);
2518
2519 QualType Arg0Type = Call->getArg(0)->getType();
2520
2521 // TODO: Support floating-point types.
2522 if (!(Arg0Type->isIntegerType() ||
2523 (Arg0Type->isVectorType() &&
2524 Arg0Type->castAs<VectorType>()->getElementType()->isIntegerType())))
2525 return false;
2526
2527 if (!Arg0Type->isVectorType()) {
2528 assert(!Call->getArg(1)->getType()->isVectorType());
2529 APSInt RHS = popToAPSInt(S, Call->getArg(1));
2530 APSInt LHS = popToAPSInt(S, Arg0Type);
2531 APInt Result;
2532 if (BuiltinID == Builtin::BI__builtin_elementwise_max) {
2533 Result = std::max(LHS, RHS);
2534 } else if (BuiltinID == Builtin::BI__builtin_elementwise_min) {
2535 Result = std::min(LHS, RHS);
2536 } else {
2537 llvm_unreachable("Wrong builtin ID");
2538 }
2539
2540 pushInteger(S, APSInt(Result, !LHS.isSigned()), Call->getType());
2541 return true;
2542 }
2543
2544 // Vector case.
2545 assert(Call->getArg(0)->getType()->isVectorType() &&
2546 Call->getArg(1)->getType()->isVectorType());
2547 const auto *VT = Call->getArg(0)->getType()->castAs<VectorType>();
2548 assert(VT->getElementType() ==
2549 Call->getArg(1)->getType()->castAs<VectorType>()->getElementType());
2550 assert(VT->getNumElements() ==
2551 Call->getArg(1)->getType()->castAs<VectorType>()->getNumElements());
2552 assert(VT->getElementType()->isIntegralOrEnumerationType());
2553
2554 const Pointer &RHS = S.Stk.pop<Pointer>();
2555 const Pointer &LHS = S.Stk.pop<Pointer>();
2556 const Pointer &Dst = S.Stk.peek<Pointer>();
2557 PrimType ElemT = *S.getContext().classify(VT->getElementType());
2558 unsigned NumElems = VT->getNumElements();
2559 for (unsigned I = 0; I != NumElems; ++I) {
2560 APSInt Elem1;
2561 APSInt Elem2;
2563 Elem1 = LHS.elem<T>(I).toAPSInt();
2564 Elem2 = RHS.elem<T>(I).toAPSInt();
2565 });
2566
2567 APSInt Result;
2568 if (BuiltinID == Builtin::BI__builtin_elementwise_max) {
2569 Result = APSInt(std::max(Elem1, Elem2),
2570 Call->getType()->isUnsignedIntegerOrEnumerationType());
2571 } else if (BuiltinID == Builtin::BI__builtin_elementwise_min) {
2572 Result = APSInt(std::min(Elem1, Elem2),
2573 Call->getType()->isUnsignedIntegerOrEnumerationType());
2574 } else {
2575 llvm_unreachable("Wrong builtin ID");
2576 }
2577
2579 { Dst.elem<T>(I) = static_cast<T>(Result); });
2580 }
2581 Dst.initializeAllElements();
2582
2583 return true;
2584}
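// Illustrative (not part of the original file): the scalar form folds to a
// plain max/min, e.g. __builtin_elementwise_max(5, -3) == 5; the vector form
// applies the same operation lane by lane.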
2585
2587 const CallExpr *Call,
2588 unsigned BuiltinID) {
2589 assert(Call->getArg(0)->getType()->isVectorType() &&
2590 Call->getArg(1)->getType()->isVectorType());
2591 const Pointer &RHS = S.Stk.pop<Pointer>();
2592 const Pointer &LHS = S.Stk.pop<Pointer>();
2593 const Pointer &Dst = S.Stk.peek<Pointer>();
2594
2595 const auto *VT = Call->getArg(0)->getType()->castAs<VectorType>();
2596 PrimType ElemT = *S.getContext().classify(VT->getElementType());
2597 unsigned SourceLen = VT->getNumElements();
2598
2599 PrimType DstElemT = *S.getContext().classify(
2600 Call->getType()->castAs<VectorType>()->getElementType());
2601 unsigned DstElem = 0;
2602 for (unsigned I = 0; I != SourceLen; I += 2) {
2603 APSInt Elem1;
2604 APSInt Elem2;
2606 Elem1 = LHS.elem<T>(I).toAPSInt();
2607 Elem2 = RHS.elem<T>(I).toAPSInt();
2608 });
2609
2610 APSInt Result;
2611 switch (BuiltinID) {
2612 case clang::X86::BI__builtin_ia32_pmuludq128:
2613 case clang::X86::BI__builtin_ia32_pmuludq256:
2614 case clang::X86::BI__builtin_ia32_pmuludq512:
2615 Result = APSInt(llvm::APIntOps::muluExtended(Elem1, Elem2),
2616 /*IsUnsigned=*/true);
2617 break;
2618 case clang::X86::BI__builtin_ia32_pmuldq128:
2619 case clang::X86::BI__builtin_ia32_pmuldq256:
2620 case clang::X86::BI__builtin_ia32_pmuldq512:
2621 Result = APSInt(llvm::APIntOps::mulsExtended(Elem1, Elem2),
2622 /*IsUnsigned=*/false);
2623 break;
2624 }
2625 INT_TYPE_SWITCH_NO_BOOL(DstElemT,
2626 { Dst.elem<T>(DstElem) = static_cast<T>(Result); });
2627 ++DstElem;
2628 }
2629
2630 Dst.initializeAllElements();
2631 return true;
2632}
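// Illustrative (not part of the original file): pmuludq multiplies the
// even-indexed 32-bit lanes as full 64-bit unsigned products, e.g. for
//   A = {0xFFFFFFFF, 7, 2, 9} and B = {2, 11, 3, 13}
//   _mm_mul_epu32(A, B) == {0x1FFFFFFFE, 6} as two 64-bit lanes.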
2633
2635 InterpState &S, CodePtr OpPC, const CallExpr *Call,
2636 llvm::function_ref<APFloat(const APFloat &, const APFloat &,
2637 const APFloat &, llvm::RoundingMode)>
2638 Fn) {
2639 assert(Call->getNumArgs() == 3);
2640
2641 FPOptions FPO = Call->getFPFeaturesInEffect(S.Ctx.getLangOpts());
2642 llvm::RoundingMode RM = getRoundingMode(FPO);
2643 const QualType Arg1Type = Call->getArg(0)->getType();
2644 const QualType Arg2Type = Call->getArg(1)->getType();
2645 const QualType Arg3Type = Call->getArg(2)->getType();
2646
2647 // Non-vector floating point types.
2648 if (!Arg1Type->isVectorType()) {
2649 assert(!Arg2Type->isVectorType());
2650 assert(!Arg3Type->isVectorType());
2651 (void)Arg2Type;
2652 (void)Arg3Type;
2653
2654 const Floating &Z = S.Stk.pop<Floating>();
2655 const Floating &Y = S.Stk.pop<Floating>();
2656 const Floating &X = S.Stk.pop<Floating>();
2657 APFloat F = Fn(X.getAPFloat(), Y.getAPFloat(), Z.getAPFloat(), RM);
2658 Floating Result = S.allocFloat(X.getSemantics());
2659 Result.copy(F);
2660 S.Stk.push<Floating>(Result);
2661 return true;
2662 }
2663
2664 // Vector type.
2665 assert(Arg1Type->isVectorType() && Arg2Type->isVectorType() &&
2666 Arg3Type->isVectorType());
2667
2668 const VectorType *VecT = Arg1Type->castAs<VectorType>();
2669 const QualType ElemT = VecT->getElementType();
2670 unsigned NumElems = VecT->getNumElements();
2671
2672 assert(ElemT == Arg2Type->castAs<VectorType>()->getElementType() &&
2673 ElemT == Arg3Type->castAs<VectorType>()->getElementType());
2674 assert(NumElems == Arg2Type->castAs<VectorType>()->getNumElements() &&
2675 NumElems == Arg3Type->castAs<VectorType>()->getNumElements());
2676 assert(ElemT->isRealFloatingType());
2677 (void)ElemT;
2678
2679 const Pointer &VZ = S.Stk.pop<Pointer>();
2680 const Pointer &VY = S.Stk.pop<Pointer>();
2681 const Pointer &VX = S.Stk.pop<Pointer>();
2682 const Pointer &Dst = S.Stk.peek<Pointer>();
2683 for (unsigned I = 0; I != NumElems; ++I) {
2684 using T = PrimConv<PT_Float>::T;
2685 APFloat X = VX.elem<T>(I).getAPFloat();
2686 APFloat Y = VY.elem<T>(I).getAPFloat();
2687 APFloat Z = VZ.elem<T>(I).getAPFloat();
2688 APFloat F = Fn(X, Y, Z, RM);
2689 Dst.elem<Floating>(I) = Floating(F);
2690 }
2691 Dst.initializeAllElements();
2692 return true;
2693}
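// Illustrative (not part of the original file): per element the callback
// computes a fused multiply-add with a single rounding of the exact
// product-plus-addend, e.g. Fn(2.0, 3.0, 1.0, RM) == 2.0 * 3.0 + 1.0 == 7.0.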
2694
2695/// AVX512 predicated move: "Result = Mask[] ? LHS[] : RHS[]".
2697 const CallExpr *Call) {
2698 const Pointer &RHS = S.Stk.pop<Pointer>();
2699 const Pointer &LHS = S.Stk.pop<Pointer>();
2700 APSInt Mask = popToAPSInt(S, Call->getArg(0));
2701 const Pointer &Dst = S.Stk.peek<Pointer>();
2702
2703 assert(LHS.getNumElems() == RHS.getNumElems());
2704 assert(LHS.getNumElems() == Dst.getNumElems());
2705 unsigned NumElems = LHS.getNumElems();
2706 PrimType ElemT = LHS.getFieldDesc()->getPrimType();
2707 PrimType DstElemT = Dst.getFieldDesc()->getPrimType();
2708
2709 for (unsigned I = 0; I != NumElems; ++I) {
2710 if (ElemT == PT_Float) {
2711 assert(DstElemT == PT_Float);
2712 Dst.elem<Floating>(I) =
2713 Mask[I] ? LHS.elem<Floating>(I) : RHS.elem<Floating>(I);
2714 } else {
2715 APSInt Elem;
2716 INT_TYPE_SWITCH(ElemT, {
2717 Elem = Mask[I] ? LHS.elem<T>(I).toAPSInt() : RHS.elem<T>(I).toAPSInt();
2718 });
2719 INT_TYPE_SWITCH_NO_BOOL(DstElemT,
2720 { Dst.elem<T>(I) = static_cast<T>(Elem); });
2721 }
2722 }
2723 Dst.initializeAllElements();
2724
2725 return true;
2726}
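// Illustrative (not part of the original file): with Mask == 0b0101 on a
// 4-element vector, lanes 0 and 2 are taken from LHS and lanes 1 and 3 from
// RHS.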
2727
2729 const CallExpr *Call) {
2730 APSInt Mask = popToAPSInt(S, Call->getArg(2));
2731 const Pointer &TrueVec = S.Stk.pop<Pointer>();
2732 const Pointer &FalseVec = S.Stk.pop<Pointer>();
2733 const Pointer &Dst = S.Stk.peek<Pointer>();
2734
2735 assert(FalseVec.getNumElems() == TrueVec.getNumElems());
2736 assert(FalseVec.getNumElems() == Dst.getNumElems());
2737 unsigned NumElems = FalseVec.getNumElems();
2738 PrimType ElemT = FalseVec.getFieldDesc()->getPrimType();
2739 PrimType DstElemT = Dst.getFieldDesc()->getPrimType();
2740
2741 for (unsigned I = 0; I != NumElems; ++I) {
2742 bool MaskBit = Mask[I % 8];
2743 if (ElemT == PT_Float) {
2744 assert(DstElemT == PT_Float);
2745 Dst.elem<Floating>(I) =
2746 MaskBit ? TrueVec.elem<Floating>(I) : FalseVec.elem<Floating>(I);
2747 } else {
2748 assert(DstElemT == ElemT);
2749 INT_TYPE_SWITCH_NO_BOOL(DstElemT, {
2750 Dst.elem<T>(I) =
2751 static_cast<T>(MaskBit ? TrueVec.elem<T>(I).toAPSInt()
2752 : FalseVec.elem<T>(I).toAPSInt());
2753 });
2754 }
2755 }
2756 Dst.initializeAllElements();
2757
2758 return true;
2759}
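// Illustrative (not part of the original file): the blend immediate repeats
// every 8 lanes, e.g. 0b0011 on a 4-float vector takes lanes 0 and 1 from
// TrueVec and lanes 2 and 3 from FalseVec.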
2760
2762 const CallExpr *Call, bool IsShufHW) {
2763 assert(Call->getNumArgs() == 2 && "masked forms handled via select*");
2764 APSInt ControlImm = popToAPSInt(S, Call->getArg(1));
2765 const Pointer &Src = S.Stk.pop<Pointer>();
2766 const Pointer &Dst = S.Stk.peek<Pointer>();
2767
2768 unsigned NumElems = Dst.getNumElems();
2769 PrimType ElemT = Dst.getFieldDesc()->getPrimType();
2770
2771 unsigned ElemBits = static_cast<unsigned>(primSize(ElemT) * 8);
2772 if (ElemBits != 16 && ElemBits != 32)
2773 return false;
2774
2775 unsigned LaneElts = 128u / ElemBits;
2776 assert(LaneElts && (NumElems % LaneElts == 0));
2777
2778 uint8_t Ctl = static_cast<uint8_t>(ControlImm.getZExtValue());
2779
2780 for (unsigned Idx = 0; Idx != NumElems; Idx++) {
2781 unsigned LaneBase = (Idx / LaneElts) * LaneElts;
2782 unsigned LaneIdx = Idx % LaneElts;
2783 unsigned SrcIdx = Idx;
2784 unsigned Sel = (Ctl >> (2 * (LaneIdx & 0x3))) & 0x3;
2785 if (ElemBits == 32) {
2786 SrcIdx = LaneBase + Sel;
2787 } else {
2788 constexpr unsigned HalfSize = 4;
2789 bool InHigh = LaneIdx >= HalfSize;
2790 if (!IsShufHW && !InHigh) {
2791 SrcIdx = LaneBase + Sel;
2792 } else if (IsShufHW && InHigh) {
2793 SrcIdx = LaneBase + HalfSize + Sel;
2794 }
2795 }
2796
2797 INT_TYPE_SWITCH_NO_BOOL(ElemT, { Dst.elem<T>(Idx) = Src.elem<T>(SrcIdx); });
2798 }
2799 Dst.initializeAllElements();
2800 return true;
2801}
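// Illustrative (not part of the original file): for a 32-bit-element shuffle
// the control byte 0x1B (0b00'01'10'11) selects source elements 3, 2, 1, 0,
// i.e. it reverses each 128-bit lane.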
2802
2804 InterpState &S, CodePtr OpPC, const CallExpr *Call,
2805 llvm::function_ref<APInt(const APSInt &, const APSInt &, const APSInt &)>
2806 Fn) {
2807 assert(Call->getNumArgs() == 3);
2808
2809 QualType Arg0Type = Call->getArg(0)->getType();
2810 QualType Arg2Type = Call->getArg(2)->getType();
2811 // Non-vector integer types.
2812 if (!Arg0Type->isVectorType()) {
2813 const APSInt &Op2 = popToAPSInt(S, Arg2Type);
2814 const APSInt &Op1 = popToAPSInt(S, Call->getArg(1));
2815 const APSInt &Op0 = popToAPSInt(S, Arg0Type);
2816 APSInt Result = APSInt(Fn(Op0, Op1, Op2), Op0.isUnsigned());
2817 pushInteger(S, Result, Call->getType());
2818 return true;
2819 }
2820
2821 const auto *VecT = Arg0Type->castAs<VectorType>();
2822 const PrimType &ElemT = *S.getContext().classify(VecT->getElementType());
2823 unsigned NumElems = VecT->getNumElements();
2824 bool DestUnsigned = Call->getType()->isUnsignedIntegerOrEnumerationType();
2825
2826 // Vector + Vector + Scalar case.
2827 if (!Arg2Type->isVectorType()) {
2828 APSInt Op2 = popToAPSInt(S, Arg2Type);
2829
2830 const Pointer &Op1 = S.Stk.pop<Pointer>();
2831 const Pointer &Op0 = S.Stk.pop<Pointer>();
2832 const Pointer &Dst = S.Stk.peek<Pointer>();
2833 for (unsigned I = 0; I != NumElems; ++I) {
2835 Dst.elem<T>(I) = static_cast<T>(APSInt(
2836 Fn(Op0.elem<T>(I).toAPSInt(), Op1.elem<T>(I).toAPSInt(), Op2),
2837 DestUnsigned));
2838 });
2839 }
2840 Dst.initializeAllElements();
2841
2842 return true;
2843 }
2844
2845 // Vector type.
2846 const Pointer &Op2 = S.Stk.pop<Pointer>();
2847 const Pointer &Op1 = S.Stk.pop<Pointer>();
2848 const Pointer &Op0 = S.Stk.pop<Pointer>();
2849 const Pointer &Dst = S.Stk.peek<Pointer>();
2850 for (unsigned I = 0; I != NumElems; ++I) {
2851 APSInt Val0, Val1, Val2;
2853 Val0 = Op0.elem<T>(I).toAPSInt();
2854 Val1 = Op1.elem<T>(I).toAPSInt();
2855 Val2 = Op2.elem<T>(I).toAPSInt();
2856 });
2857 APSInt Result = APSInt(Fn(Val0, Val1, Val2), Val0.isUnsigned());
2859 { Dst.elem<T>(I) = static_cast<T>(Result); });
2860 }
2861 Dst.initializeAllElements();
2862
2863 return true;
2864}
2865
2867 const CallExpr *Call,
2868 unsigned ID) {
2869 assert(Call->getNumArgs() == 3);
2870
2871 APSInt ImmAPS = popToAPSInt(S, Call->getArg(2));
2872 uint64_t Index = ImmAPS.getZExtValue();
2873
2874 const Pointer &SubVec = S.Stk.pop<Pointer>();
2875 if (!SubVec.getFieldDesc()->isPrimitiveArray())
2876 return false;
2877
2878 const Pointer &BaseVec = S.Stk.pop<Pointer>();
2879 if (!BaseVec.getFieldDesc()->isPrimitiveArray())
2880 return false;
2881
2882 const Pointer &Dst = S.Stk.peek<Pointer>();
2883
2884 unsigned BaseElements = BaseVec.getNumElems();
2885 unsigned SubElements = SubVec.getNumElems();
2886
2887 assert(SubElements != 0 && BaseElements != 0 &&
2888 (BaseElements % SubElements) == 0);
2889
2890 unsigned NumLanes = BaseElements / SubElements;
2891 unsigned Lane = static_cast<unsigned>(Index % NumLanes);
2892 unsigned InsertPos = Lane * SubElements;
2893
2894 PrimType ElemPT = BaseVec.getFieldDesc()->getPrimType();
2895
2896 TYPE_SWITCH(ElemPT, {
2897 for (unsigned I = 0; I != BaseElements; ++I)
2898 Dst.elem<T>(I) = BaseVec.elem<T>(I);
2899 for (unsigned I = 0; I != SubElements; ++I)
2900 Dst.elem<T>(InsertPos + I) = SubVec.elem<T>(I);
2901 });
2902
2903 Dst.initializeAllElements();
2904 return true;
2905}
2906
2908 const CallExpr *Call, bool MaskZ) {
2909 assert(Call->getNumArgs() == 5);
2910
2911 APInt U = popToAPSInt(S, Call->getArg(4)); // Lane mask
2912 APInt Imm = popToAPSInt(S, Call->getArg(3)); // Ternary truth table
2913 const Pointer &C = S.Stk.pop<Pointer>();
2914 const Pointer &B = S.Stk.pop<Pointer>();
2915 const Pointer &A = S.Stk.pop<Pointer>();
2916 const Pointer &Dst = S.Stk.peek<Pointer>();
2917
2918 unsigned DstLen = A.getNumElems();
2919 const QualType ElemQT = getElemType(A);
2920 const OptPrimType ElemPT = S.getContext().classify(ElemQT);
2921 unsigned LaneWidth = S.getASTContext().getTypeSize(ElemQT);
2922 bool DstUnsigned = ElemQT->isUnsignedIntegerOrEnumerationType();
2923
2924 INT_TYPE_SWITCH_NO_BOOL(*ElemPT, {
2925 for (unsigned I = 0; I != DstLen; ++I) {
2926 APInt ALane = A.elem<T>(I).toAPSInt();
2927 APInt BLane = B.elem<T>(I).toAPSInt();
2928 APInt CLane = C.elem<T>(I).toAPSInt();
2929 APInt RLane(LaneWidth, 0);
2930 if (U[I]) { // If lane not masked, compute ternary logic.
2931 for (unsigned Bit = 0; Bit != LaneWidth; ++Bit) {
2932 unsigned ABit = ALane[Bit];
2933 unsigned BBit = BLane[Bit];
2934 unsigned CBit = CLane[Bit];
2935 unsigned Idx = (ABit << 2) | (BBit << 1) | (CBit);
2936 RLane.setBitVal(Bit, Imm[Idx]);
2937 }
2938 Dst.elem<T>(I) = static_cast<T>(APSInt(RLane, DstUnsigned));
2939 } else if (MaskZ) { // If zero masked, zero the lane.
2940 Dst.elem<T>(I) = static_cast<T>(APSInt(RLane, DstUnsigned));
2941 } else { // Just masked, put in A lane.
2942 Dst.elem<T>(I) = static_cast<T>(APSInt(ALane, DstUnsigned));
2943 }
2944 }
2945 });
2946 Dst.initializeAllElements();
2947 return true;
2948}
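// Illustrative (not part of the original file): the 8-bit immediate is a
// truth table indexed by (ABit << 2) | (BBit << 1) | CBit. For example,
// Imm == 0xE8 computes the bitwise majority of A, B and C: bit i of 0xE8 is
// set exactly for the indices where at least two of the three inputs are 1.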
2949
2951 const CallExpr *Call, unsigned ID) {
2952 assert(Call->getNumArgs() == 2);
2953
2954 APSInt ImmAPS = popToAPSInt(S, Call->getArg(1));
2955 const Pointer &Vec = S.Stk.pop<Pointer>();
2956 if (!Vec.getFieldDesc()->isPrimitiveArray())
2957 return false;
2958
2959 unsigned NumElems = Vec.getNumElems();
2960 unsigned Index =
2961 static_cast<unsigned>(ImmAPS.getZExtValue() & (NumElems - 1));
2962
2963 PrimType ElemPT = Vec.getFieldDesc()->getPrimType();
2964 // FIXME(#161685): Replace float+int split with a numeric-only type switch
2965 if (ElemPT == PT_Float) {
2966 S.Stk.push<Floating>(Vec.elem<Floating>(Index));
2967 return true;
2968 }
2969 INT_TYPE_SWITCH_NO_BOOL(ElemPT, {
2970 APSInt V = Vec.elem<T>(Index).toAPSInt();
2971 pushInteger(S, V, Call->getType());
2972 });
2973
2974 return true;
2975}
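// Illustrative (not part of the original file): the element index is masked
// to the vector length (a power of two) above, so an immediate of 5 on a
// 4-element vector extracts element 1.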
2976
2978 const CallExpr *Call, unsigned ID) {
2979 assert(Call->getNumArgs() == 3);
2980
2981 APSInt ImmAPS = popToAPSInt(S, Call->getArg(2));
2982 APSInt ValAPS = popToAPSInt(S, Call->getArg(1));
2983
2984 const Pointer &Base = S.Stk.pop<Pointer>();
2985 if (!Base.getFieldDesc()->isPrimitiveArray())
2986 return false;
2987
2988 const Pointer &Dst = S.Stk.peek<Pointer>();
2989
2990 unsigned NumElems = Base.getNumElems();
2991 unsigned Index =
2992 static_cast<unsigned>(ImmAPS.getZExtValue() & (NumElems - 1));
2993
2994 PrimType ElemPT = Base.getFieldDesc()->getPrimType();
2995 INT_TYPE_SWITCH_NO_BOOL(ElemPT, {
2996 for (unsigned I = 0; I != NumElems; ++I)
2997 Dst.elem<T>(I) = Base.elem<T>(I);
2998 Dst.elem<T>(Index) = static_cast<T>(ValAPS);
2999 });
3000
3001 Dst.initializeAllElements();
3002 return true;
3003}
3004
3005bool InterpretBuiltin(InterpState &S, CodePtr OpPC, const CallExpr *Call,
3006 uint32_t BuiltinID) {
3007 if (!S.getASTContext().BuiltinInfo.isConstantEvaluated(BuiltinID))
3008 return Invalid(S, OpPC);
3009
3010 const InterpFrame *Frame = S.Current;
3011 switch (BuiltinID) {
3012 case Builtin::BI__builtin_is_constant_evaluated:
3014
3015 case Builtin::BI__builtin_assume:
3016 case Builtin::BI__assume:
3017 return interp__builtin_assume(S, OpPC, Frame, Call);
3018
3019 case Builtin::BI__builtin_strcmp:
3020 case Builtin::BIstrcmp:
3021 case Builtin::BI__builtin_strncmp:
3022 case Builtin::BIstrncmp:
3023 case Builtin::BI__builtin_wcsncmp:
3024 case Builtin::BIwcsncmp:
3025 case Builtin::BI__builtin_wcscmp:
3026 case Builtin::BIwcscmp:
3027 return interp__builtin_strcmp(S, OpPC, Frame, Call, BuiltinID);
3028
3029 case Builtin::BI__builtin_strlen:
3030 case Builtin::BIstrlen:
3031 case Builtin::BI__builtin_wcslen:
3032 case Builtin::BIwcslen:
3033 return interp__builtin_strlen(S, OpPC, Frame, Call, BuiltinID);
3034
3035 case Builtin::BI__builtin_nan:
3036 case Builtin::BI__builtin_nanf:
3037 case Builtin::BI__builtin_nanl:
3038 case Builtin::BI__builtin_nanf16:
3039 case Builtin::BI__builtin_nanf128:
3040 return interp__builtin_nan(S, OpPC, Frame, Call, /*Signaling=*/false);
3041
3042 case Builtin::BI__builtin_nans:
3043 case Builtin::BI__builtin_nansf:
3044 case Builtin::BI__builtin_nansl:
3045 case Builtin::BI__builtin_nansf16:
3046 case Builtin::BI__builtin_nansf128:
3047 return interp__builtin_nan(S, OpPC, Frame, Call, /*Signaling=*/true);
3048
3049 case Builtin::BI__builtin_huge_val:
3050 case Builtin::BI__builtin_huge_valf:
3051 case Builtin::BI__builtin_huge_vall:
3052 case Builtin::BI__builtin_huge_valf16:
3053 case Builtin::BI__builtin_huge_valf128:
3054 case Builtin::BI__builtin_inf:
3055 case Builtin::BI__builtin_inff:
3056 case Builtin::BI__builtin_infl:
3057 case Builtin::BI__builtin_inff16:
3058 case Builtin::BI__builtin_inff128:
3059 return interp__builtin_inf(S, OpPC, Frame, Call);
3060
3061 case Builtin::BI__builtin_copysign:
3062 case Builtin::BI__builtin_copysignf:
3063 case Builtin::BI__builtin_copysignl:
3064 case Builtin::BI__builtin_copysignf128:
3065 return interp__builtin_copysign(S, OpPC, Frame);
3066
3067 case Builtin::BI__builtin_fmin:
3068 case Builtin::BI__builtin_fminf:
3069 case Builtin::BI__builtin_fminl:
3070 case Builtin::BI__builtin_fminf16:
3071 case Builtin::BI__builtin_fminf128:
3072 return interp__builtin_fmin(S, OpPC, Frame, /*IsNumBuiltin=*/false);
3073
3074 case Builtin::BI__builtin_fminimum_num:
3075 case Builtin::BI__builtin_fminimum_numf:
3076 case Builtin::BI__builtin_fminimum_numl:
3077 case Builtin::BI__builtin_fminimum_numf16:
3078 case Builtin::BI__builtin_fminimum_numf128:
3079 return interp__builtin_fmin(S, OpPC, Frame, /*IsNumBuiltin=*/true);
3080
3081 case Builtin::BI__builtin_fmax:
3082 case Builtin::BI__builtin_fmaxf:
3083 case Builtin::BI__builtin_fmaxl:
3084 case Builtin::BI__builtin_fmaxf16:
3085 case Builtin::BI__builtin_fmaxf128:
3086 return interp__builtin_fmax(S, OpPC, Frame, /*IsNumBuiltin=*/false);
3087
3088 case Builtin::BI__builtin_fmaximum_num:
3089 case Builtin::BI__builtin_fmaximum_numf:
3090 case Builtin::BI__builtin_fmaximum_numl:
3091 case Builtin::BI__builtin_fmaximum_numf16:
3092 case Builtin::BI__builtin_fmaximum_numf128:
3093 return interp__builtin_fmax(S, OpPC, Frame, /*IsNumBuiltin=*/true);
3094
3095 case Builtin::BI__builtin_isnan:
3096 return interp__builtin_isnan(S, OpPC, Frame, Call);
3097
3098 case Builtin::BI__builtin_issignaling:
3099 return interp__builtin_issignaling(S, OpPC, Frame, Call);
3100
3101 case Builtin::BI__builtin_isinf:
3102 return interp__builtin_isinf(S, OpPC, Frame, /*Sign=*/false, Call);
3103
3104 case Builtin::BI__builtin_isinf_sign:
3105 return interp__builtin_isinf(S, OpPC, Frame, /*Sign=*/true, Call);
3106
3107 case Builtin::BI__builtin_isfinite:
3108 return interp__builtin_isfinite(S, OpPC, Frame, Call);
3109
3110 case Builtin::BI__builtin_isnormal:
3111 return interp__builtin_isnormal(S, OpPC, Frame, Call);
3112
3113 case Builtin::BI__builtin_issubnormal:
3114 return interp__builtin_issubnormal(S, OpPC, Frame, Call);
3115
3116 case Builtin::BI__builtin_iszero:
3117 return interp__builtin_iszero(S, OpPC, Frame, Call);
3118
3119 case Builtin::BI__builtin_signbit:
3120 case Builtin::BI__builtin_signbitf:
3121 case Builtin::BI__builtin_signbitl:
3122 return interp__builtin_signbit(S, OpPC, Frame, Call);
3123
3124 case Builtin::BI__builtin_isgreater:
3125 case Builtin::BI__builtin_isgreaterequal:
3126 case Builtin::BI__builtin_isless:
3127 case Builtin::BI__builtin_islessequal:
3128 case Builtin::BI__builtin_islessgreater:
3129 case Builtin::BI__builtin_isunordered:
3130 return interp_floating_comparison(S, OpPC, Call, BuiltinID);
3131
3132 case Builtin::BI__builtin_isfpclass:
3133 return interp__builtin_isfpclass(S, OpPC, Frame, Call);
3134
3135 case Builtin::BI__builtin_fpclassify:
3136 return interp__builtin_fpclassify(S, OpPC, Frame, Call);
3137
3138 case Builtin::BI__builtin_fabs:
3139 case Builtin::BI__builtin_fabsf:
3140 case Builtin::BI__builtin_fabsl:
3141 case Builtin::BI__builtin_fabsf128:
3142 return interp__builtin_fabs(S, OpPC, Frame);
3143
3144 case Builtin::BI__builtin_abs:
3145 case Builtin::BI__builtin_labs:
3146 case Builtin::BI__builtin_llabs:
3147 return interp__builtin_abs(S, OpPC, Frame, Call);
3148
3149 case Builtin::BI__builtin_popcount:
3150 case Builtin::BI__builtin_popcountl:
3151 case Builtin::BI__builtin_popcountll:
3152 case Builtin::BI__builtin_popcountg:
3153 case Builtin::BI__popcnt16: // Microsoft variants of popcount
3154 case Builtin::BI__popcnt:
3155 case Builtin::BI__popcnt64:
3156 return interp__builtin_popcount(S, OpPC, Frame, Call);
3157
3158 case Builtin::BI__builtin_parity:
3159 case Builtin::BI__builtin_parityl:
3160 case Builtin::BI__builtin_parityll:
3161 return interp__builtin_parity(S, OpPC, Frame, Call);
3162
3163 case Builtin::BI__builtin_clrsb:
3164 case Builtin::BI__builtin_clrsbl:
3165 case Builtin::BI__builtin_clrsbll:
3166 return interp__builtin_clrsb(S, OpPC, Frame, Call);
3167
3168 case Builtin::BI__builtin_bitreverse8:
3169 case Builtin::BI__builtin_bitreverse16:
3170 case Builtin::BI__builtin_bitreverse32:
3171 case Builtin::BI__builtin_bitreverse64:
3172 return interp__builtin_bitreverse(S, OpPC, Frame, Call);
3173
3174 case Builtin::BI__builtin_classify_type:
3175 return interp__builtin_classify_type(S, OpPC, Frame, Call);
3176
3177 case Builtin::BI__builtin_expect:
3178 case Builtin::BI__builtin_expect_with_probability:
3179 return interp__builtin_expect(S, OpPC, Frame, Call);
3180
3181 case Builtin::BI__builtin_rotateleft8:
3182 case Builtin::BI__builtin_rotateleft16:
3183 case Builtin::BI__builtin_rotateleft32:
3184 case Builtin::BI__builtin_rotateleft64:
3185 case Builtin::BI_rotl8: // Microsoft variants of rotate left
3186 case Builtin::BI_rotl16:
3187 case Builtin::BI_rotl:
3188 case Builtin::BI_lrotl:
3189 case Builtin::BI_rotl64:
3191 S, OpPC, Call, [](const APSInt &Value, const APSInt &Amount) -> APInt {
3192 return Value.rotl(Amount);
3193 });
3194
3195 case Builtin::BI__builtin_rotateright8:
3196 case Builtin::BI__builtin_rotateright16:
3197 case Builtin::BI__builtin_rotateright32:
3198 case Builtin::BI__builtin_rotateright64:
3199 case Builtin::BI_rotr8: // Microsoft variants of rotate right
3200 case Builtin::BI_rotr16:
3201 case Builtin::BI_rotr:
3202 case Builtin::BI_lrotr:
3203 case Builtin::BI_rotr64:
3205 S, OpPC, Call, [](const APSInt &Value, const APSInt &Amount) -> APInt {
3206 return Value.rotr(Amount);
3207 });
3208
3209 case Builtin::BI__builtin_ffs:
3210 case Builtin::BI__builtin_ffsl:
3211 case Builtin::BI__builtin_ffsll:
3212 return interp__builtin_ffs(S, OpPC, Frame, Call);
3213
3214 case Builtin::BIaddressof:
3215 case Builtin::BI__addressof:
3216 case Builtin::BI__builtin_addressof:
3217 assert(isNoopBuiltin(BuiltinID));
3218 return interp__builtin_addressof(S, OpPC, Frame, Call);
3219
3220 case Builtin::BIas_const:
3221 case Builtin::BIforward:
3222 case Builtin::BIforward_like:
3223 case Builtin::BImove:
3224 case Builtin::BImove_if_noexcept:
3225 assert(isNoopBuiltin(BuiltinID));
3226 return interp__builtin_move(S, OpPC, Frame, Call);
3227
3228 case Builtin::BI__builtin_eh_return_data_regno:
3230
3231 case Builtin::BI__builtin_launder:
3232 assert(isNoopBuiltin(BuiltinID));
3233 return true;
3234
3235 case Builtin::BI__builtin_add_overflow:
3236 case Builtin::BI__builtin_sub_overflow:
3237 case Builtin::BI__builtin_mul_overflow:
3238 case Builtin::BI__builtin_sadd_overflow:
3239 case Builtin::BI__builtin_uadd_overflow:
3240 case Builtin::BI__builtin_uaddl_overflow:
3241 case Builtin::BI__builtin_uaddll_overflow:
3242 case Builtin::BI__builtin_usub_overflow:
3243 case Builtin::BI__builtin_usubl_overflow:
3244 case Builtin::BI__builtin_usubll_overflow:
3245 case Builtin::BI__builtin_umul_overflow:
3246 case Builtin::BI__builtin_umull_overflow:
3247 case Builtin::BI__builtin_umulll_overflow:
3248 case Builtin::BI__builtin_saddl_overflow:
3249 case Builtin::BI__builtin_saddll_overflow:
3250 case Builtin::BI__builtin_ssub_overflow:
3251 case Builtin::BI__builtin_ssubl_overflow:
3252 case Builtin::BI__builtin_ssubll_overflow:
3253 case Builtin::BI__builtin_smul_overflow:
3254 case Builtin::BI__builtin_smull_overflow:
3255 case Builtin::BI__builtin_smulll_overflow:
3256 return interp__builtin_overflowop(S, OpPC, Call, BuiltinID);
3257
3258 case Builtin::BI__builtin_addcb:
3259 case Builtin::BI__builtin_addcs:
3260 case Builtin::BI__builtin_addc:
3261 case Builtin::BI__builtin_addcl:
3262 case Builtin::BI__builtin_addcll:
3263 case Builtin::BI__builtin_subcb:
3264 case Builtin::BI__builtin_subcs:
3265 case Builtin::BI__builtin_subc:
3266 case Builtin::BI__builtin_subcl:
3267 case Builtin::BI__builtin_subcll:
3268 return interp__builtin_carryop(S, OpPC, Frame, Call, BuiltinID);
3269
3270 case Builtin::BI__builtin_clz:
3271 case Builtin::BI__builtin_clzl:
3272 case Builtin::BI__builtin_clzll:
3273 case Builtin::BI__builtin_clzs:
3274 case Builtin::BI__builtin_clzg:
3275 case Builtin::BI__lzcnt16: // Microsoft variants of count leading-zeroes
3276 case Builtin::BI__lzcnt:
3277 case Builtin::BI__lzcnt64:
3278 return interp__builtin_clz(S, OpPC, Frame, Call, BuiltinID);
3279
3280 case Builtin::BI__builtin_ctz:
3281 case Builtin::BI__builtin_ctzl:
3282 case Builtin::BI__builtin_ctzll:
3283 case Builtin::BI__builtin_ctzs:
3284 case Builtin::BI__builtin_ctzg:
3285 return interp__builtin_ctz(S, OpPC, Frame, Call, BuiltinID);
3286
3287 case Builtin::BI__builtin_elementwise_clzg:
3288 case Builtin::BI__builtin_elementwise_ctzg:
3290 BuiltinID);
3291
3292 case Builtin::BI__builtin_bswap16:
3293 case Builtin::BI__builtin_bswap32:
3294 case Builtin::BI__builtin_bswap64:
3295 return interp__builtin_bswap(S, OpPC, Frame, Call);
3296
3297 case Builtin::BI__atomic_always_lock_free:
3298 case Builtin::BI__atomic_is_lock_free:
3299 return interp__builtin_atomic_lock_free(S, OpPC, Frame, Call, BuiltinID);
3300
3301 case Builtin::BI__c11_atomic_is_lock_free:
3303
3304 case Builtin::BI__builtin_complex:
3305 return interp__builtin_complex(S, OpPC, Frame, Call);
3306
3307 case Builtin::BI__builtin_is_aligned:
3308 case Builtin::BI__builtin_align_up:
3309 case Builtin::BI__builtin_align_down:
3310 return interp__builtin_is_aligned_up_down(S, OpPC, Frame, Call, BuiltinID);
3311
3312 case Builtin::BI__builtin_assume_aligned:
3313 return interp__builtin_assume_aligned(S, OpPC, Frame, Call);
3314
3315 case clang::X86::BI__builtin_ia32_bextr_u32:
3316 case clang::X86::BI__builtin_ia32_bextr_u64:
3317 case clang::X86::BI__builtin_ia32_bextri_u32:
3318 case clang::X86::BI__builtin_ia32_bextri_u64:
3320 S, OpPC, Call, [](const APSInt &Val, const APSInt &Idx) {
3321 unsigned BitWidth = Val.getBitWidth();
3322 uint64_t Shift = Idx.extractBitsAsZExtValue(8, 0);
3323 uint64_t Length = Idx.extractBitsAsZExtValue(8, 8);
3324 if (Length > BitWidth) {
3325 Length = BitWidth;
3326 }
3327
3328 // Handle out of bounds cases.
3329 if (Length == 0 || Shift >= BitWidth)
3330 return APInt(BitWidth, 0);
3331
3332 uint64_t Result = Val.getZExtValue() >> Shift;
3333 Result &= llvm::maskTrailingOnes<uint64_t>(Length);
3334 return APInt(BitWidth, Result);
3335 });
3336
3337 case clang::X86::BI__builtin_ia32_bzhi_si:
3338 case clang::X86::BI__builtin_ia32_bzhi_di:
3340 S, OpPC, Call, [](const APSInt &Val, const APSInt &Idx) {
3341 unsigned BitWidth = Val.getBitWidth();
3342 uint64_t Index = Idx.extractBitsAsZExtValue(8, 0);
3343 APSInt Result = Val;
3344
3345 if (Index < BitWidth)
3346 Result.clearHighBits(BitWidth - Index);
3347
3348 return Result;
3349 });
3350
3351 case clang::X86::BI__builtin_ia32_lzcnt_u16:
3352 case clang::X86::BI__builtin_ia32_lzcnt_u32:
3353 case clang::X86::BI__builtin_ia32_lzcnt_u64:
3355 S, OpPC, Call, [](const APSInt &Src) {
3356 return APInt(Src.getBitWidth(), Src.countLeadingZeros());
3357 });
3358
3359 case clang::X86::BI__builtin_ia32_tzcnt_u16:
3360 case clang::X86::BI__builtin_ia32_tzcnt_u32:
3361 case clang::X86::BI__builtin_ia32_tzcnt_u64:
3363 S, OpPC, Call, [](const APSInt &Src) {
3364 return APInt(Src.getBitWidth(), Src.countTrailingZeros());
3365 });
3366
3367 case clang::X86::BI__builtin_ia32_pdep_si:
3368 case clang::X86::BI__builtin_ia32_pdep_di:
3370 S, OpPC, Call, [](const APSInt &Val, const APSInt &Mask) {
3371 unsigned BitWidth = Val.getBitWidth();
3372 APInt Result = APInt::getZero(BitWidth);
3373
3374 for (unsigned I = 0, P = 0; I != BitWidth; ++I) {
3375 if (Mask[I])
3376 Result.setBitVal(I, Val[P++]);
3377 }
3378
3379 return Result;
3380 });
3381
3382 case clang::X86::BI__builtin_ia32_pext_si:
3383 case clang::X86::BI__builtin_ia32_pext_di:
3385 S, OpPC, Call, [](const APSInt &Val, const APSInt &Mask) {
3386 unsigned BitWidth = Val.getBitWidth();
3387 APInt Result = APInt::getZero(BitWidth);
3388
3389 for (unsigned I = 0, P = 0; I != BitWidth; ++I) {
3390 if (Mask[I])
3391 Result.setBitVal(P++, Val[I]);
3392 }
3393
3394 return Result;
3395 });
3396
3397 case clang::X86::BI__builtin_ia32_addcarryx_u32:
3398 case clang::X86::BI__builtin_ia32_addcarryx_u64:
3399 case clang::X86::BI__builtin_ia32_subborrow_u32:
3400 case clang::X86::BI__builtin_ia32_subborrow_u64:
3402 BuiltinID);
3403
3404 case Builtin::BI__builtin_os_log_format_buffer_size:
3406
3407 case Builtin::BI__builtin_ptrauth_string_discriminator:
3409
3410 case Builtin::BI__noop:
3411 pushInteger(S, 0, Call->getType());
3412 return true;
3413
3414 case Builtin::BI__builtin_operator_new:
3415 return interp__builtin_operator_new(S, OpPC, Frame, Call);
3416
3417 case Builtin::BI__builtin_operator_delete:
3418 return interp__builtin_operator_delete(S, OpPC, Frame, Call);
3419
3420 case Builtin::BI__arithmetic_fence:
3422
3423 case Builtin::BI__builtin_reduce_add:
3424 case Builtin::BI__builtin_reduce_mul:
3425 case Builtin::BI__builtin_reduce_and:
3426 case Builtin::BI__builtin_reduce_or:
3427 case Builtin::BI__builtin_reduce_xor:
3428 case Builtin::BI__builtin_reduce_min:
3429 case Builtin::BI__builtin_reduce_max:
3430 return interp__builtin_vector_reduce(S, OpPC, Call, BuiltinID);
3431
3432 case Builtin::BI__builtin_elementwise_popcount:
3433 case Builtin::BI__builtin_elementwise_bitreverse:
3435 BuiltinID);
3436
3437 case Builtin::BI__builtin_elementwise_abs:
3438 return interp__builtin_elementwise_abs(S, OpPC, Frame, Call, BuiltinID);
3439
3440 case Builtin::BI__builtin_memcpy:
3441 case Builtin::BImemcpy:
3442 case Builtin::BI__builtin_wmemcpy:
3443 case Builtin::BIwmemcpy:
3444 case Builtin::BI__builtin_memmove:
3445 case Builtin::BImemmove:
3446 case Builtin::BI__builtin_wmemmove:
3447 case Builtin::BIwmemmove:
3448 return interp__builtin_memcpy(S, OpPC, Frame, Call, BuiltinID);
3449
3450 case Builtin::BI__builtin_memcmp:
3451 case Builtin::BImemcmp:
3452 case Builtin::BI__builtin_bcmp:
3453 case Builtin::BIbcmp:
3454 case Builtin::BI__builtin_wmemcmp:
3455 case Builtin::BIwmemcmp:
3456 return interp__builtin_memcmp(S, OpPC, Frame, Call, BuiltinID);
3457
3458 case Builtin::BImemchr:
3459 case Builtin::BI__builtin_memchr:
3460 case Builtin::BIstrchr:
3461 case Builtin::BI__builtin_strchr:
3462 case Builtin::BIwmemchr:
3463 case Builtin::BI__builtin_wmemchr:
3464 case Builtin::BIwcschr:
3465 case Builtin::BI__builtin_wcschr:
3466 case Builtin::BI__builtin_char_memchr:
3467 return interp__builtin_memchr(S, OpPC, Call, BuiltinID);
3468
3469 case Builtin::BI__builtin_object_size:
3470 case Builtin::BI__builtin_dynamic_object_size:
3471 return interp__builtin_object_size(S, OpPC, Frame, Call);
3472
3473 case Builtin::BI__builtin_is_within_lifetime:
3475
3476 case Builtin::BI__builtin_elementwise_add_sat:
3478 S, OpPC, Call, [](const APSInt &LHS, const APSInt &RHS) {
3479 return LHS.isSigned() ? LHS.sadd_sat(RHS) : LHS.uadd_sat(RHS);
3480 });
3481
3482 case Builtin::BI__builtin_elementwise_sub_sat:
3484 S, OpPC, Call, [](const APSInt &LHS, const APSInt &RHS) {
3485 return LHS.isSigned() ? LHS.ssub_sat(RHS) : LHS.usub_sat(RHS);
3486 });
3487
3488 case clang::X86::BI__builtin_ia32_pavgb128:
3489 case clang::X86::BI__builtin_ia32_pavgw128:
3490 case clang::X86::BI__builtin_ia32_pavgb256:
3491 case clang::X86::BI__builtin_ia32_pavgw256:
3492 case clang::X86::BI__builtin_ia32_pavgb512:
3493 case clang::X86::BI__builtin_ia32_pavgw512:
3495 llvm::APIntOps::avgCeilU);
3496
3497 case clang::X86::BI__builtin_ia32_pmulhuw128:
3498 case clang::X86::BI__builtin_ia32_pmulhuw256:
3499 case clang::X86::BI__builtin_ia32_pmulhuw512:
3501 llvm::APIntOps::mulhu);
3502
3503 case clang::X86::BI__builtin_ia32_pmulhw128:
3504 case clang::X86::BI__builtin_ia32_pmulhw256:
3505 case clang::X86::BI__builtin_ia32_pmulhw512:
3507 llvm::APIntOps::mulhs);
3508
3509 case clang::X86::BI__builtin_ia32_psllv2di:
3510 case clang::X86::BI__builtin_ia32_psllv4di:
3511 case clang::X86::BI__builtin_ia32_psllv4si:
3512 case clang::X86::BI__builtin_ia32_psllv8di:
3513 case clang::X86::BI__builtin_ia32_psllv8hi:
3514 case clang::X86::BI__builtin_ia32_psllv8si:
3515 case clang::X86::BI__builtin_ia32_psllv16hi:
3516 case clang::X86::BI__builtin_ia32_psllv16si:
3517 case clang::X86::BI__builtin_ia32_psllv32hi:
3518 case clang::X86::BI__builtin_ia32_psllwi128:
3519 case clang::X86::BI__builtin_ia32_psllwi256:
3520 case clang::X86::BI__builtin_ia32_psllwi512:
3521 case clang::X86::BI__builtin_ia32_pslldi128:
3522 case clang::X86::BI__builtin_ia32_pslldi256:
3523 case clang::X86::BI__builtin_ia32_pslldi512:
3524 case clang::X86::BI__builtin_ia32_psllqi128:
3525 case clang::X86::BI__builtin_ia32_psllqi256:
3526 case clang::X86::BI__builtin_ia32_psllqi512:
3527    return interp__builtin_elementwise_int_binop(
3528        S, OpPC, Call, [](const APSInt &LHS, const APSInt &RHS) {
3529 if (RHS.uge(LHS.getBitWidth())) {
3530 return APInt::getZero(LHS.getBitWidth());
3531 }
3532 return LHS.shl(RHS.getZExtValue());
3533 });
3534
3535 case clang::X86::BI__builtin_ia32_psrav4si:
3536 case clang::X86::BI__builtin_ia32_psrav8di:
3537 case clang::X86::BI__builtin_ia32_psrav8hi:
3538 case clang::X86::BI__builtin_ia32_psrav8si:
3539 case clang::X86::BI__builtin_ia32_psrav16hi:
3540 case clang::X86::BI__builtin_ia32_psrav16si:
3541 case clang::X86::BI__builtin_ia32_psrav32hi:
3542 case clang::X86::BI__builtin_ia32_psravq128:
3543 case clang::X86::BI__builtin_ia32_psravq256:
3544 case clang::X86::BI__builtin_ia32_psrawi128:
3545 case clang::X86::BI__builtin_ia32_psrawi256:
3546 case clang::X86::BI__builtin_ia32_psrawi512:
3547 case clang::X86::BI__builtin_ia32_psradi128:
3548 case clang::X86::BI__builtin_ia32_psradi256:
3549 case clang::X86::BI__builtin_ia32_psradi512:
3550 case clang::X86::BI__builtin_ia32_psraqi128:
3551 case clang::X86::BI__builtin_ia32_psraqi256:
3552 case clang::X86::BI__builtin_ia32_psraqi512:
3553    return interp__builtin_elementwise_int_binop(
3554        S, OpPC, Call, [](const APSInt &LHS, const APSInt &RHS) {
3555 if (RHS.uge(LHS.getBitWidth())) {
3556 return LHS.ashr(LHS.getBitWidth() - 1);
3557 }
3558 return LHS.ashr(RHS.getZExtValue());
3559 });
3560
3561 case clang::X86::BI__builtin_ia32_psrlv2di:
3562 case clang::X86::BI__builtin_ia32_psrlv4di:
3563 case clang::X86::BI__builtin_ia32_psrlv4si:
3564 case clang::X86::BI__builtin_ia32_psrlv8di:
3565 case clang::X86::BI__builtin_ia32_psrlv8hi:
3566 case clang::X86::BI__builtin_ia32_psrlv8si:
3567 case clang::X86::BI__builtin_ia32_psrlv16hi:
3568 case clang::X86::BI__builtin_ia32_psrlv16si:
3569 case clang::X86::BI__builtin_ia32_psrlv32hi:
3570 case clang::X86::BI__builtin_ia32_psrlwi128:
3571 case clang::X86::BI__builtin_ia32_psrlwi256:
3572 case clang::X86::BI__builtin_ia32_psrlwi512:
3573 case clang::X86::BI__builtin_ia32_psrldi128:
3574 case clang::X86::BI__builtin_ia32_psrldi256:
3575 case clang::X86::BI__builtin_ia32_psrldi512:
3576 case clang::X86::BI__builtin_ia32_psrlqi128:
3577 case clang::X86::BI__builtin_ia32_psrlqi256:
3578 case clang::X86::BI__builtin_ia32_psrlqi512:
3579    return interp__builtin_elementwise_int_binop(
3580        S, OpPC, Call, [](const APSInt &LHS, const APSInt &RHS) {
3581 if (RHS.uge(LHS.getBitWidth())) {
3582 return APInt::getZero(LHS.getBitWidth());
3583 }
3584 return LHS.lshr(RHS.getZExtValue());
3585 });
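// Editor's note (not part of the source file): as the three shift lambdas
// above encode, an out-of-range count is not undefined for these x86
// intrinsics; for 32-bit lanes shifted by 40:
//   shl  0x00000001 -> 0x00000000   (count >= width gives zero)
//   ashr 0x80000000 -> 0xFFFFFFFF   (sign bit is replicated)
//   lshr 0x80000000 -> 0x00000000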
3586 case clang::X86::BI__builtin_ia32_packsswb128:
3587 case clang::X86::BI__builtin_ia32_packsswb256:
3588 case clang::X86::BI__builtin_ia32_packsswb512:
3589 case clang::X86::BI__builtin_ia32_packssdw128:
3590 case clang::X86::BI__builtin_ia32_packssdw256:
3591 case clang::X86::BI__builtin_ia32_packssdw512:
3592 return interp__builtin_x86_pack(S, OpPC, Call, [](const APSInt &Src) {
3593 return APInt(Src).truncSSat(Src.getBitWidth() / 2);
3594 });
3595 case clang::X86::BI__builtin_ia32_packusdw128:
3596 case clang::X86::BI__builtin_ia32_packusdw256:
3597 case clang::X86::BI__builtin_ia32_packusdw512:
3598 case clang::X86::BI__builtin_ia32_packuswb128:
3599 case clang::X86::BI__builtin_ia32_packuswb256:
3600 case clang::X86::BI__builtin_ia32_packuswb512:
3601 return interp__builtin_x86_pack(S, OpPC, Call, [](const APSInt &Src) {
3602 unsigned DstBits = Src.getBitWidth() / 2;
3603 if (Src.isNegative())
3604 return APInt::getZero(DstBits);
3605 if (Src.isIntN(DstBits))
3606 return APInt(Src).trunc(DstBits);
3607 return APInt::getAllOnes(DstBits);
3608 });
3609
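// Editor's note (not part of the source file): the two pack callbacks above
// saturate each wide lane into a lane of half the width; packing 16-bit
// values to 8 bits, for example:
//   packsswb (signed saturation):    300 -> 127,  -300 -> -128,  -5 -> -5
//   packuswb (unsigned saturation):  300 -> 255,  -300 ->    0,  -5 ->  0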
3610 case clang::X86::BI__builtin_ia32_vprotbi:
3611 case clang::X86::BI__builtin_ia32_vprotdi:
3612 case clang::X86::BI__builtin_ia32_vprotqi:
3613 case clang::X86::BI__builtin_ia32_vprotwi:
3614 case clang::X86::BI__builtin_ia32_prold128:
3615 case clang::X86::BI__builtin_ia32_prold256:
3616 case clang::X86::BI__builtin_ia32_prold512:
3617 case clang::X86::BI__builtin_ia32_prolq128:
3618 case clang::X86::BI__builtin_ia32_prolq256:
3619 case clang::X86::BI__builtin_ia32_prolq512:
3620    return interp__builtin_elementwise_int_binop(
3621        S, OpPC, Call,
3622 [](const APSInt &LHS, const APSInt &RHS) { return LHS.rotl(RHS); });
3623
3624 case clang::X86::BI__builtin_ia32_prord128:
3625 case clang::X86::BI__builtin_ia32_prord256:
3626 case clang::X86::BI__builtin_ia32_prord512:
3627 case clang::X86::BI__builtin_ia32_prorq128:
3628 case clang::X86::BI__builtin_ia32_prorq256:
3629 case clang::X86::BI__builtin_ia32_prorq512:
3630    return interp__builtin_elementwise_int_binop(
3631        S, OpPC, Call,
3632 [](const APSInt &LHS, const APSInt &RHS) { return LHS.rotr(RHS); });
3633
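// Editor's note (not part of the source file): APInt::rotl/rotr rotate each
// lane by the count modulo the lane width, e.g. for a 32-bit lane
// rotl(0x80000001, 1) == 0x00000003 and rotr(0x00000003, 1) == 0x80000001.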
3634 case Builtin::BI__builtin_elementwise_max:
3635 case Builtin::BI__builtin_elementwise_min:
3636 return interp__builtin_elementwise_maxmin(S, OpPC, Call, BuiltinID);
3637
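// Editor's illustration (not part of the source file): assuming these are
// constant-folded, scalar uses behave as expected:
//   static_assert(__builtin_elementwise_max(3, 7) == 7);
//   static_assert(__builtin_elementwise_min(-2, 5) == -2);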
3638 case clang::X86::BI__builtin_ia32_pmuldq128:
3639 case clang::X86::BI__builtin_ia32_pmuldq256:
3640 case clang::X86::BI__builtin_ia32_pmuldq512:
3641 case clang::X86::BI__builtin_ia32_pmuludq128:
3642 case clang::X86::BI__builtin_ia32_pmuludq256:
3643 case clang::X86::BI__builtin_ia32_pmuludq512:
3644 return interp__builtin_ia32_pmul(S, OpPC, Call, BuiltinID);
3645
3646 case Builtin::BI__builtin_elementwise_fma:
3647    return interp__builtin_elementwise_triop_fp(
3648        S, OpPC, Call,
3649 [](const APFloat &X, const APFloat &Y, const APFloat &Z,
3650 llvm::RoundingMode RM) {
3651 APFloat F = X;
3652 F.fusedMultiplyAdd(Y, Z, RM);
3653 return F;
3654 });
3655
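// Editor's illustration (not part of the source file): the callback performs
// a fused multiply-add, so with a clang that folds this builtin:
//   static_assert(__builtin_elementwise_fma(2.0, 3.0, 1.0) == 7.0);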
3656 case X86::BI__builtin_ia32_vpshldd128:
3657 case X86::BI__builtin_ia32_vpshldd256:
3658 case X86::BI__builtin_ia32_vpshldd512:
3659 case X86::BI__builtin_ia32_vpshldq128:
3660 case X86::BI__builtin_ia32_vpshldq256:
3661 case X86::BI__builtin_ia32_vpshldq512:
3662 case X86::BI__builtin_ia32_vpshldw128:
3663 case X86::BI__builtin_ia32_vpshldw256:
3664 case X86::BI__builtin_ia32_vpshldw512:
3665    return interp__builtin_elementwise_triop(
3666        S, OpPC, Call,
3667 [](const APSInt &Hi, const APSInt &Lo, const APSInt &Amt) {
3668 return llvm::APIntOps::fshl(Hi, Lo, Amt);
3669 });
3670
3671 case X86::BI__builtin_ia32_vpshrdd128:
3672 case X86::BI__builtin_ia32_vpshrdd256:
3673 case X86::BI__builtin_ia32_vpshrdd512:
3674 case X86::BI__builtin_ia32_vpshrdq128:
3675 case X86::BI__builtin_ia32_vpshrdq256:
3676 case X86::BI__builtin_ia32_vpshrdq512:
3677 case X86::BI__builtin_ia32_vpshrdw128:
3678 case X86::BI__builtin_ia32_vpshrdw256:
3679 case X86::BI__builtin_ia32_vpshrdw512:
3680 // NOTE: Reversed Hi/Lo operands.
3681    return interp__builtin_elementwise_triop(
3682        S, OpPC, Call,
3683 [](const APSInt &Lo, const APSInt &Hi, const APSInt &Amt) {
3684 return llvm::APIntOps::fshr(Hi, Lo, Amt);
3685 });
3686
3687 case clang::X86::BI__builtin_ia32_blendpd:
3688 case clang::X86::BI__builtin_ia32_blendpd256:
3689 case clang::X86::BI__builtin_ia32_blendps:
3690 case clang::X86::BI__builtin_ia32_blendps256:
3691 case clang::X86::BI__builtin_ia32_pblendw128:
3692 case clang::X86::BI__builtin_ia32_pblendw256:
3693 case clang::X86::BI__builtin_ia32_pblendd128:
3694 case clang::X86::BI__builtin_ia32_pblendd256:
3695 return interp__builtin_blend(S, OpPC, Call);
3696
3697 case clang::X86::BI__builtin_ia32_blendvpd:
3698 case clang::X86::BI__builtin_ia32_blendvpd256:
3699 case clang::X86::BI__builtin_ia32_blendvps:
3700 case clang::X86::BI__builtin_ia32_blendvps256:
3701    return interp__builtin_elementwise_triop_fp(
3702        S, OpPC, Call,
3703 [](const APFloat &F, const APFloat &T, const APFloat &C,
3704 llvm::RoundingMode) { return C.isNegative() ? T : F; });
3705
3706 case clang::X86::BI__builtin_ia32_pblendvb128:
3707 case clang::X86::BI__builtin_ia32_pblendvb256:
3708    return interp__builtin_elementwise_triop(
3709        S, OpPC, Call, [](const APSInt &F, const APSInt &T, const APSInt &C) {
3710 return ((APInt)C).isNegative() ? T : F;
3711 });
3712
3713 case X86::BI__builtin_ia32_selectb_128:
3714 case X86::BI__builtin_ia32_selectb_256:
3715 case X86::BI__builtin_ia32_selectb_512:
3716 case X86::BI__builtin_ia32_selectw_128:
3717 case X86::BI__builtin_ia32_selectw_256:
3718 case X86::BI__builtin_ia32_selectw_512:
3719 case X86::BI__builtin_ia32_selectd_128:
3720 case X86::BI__builtin_ia32_selectd_256:
3721 case X86::BI__builtin_ia32_selectd_512:
3722 case X86::BI__builtin_ia32_selectq_128:
3723 case X86::BI__builtin_ia32_selectq_256:
3724 case X86::BI__builtin_ia32_selectq_512:
3725 case X86::BI__builtin_ia32_selectph_128:
3726 case X86::BI__builtin_ia32_selectph_256:
3727 case X86::BI__builtin_ia32_selectph_512:
3728 case X86::BI__builtin_ia32_selectpbf_128:
3729 case X86::BI__builtin_ia32_selectpbf_256:
3730 case X86::BI__builtin_ia32_selectpbf_512:
3731 case X86::BI__builtin_ia32_selectps_128:
3732 case X86::BI__builtin_ia32_selectps_256:
3733 case X86::BI__builtin_ia32_selectps_512:
3734 case X86::BI__builtin_ia32_selectpd_128:
3735 case X86::BI__builtin_ia32_selectpd_256:
3736 case X86::BI__builtin_ia32_selectpd_512:
3737 return interp__builtin_select(S, OpPC, Call);
3738
3739 case X86::BI__builtin_ia32_pshuflw:
3740 case X86::BI__builtin_ia32_pshuflw256:
3741 case X86::BI__builtin_ia32_pshuflw512:
3742 return interp__builtin_ia32_pshuf(S, OpPC, Call, false);
3743
3744 case X86::BI__builtin_ia32_pshufhw:
3745 case X86::BI__builtin_ia32_pshufhw256:
3746 case X86::BI__builtin_ia32_pshufhw512:
3747 return interp__builtin_ia32_pshuf(S, OpPC, Call, true);
3748
3749 case X86::BI__builtin_ia32_pshufd:
3750 case X86::BI__builtin_ia32_pshufd256:
3751 case X86::BI__builtin_ia32_pshufd512:
3752 return interp__builtin_ia32_pshuf(S, OpPC, Call, false);
3753
3754 case X86::BI__builtin_ia32_kandqi:
3755 case X86::BI__builtin_ia32_kandhi:
3756 case X86::BI__builtin_ia32_kandsi:
3757 case X86::BI__builtin_ia32_kanddi:
3758    return interp__builtin_elementwise_int_binop(
3759        S, OpPC, Call,
3760 [](const APSInt &LHS, const APSInt &RHS) { return LHS & RHS; });
3761
3762 case X86::BI__builtin_ia32_kandnqi:
3763 case X86::BI__builtin_ia32_kandnhi:
3764 case X86::BI__builtin_ia32_kandnsi:
3765 case X86::BI__builtin_ia32_kandndi:
3766    return interp__builtin_elementwise_int_binop(
3767        S, OpPC, Call,
3768 [](const APSInt &LHS, const APSInt &RHS) { return ~LHS & RHS; });
3769
3770 case X86::BI__builtin_ia32_korqi:
3771 case X86::BI__builtin_ia32_korhi:
3772 case X86::BI__builtin_ia32_korsi:
3773 case X86::BI__builtin_ia32_kordi:
3774    return interp__builtin_elementwise_int_binop(
3775        S, OpPC, Call,
3776 [](const APSInt &LHS, const APSInt &RHS) { return LHS | RHS; });
3777
3778 case X86::BI__builtin_ia32_kxnorqi:
3779 case X86::BI__builtin_ia32_kxnorhi:
3780 case X86::BI__builtin_ia32_kxnorsi:
3781 case X86::BI__builtin_ia32_kxnordi:
3782    return interp__builtin_elementwise_int_binop(
3783        S, OpPC, Call,
3784 [](const APSInt &LHS, const APSInt &RHS) { return ~(LHS ^ RHS); });
3785
3786 case X86::BI__builtin_ia32_kxorqi:
3787 case X86::BI__builtin_ia32_kxorhi:
3788 case X86::BI__builtin_ia32_kxorsi:
3789 case X86::BI__builtin_ia32_kxordi:
3790    return interp__builtin_elementwise_int_binop(
3791        S, OpPC, Call,
3792 [](const APSInt &LHS, const APSInt &RHS) { return LHS ^ RHS; });
3793
3794 case X86::BI__builtin_ia32_knotqi:
3795 case X86::BI__builtin_ia32_knothi:
3796 case X86::BI__builtin_ia32_knotsi:
3797 case X86::BI__builtin_ia32_knotdi:
3798    return interp__builtin_elementwise_int_unaryop(
3799        S, OpPC, Call, [](const APSInt &Src) { return ~Src; });
3800
3801 case X86::BI__builtin_ia32_kaddqi:
3802 case X86::BI__builtin_ia32_kaddhi:
3803 case X86::BI__builtin_ia32_kaddsi:
3804 case X86::BI__builtin_ia32_kadddi:
3805    return interp__builtin_elementwise_int_binop(
3806        S, OpPC, Call,
3807 [](const APSInt &LHS, const APSInt &RHS) { return LHS + RHS; });
3808
3809 case X86::BI__builtin_ia32_pternlogd128_mask:
3810 case X86::BI__builtin_ia32_pternlogd256_mask:
3811 case X86::BI__builtin_ia32_pternlogd512_mask:
3812 case X86::BI__builtin_ia32_pternlogq128_mask:
3813 case X86::BI__builtin_ia32_pternlogq256_mask:
3814 case X86::BI__builtin_ia32_pternlogq512_mask:
3815 return interp__builtin_ia32_pternlog(S, OpPC, Call, /*MaskZ=*/false);
3816 case X86::BI__builtin_ia32_pternlogd128_maskz:
3817 case X86::BI__builtin_ia32_pternlogd256_maskz:
3818 case X86::BI__builtin_ia32_pternlogd512_maskz:
3819 case X86::BI__builtin_ia32_pternlogq128_maskz:
3820 case X86::BI__builtin_ia32_pternlogq256_maskz:
3821 case X86::BI__builtin_ia32_pternlogq512_maskz:
3822 return interp__builtin_ia32_pternlog(S, OpPC, Call, /*MaskZ=*/true);
3823 case Builtin::BI__builtin_elementwise_fshl:
3824    return interp__builtin_elementwise_triop(S, OpPC, Call,
3825                                             llvm::APIntOps::fshl);
3826 case Builtin::BI__builtin_elementwise_fshr:
3827    return interp__builtin_elementwise_triop(S, OpPC, Call,
3828                                             llvm::APIntOps::fshr);
3829
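// Editor's illustration (not part of the source file): funnel shifts view
// the two operands as one double-width value and shift it, e.g. for 32-bit
// unsigned operands:
//   __builtin_elementwise_fshl(0x000000FFu, 0xFF000000u, 8u) == 0x0000FFFFu
//   __builtin_elementwise_fshr(0x000000FFu, 0xFF000000u, 8u) == 0xFFFF0000u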
3830 case X86::BI__builtin_ia32_insertf32x4_256:
3831 case X86::BI__builtin_ia32_inserti32x4_256:
3832 case X86::BI__builtin_ia32_insertf64x2_256:
3833 case X86::BI__builtin_ia32_inserti64x2_256:
3834 case X86::BI__builtin_ia32_insertf32x4:
3835 case X86::BI__builtin_ia32_inserti32x4:
3836 case X86::BI__builtin_ia32_insertf64x2_512:
3837 case X86::BI__builtin_ia32_inserti64x2_512:
3838 case X86::BI__builtin_ia32_insertf32x8:
3839 case X86::BI__builtin_ia32_inserti32x8:
3840 case X86::BI__builtin_ia32_insertf64x4:
3841 case X86::BI__builtin_ia32_inserti64x4:
3842 case X86::BI__builtin_ia32_vinsertf128_ps256:
3843 case X86::BI__builtin_ia32_vinsertf128_pd256:
3844 case X86::BI__builtin_ia32_vinsertf128_si256:
3845 case X86::BI__builtin_ia32_insert128i256:
3846 return interp__builtin_x86_insert_subvector(S, OpPC, Call, BuiltinID);
3847
3848 case X86::BI__builtin_ia32_vec_ext_v4hi:
3849 case X86::BI__builtin_ia32_vec_ext_v16qi:
3850 case X86::BI__builtin_ia32_vec_ext_v8hi:
3851 case X86::BI__builtin_ia32_vec_ext_v4si:
3852 case X86::BI__builtin_ia32_vec_ext_v2di:
3853 case X86::BI__builtin_ia32_vec_ext_v32qi:
3854 case X86::BI__builtin_ia32_vec_ext_v16hi:
3855 case X86::BI__builtin_ia32_vec_ext_v8si:
3856 case X86::BI__builtin_ia32_vec_ext_v4di:
3857 case X86::BI__builtin_ia32_vec_ext_v4sf:
3858 return interp__builtin_vec_ext(S, OpPC, Call, BuiltinID);
3859
3860 case X86::BI__builtin_ia32_vec_set_v4hi:
3861 case X86::BI__builtin_ia32_vec_set_v16qi:
3862 case X86::BI__builtin_ia32_vec_set_v8hi:
3863 case X86::BI__builtin_ia32_vec_set_v4si:
3864 case X86::BI__builtin_ia32_vec_set_v2di:
3865 case X86::BI__builtin_ia32_vec_set_v32qi:
3866 case X86::BI__builtin_ia32_vec_set_v16hi:
3867 case X86::BI__builtin_ia32_vec_set_v8si:
3868 case X86::BI__builtin_ia32_vec_set_v4di:
3869 return interp__builtin_vec_set(S, OpPC, Call, BuiltinID);
3870
3871 default:
3872 S.FFDiag(S.Current->getLocation(OpPC),
3873 diag::note_invalid_subexpr_in_const_expr)
3874 << S.Current->getRange(OpPC);
3875
3876 return false;
3877 }
3878
3879 llvm_unreachable("Unhandled builtin ID");
3880}
3881
3882bool InterpretOffsetOf(InterpState &S, CodePtr OpPC, const OffsetOfExpr *E,
3883                        ArrayRef<int64_t> ArrayIndices, int64_t &IntResult) {
3884  CharUnits Result;
3885  unsigned N = E->getNumComponents();
3886 assert(N > 0);
3887
3888 unsigned ArrayIndex = 0;
3889 QualType CurrentType = E->getTypeSourceInfo()->getType();
3890 for (unsigned I = 0; I != N; ++I) {
3891 const OffsetOfNode &Node = E->getComponent(I);
3892 switch (Node.getKind()) {
3893 case OffsetOfNode::Field: {
3894 const FieldDecl *MemberDecl = Node.getField();
3895 const auto *RD = CurrentType->getAsRecordDecl();
3896 if (!RD || RD->isInvalidDecl())
3897 return false;
3898      const ASTRecordLayout &RL = S.getASTContext().getASTRecordLayout(RD);
3899      unsigned FieldIndex = MemberDecl->getFieldIndex();
3900 assert(FieldIndex < RL.getFieldCount() && "offsetof field in wrong type");
3901      Result +=
3902          S.getASTContext().toCharUnitsFromBits(RL.getFieldOffset(FieldIndex));
3903      CurrentType = MemberDecl->getType().getNonReferenceType();
3904 break;
3905 }
3906 case OffsetOfNode::Array: {
3907 // When generating bytecode, we put all the index expressions as Sint64 on
3908 // the stack.
3909 int64_t Index = ArrayIndices[ArrayIndex];
3910 const ArrayType *AT = S.getASTContext().getAsArrayType(CurrentType);
3911 if (!AT)
3912 return false;
3913 CurrentType = AT->getElementType();
3914 CharUnits ElementSize = S.getASTContext().getTypeSizeInChars(CurrentType);
3915 Result += Index * ElementSize;
3916 ++ArrayIndex;
3917 break;
3918 }
3919 case OffsetOfNode::Base: {
3920 const CXXBaseSpecifier *BaseSpec = Node.getBase();
3921 if (BaseSpec->isVirtual())
3922 return false;
3923
3924 // Find the layout of the class whose base we are looking into.
3925 const auto *RD = CurrentType->getAsCXXRecordDecl();
3926 if (!RD || RD->isInvalidDecl())
3927 return false;
3928      const ASTRecordLayout &RL = S.getASTContext().getASTRecordLayout(RD);
3929
3930 // Find the base class itself.
3931 CurrentType = BaseSpec->getType();
3932 const auto *BaseRD = CurrentType->getAsCXXRecordDecl();
3933 if (!BaseRD)
3934 return false;
3935
3936 // Add the offset to the base.
3937 Result += RL.getBaseClassOffset(BaseRD);
3938 break;
3939 }
3940    case OffsetOfNode::Identifier:
3941      llvm_unreachable("Dependent OffsetOfExpr?");
3942 }
3943 }
3944
3945 IntResult = Result.getQuantity();
3946
3947 return true;
3948}
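// Editor's illustration (not part of the source file): for
//   struct S { char c; int i; };
// the Field case above reads the offset of 'i' from the AST record layout,
// so on typical ABIs __builtin_offsetof(S, i) evaluates to alignof(int).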
3949
3950bool SetThreeWayComparisonField(InterpState &S, CodePtr OpPC,
3951                                 const Pointer &Ptr, const APSInt &IntValue) {
3952
3953 const Record *R = Ptr.getRecord();
3954 assert(R);
3955 assert(R->getNumFields() == 1);
3956
3957 unsigned FieldOffset = R->getField(0u)->Offset;
3958 const Pointer &FieldPtr = Ptr.atField(FieldOffset);
3959 PrimType FieldT = *S.getContext().classify(FieldPtr.getType());
3960
3961 INT_TYPE_SWITCH(FieldT,
3962 FieldPtr.deref<T>() = T::from(IntValue.getSExtValue()));
3963 FieldPtr.initialize();
3964 return true;
3965}
3966
3967static void zeroAll(Pointer &Dest) {
3968 const Descriptor *Desc = Dest.getFieldDesc();
3969
3970 if (Desc->isPrimitive()) {
3971 TYPE_SWITCH(Desc->getPrimType(), {
3972 Dest.deref<T>().~T();
3973 new (&Dest.deref<T>()) T();
3974 });
3975 return;
3976 }
3977
3978 if (Desc->isRecord()) {
3979 const Record *R = Desc->ElemRecord;
3980 for (const Record::Field &F : R->fields()) {
3981 Pointer FieldPtr = Dest.atField(F.Offset);
3982 zeroAll(FieldPtr);
3983 }
3984 return;
3985 }
3986
3987 if (Desc->isPrimitiveArray()) {
3988 for (unsigned I = 0, N = Desc->getNumElems(); I != N; ++I) {
3989 TYPE_SWITCH(Desc->getPrimType(), {
3990 Dest.deref<T>().~T();
3991 new (&Dest.deref<T>()) T();
3992 });
3993 }
3994 return;
3995 }
3996
3997 if (Desc->isCompositeArray()) {
3998 for (unsigned I = 0, N = Desc->getNumElems(); I != N; ++I) {
3999 Pointer ElemPtr = Dest.atIndex(I).narrow();
4000 zeroAll(ElemPtr);
4001 }
4002 return;
4003 }
4004}
4005
4006static bool copyComposite(InterpState &S, CodePtr OpPC, const Pointer &Src,
4007 Pointer &Dest, bool Activate);
4008static bool copyRecord(InterpState &S, CodePtr OpPC, const Pointer &Src,
4009 Pointer &Dest, bool Activate = false) {
4010 [[maybe_unused]] const Descriptor *SrcDesc = Src.getFieldDesc();
4011 const Descriptor *DestDesc = Dest.getFieldDesc();
4012
4013 auto copyField = [&](const Record::Field &F, bool Activate) -> bool {
4014 Pointer DestField = Dest.atField(F.Offset);
4015 if (OptPrimType FT = S.Ctx.classify(F.Decl->getType())) {
4016 TYPE_SWITCH(*FT, {
4017 DestField.deref<T>() = Src.atField(F.Offset).deref<T>();
4018 if (Src.atField(F.Offset).isInitialized())
4019 DestField.initialize();
4020 if (Activate)
4021 DestField.activate();
4022 });
4023 return true;
4024 }
4025 // Composite field.
4026 return copyComposite(S, OpPC, Src.atField(F.Offset), DestField, Activate);
4027 };
4028
4029 assert(SrcDesc->isRecord());
4030 assert(SrcDesc->ElemRecord == DestDesc->ElemRecord);
4031 const Record *R = DestDesc->ElemRecord;
4032 for (const Record::Field &F : R->fields()) {
4033 if (R->isUnion()) {
4034 // For unions, only copy the active field. Zero all others.
4035 const Pointer &SrcField = Src.atField(F.Offset);
4036 if (SrcField.isActive()) {
4037 if (!copyField(F, /*Activate=*/true))
4038 return false;
4039 } else {
4040 if (!CheckMutable(S, OpPC, Src.atField(F.Offset)))
4041 return false;
4042 Pointer DestField = Dest.atField(F.Offset);
4043 zeroAll(DestField);
4044 }
4045 } else {
4046 if (!copyField(F, Activate))
4047 return false;
4048 }
4049 }
4050
4051 for (const Record::Base &B : R->bases()) {
4052 Pointer DestBase = Dest.atField(B.Offset);
4053 if (!copyRecord(S, OpPC, Src.atField(B.Offset), DestBase, Activate))
4054 return false;
4055 }
4056
4057 Dest.initialize();
4058 return true;
4059}
4060
4061static bool copyComposite(InterpState &S, CodePtr OpPC, const Pointer &Src,
4062 Pointer &Dest, bool Activate = false) {
4063 assert(Src.isLive() && Dest.isLive());
4064
4065 [[maybe_unused]] const Descriptor *SrcDesc = Src.getFieldDesc();
4066 const Descriptor *DestDesc = Dest.getFieldDesc();
4067
4068 assert(!DestDesc->isPrimitive() && !SrcDesc->isPrimitive());
4069
4070 if (DestDesc->isPrimitiveArray()) {
4071 assert(SrcDesc->isPrimitiveArray());
4072 assert(SrcDesc->getNumElems() == DestDesc->getNumElems());
4073 PrimType ET = DestDesc->getPrimType();
4074 for (unsigned I = 0, N = DestDesc->getNumElems(); I != N; ++I) {
4075 Pointer DestElem = Dest.atIndex(I);
4076 TYPE_SWITCH(ET, {
4077 DestElem.deref<T>() = Src.elem<T>(I);
4078 DestElem.initialize();
4079 });
4080 }
4081 return true;
4082 }
4083
4084 if (DestDesc->isCompositeArray()) {
4085 assert(SrcDesc->isCompositeArray());
4086 assert(SrcDesc->getNumElems() == DestDesc->getNumElems());
4087 for (unsigned I = 0, N = DestDesc->getNumElems(); I != N; ++I) {
4088 const Pointer &SrcElem = Src.atIndex(I).narrow();
4089 Pointer DestElem = Dest.atIndex(I).narrow();
4090 if (!copyComposite(S, OpPC, SrcElem, DestElem, Activate))
4091 return false;
4092 }
4093 return true;
4094 }
4095
4096 if (DestDesc->isRecord())
4097 return copyRecord(S, OpPC, Src, Dest, Activate);
4098 return Invalid(S, OpPC);
4099}
4100
4101bool DoMemcpy(InterpState &S, CodePtr OpPC, const Pointer &Src, Pointer &Dest) {
4102 return copyComposite(S, OpPC, Src, Dest);
4103}
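// Editor's illustration (not part of the source file): DoMemcpy copies one
// composite object into another during constant evaluation. At the source
// level, one would expect a constexpr memcpy over a trivially copyable
// struct to be accepted by a reasonably recent clang (a sketch, not a
// guarantee that it routes through this exact helper):
//   struct P { int x, y; };
//   constexpr P Src{1, 2};
//   constexpr int copyY() {
//     P D{};
//     __builtin_memcpy(&D, &Src, sizeof(P));
//     return D.y;
//   }
//   static_assert(copyY() == 2);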
4104
4105} // namespace interp
4106} // namespace clang
#define V(N, I)
Defines enum values for all the target-independent builtin functions.
llvm::APSInt APSInt
Definition Compiler.cpp:23
GCCTypeClass
Values returned by __builtin_classify_type, chosen to match the values produced by GCC's builtin.
CharUnits GetAlignOfExpr(const ASTContext &Ctx, const Expr *E, UnaryExprOrTypeTrait ExprKind)
GCCTypeClass EvaluateBuiltinClassifyType(QualType T, const LangOptions &LangOpts)
EvaluateBuiltinClassifyType - Evaluate __builtin_classify_type the same way as GCC.
static bool isOneByteCharacterType(QualType T)
static bool isUserWritingOffTheEnd(const ASTContext &Ctx, const LValue &LVal)
Attempts to detect a user writing into a piece of memory that's impossible to figure out the size of ...
TokenType getType() const
Returns the token's type, e.g.
#define X(type, name)
Definition Value.h:97
static DiagnosticBuilder Diag(DiagnosticsEngine *Diags, const LangOptions &Features, FullSourceLoc TokLoc, const char *TokBegin, const char *TokRangeBegin, const char *TokRangeEnd, unsigned DiagID)
Produce a diagnostic highlighting some portion of a literal.
#define INT_TYPE_SWITCH_NO_BOOL(Expr, B)
Definition PrimType.h:247
#define INT_TYPE_SWITCH(Expr, B)
Definition PrimType.h:228
#define TYPE_SWITCH(Expr, B)
Definition PrimType.h:207
static std::string toString(const clang::SanitizerSet &Sanitizers)
Produce a string containing comma-separated names of sanitizers in Sanitizers set.
static QualType getPointeeType(const MemRegion *R)
Enumerates target-specific builtins in their own namespaces within namespace clang.
APValue - This class implements a discriminated union of [uninitialized] [APSInt] [APFloat],...
Definition APValue.h:122
CharUnits & getLValueOffset()
Definition APValue.cpp:993
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
Definition ASTContext.h:220
CharUnits getTypeAlignInChars(QualType T) const
Return the ABI-specified alignment of a (complete) type T, in characters.
unsigned getIntWidth(QualType T) const
const llvm::fltSemantics & getFloatTypeSemantics(QualType T) const
Return the APFloat 'semantics' for the specified scalar floating point type.
const ASTRecordLayout & getASTRecordLayout(const RecordDecl *D) const
Get or compute information about the layout of the specified record (struct/union/class) D,...
Builtin::Context & BuiltinInfo
Definition ASTContext.h:774
QualType getConstantArrayType(QualType EltTy, const llvm::APInt &ArySize, const Expr *SizeExpr, ArraySizeModifier ASM, unsigned IndexTypeQuals) const
Return the unique reference to the type for a constant array of the specified element type.
const LangOptions & getLangOpts() const
Definition ASTContext.h:926
CharUnits getDeclAlign(const Decl *D, bool ForAlignof=false) const
Return a conservative estimate of the alignment of the specified decl D.
QualType getWCharType() const
Return the unique wchar_t type available in C++ (and available as __wchar_t as a Microsoft extension)...
bool hasSameUnqualifiedType(QualType T1, QualType T2) const
Determine whether the given types are equivalent after cvr-qualifiers have been removed.
const ArrayType * getAsArrayType(QualType T) const
Type Query functions.
uint64_t getTypeSize(QualType T) const
Return the size of the specified (complete) type T, in bits.
CharUnits getTypeSizeInChars(QualType T) const
Return the size of the specified (complete) type T, in characters.
const TargetInfo & getTargetInfo() const
Definition ASTContext.h:891
CharUnits toCharUnitsFromBits(int64_t BitSize) const
Convert a size in bits to a size in characters.
CanQualType getCanonicalTagType(const TagDecl *TD) const
uint64_t getCharWidth() const
Return the size of the character type, in bits.
ASTRecordLayout - This class contains layout information for one RecordDecl, which is a struct/union/...
unsigned getFieldCount() const
getFieldCount - Get the number of fields in the layout.
uint64_t getFieldOffset(unsigned FieldNo) const
getFieldOffset - Get the offset of the given field index, in bits.
CharUnits getBaseClassOffset(const CXXRecordDecl *Base) const
getBaseClassOffset - Get the offset, in chars, for the given base class.
CharUnits getVBaseClassOffset(const CXXRecordDecl *VBase) const
getVBaseClassOffset - Get the offset, in chars, for the given base class.
Represents an array type, per C99 6.7.5.2 - Array Declarators.
Definition TypeBase.h:3722
QualType getElementType() const
Definition TypeBase.h:3734
std::string getQuotedName(unsigned ID) const
Return the identifier name for the specified builtin inside single quotes for a diagnostic,...
Definition Builtins.cpp:85
bool isConstantEvaluated(unsigned ID) const
Return true if this function can be constant evaluated by Clang frontend.
Definition Builtins.h:431
Represents a base class of a C++ class.
Definition DeclCXX.h:146
bool isVirtual() const
Determines whether the base class is a virtual base class (or not).
Definition DeclCXX.h:203
QualType getType() const
Retrieves the type of the base class.
Definition DeclCXX.h:249
CallExpr - Represents a function call (C99 6.5.2.2, C++ [expr.call]).
Definition Expr.h:2877
Expr * getArg(unsigned Arg)
getArg - Return the specified argument.
Definition Expr.h:3081
CharUnits - This is an opaque type for sizes expressed in character units.
Definition CharUnits.h:38
CharUnits alignmentAtOffset(CharUnits offset) const
Given that this is a non-zero alignment value, what is the alignment at the given offset?
Definition CharUnits.h:207
bool isZero() const
isZero - Test whether the quantity equals zero.
Definition CharUnits.h:122
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
Definition CharUnits.h:185
static CharUnits One()
One - Construct a CharUnits quantity of one.
Definition CharUnits.h:58
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
Definition CharUnits.h:63
CharUnits alignTo(const CharUnits &Align) const
alignTo - Returns the next integer (mod 2**64) that is greater than or equal to this quantity and is ...
Definition CharUnits.h:201
static unsigned getMaxSizeBits(const ASTContext &Context)
Determine the maximum number of active bits that an array's size can require, which limits the maximu...
Definition Type.cpp:254
This represents one expression.
Definition Expr.h:112
SourceLocation getExprLoc() const LLVM_READONLY
getExprLoc - Return the preferred location for the arrow when diagnosing a problem with a generic exp...
Definition Expr.cpp:273
QualType getType() const
Definition Expr.h:144
Represents a member of a struct/union/class.
Definition Decl.h:3160
unsigned getFieldIndex() const
Returns the index of this field within its record, as appropriate for passing to ASTRecordLayout::get...
Definition Decl.h:3245
const RecordDecl * getParent() const
Returns the parent of this field declaration, which is the struct in which this field is defined.
Definition Decl.h:3396
Represents a function declaration or definition.
Definition Decl.h:2000
One of these records is kept for each identifier that is lexed.
bool isStr(const char(&Str)[StrLen]) const
Return true if this is the identifier for the specified string.
OffsetOfExpr - [C99 7.17] - This represents an expression of the form offsetof(record-type,...
Definition Expr.h:2527
const OffsetOfNode & getComponent(unsigned Idx) const
Definition Expr.h:2574
TypeSourceInfo * getTypeSourceInfo() const
Definition Expr.h:2567
unsigned getNumComponents() const
Definition Expr.h:2582
Helper class for OffsetOfExpr.
Definition Expr.h:2421
FieldDecl * getField() const
For a field offsetof node, returns the field.
Definition Expr.h:2485
@ Array
An index into an array.
Definition Expr.h:2426
@ Identifier
A field in a dependent type, known only by its name.
Definition Expr.h:2430
@ Field
A field.
Definition Expr.h:2428
@ Base
An implicit indirection through a C++ base class, when the field found is in a base class.
Definition Expr.h:2433
Kind getKind() const
Determine what kind of offsetof node this is.
Definition Expr.h:2475
CXXBaseSpecifier * getBase() const
For a base class node, returns the base specifier.
Definition Expr.h:2495
PointerType - C99 6.7.5.1 - Pointer Declarators.
Definition TypeBase.h:3328
A (possibly-)qualified type.
Definition TypeBase.h:937
bool isTriviallyCopyableType(const ASTContext &Context) const
Return true if this is a trivially copyable type (C++0x [basic.types]p9)
Definition Type.cpp:2867
const Type * getTypePtr() const
Retrieves a pointer to the underlying (unqualified) type.
Definition TypeBase.h:8294
QualType getNonReferenceType() const
If Type is a reference type (e.g., const int&), returns the type that the reference refers to ("const...
Definition TypeBase.h:8479
SourceRange getSourceRange() const LLVM_READONLY
SourceLocation tokens are not useful in isolation - they are low level value objects created/interpre...
Definition Stmt.cpp:334
unsigned getMaxAtomicInlineWidth() const
Return the maximum width lock-free atomic operation which can be inlined given the supported features...
Definition TargetInfo.h:853
bool isBigEndian() const
virtual int getEHDataRegisterNumber(unsigned RegNo) const
Return the register number that __builtin_eh_return_regno would return with the specified argument.
virtual bool isNan2008() const
Returns true if NaN encoding is IEEE 754-2008.
QualType getType() const
Return the type wrapped by this type source info.
Definition TypeBase.h:8276
bool isBooleanType() const
Definition TypeBase.h:9017
bool isSignedIntegerOrEnumerationType() const
Determines whether this is an integer type that is signed or an enumeration types whose underlying ty...
Definition Type.cpp:2225
bool isUnsignedIntegerOrEnumerationType() const
Determines whether this is an integer type that is unsigned or an enumeration types whose underlying ...
Definition Type.cpp:2273
CXXRecordDecl * getAsCXXRecordDecl() const
Retrieves the CXXRecordDecl that this type refers to, either because the type is a RecordType or beca...
Definition Type.h:26
RecordDecl * getAsRecordDecl() const
Retrieves the RecordDecl this type refers to.
Definition Type.h:41
bool isPointerType() const
Definition TypeBase.h:8531
bool isIntegerType() const
isIntegerType() does not include complex integers (a GCC extension).
Definition TypeBase.h:8931
const T * castAs() const
Member-template castAs<specific type>.
Definition TypeBase.h:9174
QualType getPointeeType() const
If this is a pointer, ObjC object pointer, or block pointer, this returns the respective pointee.
Definition Type.cpp:752
bool isIncompleteType(NamedDecl **Def=nullptr) const
Types are partitioned into 3 broad categories (C99 6.2.5p1): object types, function types,...
Definition Type.cpp:2436
bool isVectorType() const
Definition TypeBase.h:8670
bool isFloatingType() const
Definition Type.cpp:2304
const T * getAs() const
Member-template getAs<specific type>'.
Definition TypeBase.h:9107
QualType getType() const
Definition Decl.h:723
Represents a GCC generic vector type.
Definition TypeBase.h:4175
unsigned getNumElements() const
Definition TypeBase.h:4190
QualType getElementType() const
Definition TypeBase.h:4189
A memory block, either on the stack or in the heap.
Definition InterpBlock.h:44
const Descriptor * getDescriptor() const
Returns the block's descriptor.
Definition InterpBlock.h:73
bool isDynamic() const
Definition InterpBlock.h:83
Wrapper around boolean types.
Definition Boolean.h:25
static Boolean from(T Value)
Definition Boolean.h:97
Pointer into the code segment.
Definition Source.h:30
const LangOptions & getLangOpts() const
Returns the language options.
Definition Context.cpp:326
OptPrimType classify(QualType T) const
Classifies a type.
Definition Context.cpp:360
unsigned getEvalID() const
Definition Context.h:145
Manages dynamic memory allocations done during bytecode interpretation.
bool deallocate(const Expr *Source, const Block *BlockToDelete, InterpState &S)
Deallocate the given source+block combination.
std::optional< Form > getAllocationForm(const Expr *Source) const
Checks whether the allocation done at the given source is an array allocation.
Block * allocate(const Descriptor *D, unsigned EvalID, Form AllocForm)
Allocate ONE element of the given descriptor.
If a Floating is constructed from Memory, it DOES NOT OWN THAT MEMORY.
Definition Floating.h:35
void copy(const APFloat &F)
Definition Floating.h:122
llvm::FPClassTest classify() const
Definition Floating.h:153
bool isSignaling() const
Definition Floating.h:148
bool isNormal() const
Definition Floating.h:151
ComparisonCategoryResult compare(const Floating &RHS) const
Definition Floating.h:156
bool isZero() const
Definition Floating.h:143
bool isNegative() const
Definition Floating.h:142
bool isFinite() const
Definition Floating.h:150
bool isDenormal() const
Definition Floating.h:152
APFloat::fltCategory getCategory() const
Definition Floating.h:154
APFloat getAPFloat() const
Definition Floating.h:63
Base class for stack frames, shared between VM and walker.
Definition Frame.h:25
virtual const FunctionDecl * getCallee() const =0
Returns the called function's declaration.
If an IntegralAP is constructed from Memory, it DOES NOT OWN THAT MEMORY.
Definition IntegralAP.h:36
Frame storing local variables.
Definition InterpFrame.h:27
const Expr * getExpr(CodePtr PC) const
InterpFrame * Caller
The frame of the previous function.
Definition InterpFrame.h:30
SourceInfo getSource(CodePtr PC) const
Map a location to a source.
CodePtr getRetPC() const
Returns the return address of the frame.
SourceLocation getLocation(CodePtr PC) const
SourceRange getRange(CodePtr PC) const
unsigned getDepth() const
const FunctionDecl * getCallee() const override
Returns the callee's declaration.
Stack frame storing temporaries and parameters.
Definition InterpStack.h:25
T pop()
Returns the value from the top of the stack and removes it.
Definition InterpStack.h:39
void push(Tys &&...Args)
Constructs a value in place on the top of the stack.
Definition InterpStack.h:33
void discard()
Discards the top value from the stack.
Definition InterpStack.h:50
T & peek() const
Returns a reference to the value on the top of the stack.
Definition InterpStack.h:62
Interpreter context.
Definition InterpState.h:43
Expr::EvalStatus & getEvalStatus() const override
Definition InterpState.h:67
Context & getContext() const
DynamicAllocator & getAllocator()
Context & Ctx
Interpreter Context.
Floating allocFloat(const llvm::fltSemantics &Sem)
llvm::SmallVector< const Block * > InitializingBlocks
List of blocks we're currently running either constructors or destructors for.
ASTContext & getASTContext() const override
Definition InterpState.h:70
InterpStack & Stk
Temporary stack.
const VarDecl * EvaluatingDecl
Declaration we're initializing/evaluating, if any.
InterpFrame * Current
The current frame.
T allocAP(unsigned BitWidth)
const LangOptions & getLangOpts() const
Definition InterpState.h:71
StdAllocatorCaller getStdAllocatorCaller(StringRef Name) const
Program & P
Reference to the module containing all bytecode.
PrimType value_or(PrimType PT) const
Definition PrimType.h:68
A pointer to a memory block, live or dead.
Definition Pointer.h:91
Pointer narrow() const
Restricts the scope of an array element pointer.
Definition Pointer.h:188
bool isInitialized() const
Checks if an object was initialized.
Definition Pointer.cpp:440
Pointer atIndex(uint64_t Idx) const
Offsets a pointer inside an array.
Definition Pointer.h:156
bool isDummy() const
Checks if the pointer points to a dummy value.
Definition Pointer.h:547
int64_t getIndex() const
Returns the index into an array.
Definition Pointer.h:612
bool isActive() const
Checks if the object is active.
Definition Pointer.h:536
Pointer atField(unsigned Off) const
Creates a pointer to a field.
Definition Pointer.h:173
T & deref() const
Dereferences the pointer, if it's live.
Definition Pointer.h:663
unsigned getNumElems() const
Returns the number of elements.
Definition Pointer.h:596
Pointer getArray() const
Returns the parent array.
Definition Pointer.h:316
bool isUnknownSizeArray() const
Checks if the structure is an array of unknown size.
Definition Pointer.h:415
void activate() const
Activates a field.
Definition Pointer.cpp:576
bool isIntegralPointer() const
Definition Pointer.h:469
QualType getType() const
Returns the type of the innermost field.
Definition Pointer.h:336
bool isArrayElement() const
Checks if the pointer points to an array.
Definition Pointer.h:421
void initializeAllElements() const
Initialize all elements of a primitive array at once.
Definition Pointer.cpp:545
bool isLive() const
Checks if the pointer is live.
Definition Pointer.h:268
bool inArray() const
Checks if the innermost field is an array.
Definition Pointer.h:397
T & elem(unsigned I) const
Dereferences the element at index I.
Definition Pointer.h:679
Pointer getBase() const
Returns a pointer to the object of which this pointer is a field.
Definition Pointer.h:307
std::string toDiagnosticString(const ASTContext &Ctx) const
Converts the pointer to a string usable in diagnostics.
Definition Pointer.cpp:427
bool isZero() const
Checks if the pointer is null.
Definition Pointer.h:254
bool isRoot() const
Pointer points directly to a block.
Definition Pointer.h:437
const Descriptor * getDeclDesc() const
Accessor for information about the declaration site.
Definition Pointer.h:282
static bool pointToSameBlock(const Pointer &A, const Pointer &B)
Checks if both given pointers point to the same block.
Definition Pointer.cpp:652
APValue toAPValue(const ASTContext &ASTCtx) const
Converts the pointer to an APValue.
Definition Pointer.cpp:171
bool isOnePastEnd() const
Checks if the index is one past end.
Definition Pointer.h:629
uint64_t getIntegerRepresentation() const
Definition Pointer.h:143
const FieldDecl * getField() const
Returns the field information.
Definition Pointer.h:481
Pointer expand() const
Expands a pointer to the containing array, undoing narrowing.
Definition Pointer.h:221
bool isBlockPointer() const
Definition Pointer.h:468
const Block * block() const
Definition Pointer.h:602
const Descriptor * getFieldDesc() const
Accessors for information about the innermost field.
Definition Pointer.h:326
bool isVirtualBaseClass() const
Definition Pointer.h:543
bool isBaseClass() const
Checks if a structure is a base class.
Definition Pointer.h:542
size_t elemSize() const
Returns the element size of the innermost field.
Definition Pointer.h:358
bool canBeInitialized() const
If this pointer has an InlineDescriptor we can use to initialize.
Definition Pointer.h:444
Lifetime getLifetime() const
Definition Pointer.h:724
void initialize() const
Initializes a field.
Definition Pointer.cpp:493
bool isField() const
Checks if the item is a field in an object.
Definition Pointer.h:274
const Record * getRecord() const
Returns the record descriptor of a class.
Definition Pointer.h:474
Descriptor * createDescriptor(const DeclTy &D, PrimType T, const Type *SourceTy=nullptr, Descriptor::MetadataSize MDSize=std::nullopt, bool IsConst=false, bool IsTemporary=false, bool IsMutable=false, bool IsVolatile=false)
Creates a descriptor for a primitive type.
Definition Program.h:119
Structure/Class descriptor.
Definition Record.h:25
const RecordDecl * getDecl() const
Returns the underlying declaration.
Definition Record.h:53
bool isUnion() const
Checks if the record is a union.
Definition Record.h:57
const Field * getField(const FieldDecl *FD) const
Returns a field.
Definition Record.cpp:47
llvm::iterator_range< const_base_iter > bases() const
Definition Record.h:92
unsigned getNumFields() const
Definition Record.h:88
llvm::iterator_range< const_field_iter > fields() const
Definition Record.h:84
Describes the statement/declaration an opcode was generated from.
Definition Source.h:73
OptionalDiagnostic Note(SourceLocation Loc, diag::kind DiagId)
Add a note to a prior diagnostic.
Definition State.cpp:63
DiagnosticBuilder report(SourceLocation Loc, diag::kind DiagId)
Directly reports a diagnostic message.
Definition State.cpp:74
OptionalDiagnostic FFDiag(SourceLocation Loc, diag::kind DiagId=diag::note_invalid_subexpr_in_const_expr, unsigned ExtraNotes=0)
Diagnose that the evaluation could not be folded (FF => FoldFailure)
Definition State.cpp:21
OptionalDiagnostic CCEDiag(SourceLocation Loc, diag::kind DiagId=diag::note_invalid_subexpr_in_const_expr, unsigned ExtraNotes=0)
Diagnose that the evaluation does not produce a C++11 core constant expression.
Definition State.cpp:42
bool checkingPotentialConstantExpression() const
Are we checking whether the expression is a potential constant expression?
Definition State.h:99
Defines the clang::TargetInfo interface.
bool computeOSLogBufferLayout(clang::ASTContext &Ctx, const clang::CallExpr *E, OSLogBufferLayout &layout)
Definition OSLog.cpp:192
static bool interp__builtin_is_within_lifetime(InterpState &S, CodePtr OpPC, const CallExpr *Call)
static void assignInteger(InterpState &S, const Pointer &Dest, PrimType ValueT, const APSInt &Value)
bool readPointerToBuffer(const Context &Ctx, const Pointer &FromPtr, BitcastBuffer &Buffer, bool ReturnOnUninit)
static Floating abs(InterpState &S, const Floating &In)
static bool interp__builtin_fmax(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, bool IsNumBuiltin)
static bool interp__builtin_elementwise_maxmin(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned BuiltinID)
static bool interp__builtin_bswap(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_elementwise_triop(InterpState &S, CodePtr OpPC, const CallExpr *Call, llvm::function_ref< APInt(const APSInt &, const APSInt &, const APSInt &)> Fn)
static bool interp__builtin_assume(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC, DynamicAllocator::Form AllocForm, DynamicAllocator::Form DeleteForm, const Descriptor *D, const Expr *NewExpr)
Diagnose mismatched new[]/delete or new/delete[] pairs.
Definition Interp.cpp:1105
static bool interp__builtin_isnan(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
Defined as __builtin_isnan(...), to accommodate the fact that it can take a float,...
static llvm::RoundingMode getRoundingMode(FPOptions FPO)
Definition Interp.h:406
static bool interp__builtin_elementwise_countzeroes(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinID)
Can be called with an integer or vector as the first and only parameter.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func, uint32_t VarArgSize)
Definition Interp.cpp:1555
static bool interp__builtin_classify_type(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_fmin(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, bool IsNumBuiltin)
static bool interp__builtin_blend(InterpState &S, CodePtr OpPC, const CallExpr *Call)
bool SetThreeWayComparisonField(InterpState &S, CodePtr OpPC, const Pointer &Ptr, const APSInt &IntValue)
Sets the given integral value to the pointer, which is of a std::{weak,partial,strong}...
static bool interp__builtin_operator_delete(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_fabs(InterpState &S, CodePtr OpPC, const InterpFrame *Frame)
static bool interp__builtin_memcmp(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned ID)
static bool interp__builtin_atomic_lock_free(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinOp)
bool __atomic_always_lock_free(size_t, void const volatile*) bool __atomic_is_lock_free(size_t,...
static llvm::APSInt convertBoolVectorToInt(const Pointer &Val)
static bool interp__builtin_move(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_clz(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinOp)
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr)
Checks if a pointer points to a mutable field.
Definition Interp.cpp:594
static bool interp__builtin_parity(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_is_aligned_up_down(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinOp)
__builtin_is_aligned() __builtin_align_up() __builtin_align_down() The first parameter is either an i...
static bool interp__builtin_bitreverse(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool Activate(InterpState &S, CodePtr OpPC)
Definition Interp.h:1983
static bool interp__builtin_ia32_addcarry_subborrow(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinOp)
(CarryIn, LHS, RHS, Result)
static bool isOneByteCharacterType(QualType T)
Determine if T is a character type for which we guarantee that sizeof(T) == 1.
static unsigned computePointerOffset(const ASTContext &ASTCtx, const Pointer &Ptr)
Compute the byte offset of Ptr in the full declaration.
static bool interp__builtin_strcmp(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned ID)
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr, AccessKinds AK)
Checks if a value can be loaded from a block.
Definition Interp.cpp:793
static bool interp__builtin_overflowop(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned BuiltinOp)
static bool interp__builtin_inf(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_vec_ext(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
static bool interp__builtin_isinf(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, bool CheckSign, const CallExpr *Call)
static bool interp__builtin_os_log_format_buffer_size(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool InterpretOffsetOf(InterpState &S, CodePtr OpPC, const OffsetOfExpr *E, ArrayRef< int64_t > ArrayIndices, int64_t &Result)
Interpret an offsetof operation.
static bool interp__builtin_x86_insert_subvector(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr, AccessKinds AK)
Checks if a pointer is in range.
Definition Interp.cpp:519
static bool pointsToLastObject(const Pointer &Ptr)
Does Ptr point to the last subobject?
static bool interp__builtin_select(InterpState &S, CodePtr OpPC, const CallExpr *Call)
AVX512 predicated move: "Result = Mask[] ? LHS[] : RHS[]".
static bool interp__builtin_ia32_pmul(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned BuiltinID)
llvm::APFloat APFloat
Definition Floating.h:27
static void discard(InterpStack &Stk, PrimType T)
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr, AccessKinds AK)
Checks if a pointer is live and accessible.
Definition Interp.cpp:414
static bool interp__builtin_fpclassify(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
Five int values followed by one floating value.
static bool interp__builtin_abs(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp_floating_comparison(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
static bool handleOverflow(InterpState &S, CodePtr OpPC, const T &SrcValue)
Definition Interp.h:153
llvm::APInt APInt
Definition FixedPoint.h:19
static bool copyComposite(InterpState &S, CodePtr OpPC, const Pointer &Src, Pointer &Dest, bool Activate)
static bool copyRecord(InterpState &S, CodePtr OpPC, const Pointer &Src, Pointer &Dest, bool Activate=false)
static bool interp__builtin_c11_atomic_is_lock_free(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool __c11_atomic_is_lock_free(size_t)
static void zeroAll(Pointer &Dest)
static bool interp__builtin_elementwise_int_binop(InterpState &S, CodePtr OpPC, const CallExpr *Call, llvm::function_ref< APInt(const APSInt &, const APSInt &)> Fn)
static bool interp__builtin_issubnormal(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_arithmetic_fence(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
PrimType
Enumeration of the primitive types of the VM.
Definition PrimType.h:34
static bool interp__builtin_isfinite(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool InterpretBuiltin(InterpState &S, CodePtr OpPC, const CallExpr *Call, uint32_t BuiltinID)
Interpret a builtin function.
static bool interp__builtin_expect(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_vec_set(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
static bool interp__builtin_complex(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
__builtin_complex(float A, float B);
static bool interp__builtin_ia32_pshuf(InterpState &S, CodePtr OpPC, const CallExpr *Call, bool IsShufHW)
bool CheckDummy(InterpState &S, CodePtr OpPC, const Block *B, AccessKinds AK)
Checks if a pointer is a dummy pointer.
Definition Interp.cpp:1156
static bool interp__builtin_assume_aligned(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
__builtin_assume_aligned(Ptr, Alignment[, ExtraOffset])
static bool interp__builtin_ptrauth_string_discriminator(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_ffs(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_memchr(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
static bool interp__builtin_x86_pack(InterpState &S, CodePtr, const CallExpr *E, llvm::function_ref< APInt(const APSInt &)> PackFn)
static void pushInteger(InterpState &S, const APSInt &Val, QualType QT)
Pushes Val on the stack as the type given by QT.
static bool interp__builtin_operator_new(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_strlen(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned ID)
bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr)
Checks if the array is offsetable.
Definition Interp.cpp:406
static bool interp__builtin_elementwise_abs(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinID)
static bool interp__builtin_copysign(InterpState &S, CodePtr OpPC, const InterpFrame *Frame)
static bool interp__builtin_iszero(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_elementwise_popcount(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinID)
Can be called with an integer or vector as the first and only parameter.
static bool interp__builtin_addressof(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_signbit(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool Error(InterpState &S, CodePtr OpPC)
Do nothing and just abort execution.
Definition Interp.h:3308
static bool interp__builtin_vector_reduce(InterpState &S, CodePtr OpPC, const CallExpr *Call, unsigned ID)
static bool interp__builtin_memcpy(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned ID)
static bool interp__builtin_elementwise_triop_fp(InterpState &S, CodePtr OpPC, const CallExpr *Call, llvm::function_ref< APFloat(const APFloat &, const APFloat &, const APFloat &, llvm::RoundingMode)> Fn)
size_t primSize(PrimType Type)
Returns the size of a primitive type in bytes.
Definition PrimType.cpp:23
static bool interp__builtin_popcount(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_object_size(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_carryop(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinOp)
Three integral values followed by a pointer (lhs, rhs, carry, carryOut).
bool CheckArraySize(InterpState &S, CodePtr OpPC, uint64_t NumElems)
Definition Interp.h:3701
static bool interp__builtin_ctz(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, unsigned BuiltinID)
static bool interp__builtin_is_constant_evaluated(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static APSInt popToAPSInt(InterpStack &Stk, PrimType T)
static std::optional< unsigned > computeFullDescSize(const ASTContext &ASTCtx, const Descriptor *Desc)
static bool interp__builtin_isfpclass(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
First parameter to __builtin_isfpclass is the floating value, the second one is an integral value.
static bool interp__builtin_issignaling(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static bool interp__builtin_nan(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call, bool Signaling)
static bool interp__builtin_clrsb(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
bool DoMemcpy(InterpState &S, CodePtr OpPC, const Pointer &Src, Pointer &Dest)
Copy the contents of Src into Dest.
static bool interp__builtin_elementwise_int_unaryop(InterpState &S, CodePtr OpPC, const CallExpr *Call, llvm::function_ref< APInt(const APSInt &)> Fn)
constexpr bool isIntegralType(PrimType T)
Definition PrimType.h:124
static bool interp__builtin_eh_return_data_regno(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static LLVM_ATTRIBUTE_UNUSED bool isNoopBuiltin(unsigned ID)
static void diagnoseNonConstexprBuiltin(InterpState &S, CodePtr OpPC, unsigned ID)
llvm::APSInt APSInt
Definition FixedPoint.h:20
static QualType getElemType(const Pointer &P)
static bool interp__builtin_ia32_pternlog(InterpState &S, CodePtr OpPC, const CallExpr *Call, bool MaskZ)
static bool interp__builtin_isnormal(InterpState &S, CodePtr OpPC, const InterpFrame *Frame, const CallExpr *Call)
static void swapBytes(std::byte *M, size_t N)
The JSON file list parser is used to communicate input to InstallAPI.
if(T->getSizeExpr()) TRY_TO(TraverseStmt(const_cast< Expr * >(T -> getSizeExpr())))
ComparisonCategoryResult
An enumeration representing the possible results of a three-way comparison.
@ Result
The result type of a method or function.
Definition TypeBase.h:905
@ AK_Read
Definition State.h:27
const FunctionProtoType * T
U cast(CodeGen::Address addr)
Definition Address.h:327
SmallVectorImpl< PartialDiagnosticAt > * Diag
Diag - If this is non-null, it will be filled in with a stack of notes indicating why evaluation fail...
Definition Expr.h:633
Track what bits have been initialized to known values and which ones have indeterminate value.
std::unique_ptr< std::byte[]> Data
A quantity in bits.
A quantity in bytes.
size_t getQuantity() const
Describes a memory block created by an allocation site.
Definition Descriptor.h:122
unsigned getNumElems() const
Returns the number of elements stored in the block.
Definition Descriptor.h:249
bool isPrimitive() const
Checks if the descriptor is of a primitive.
Definition Descriptor.h:263
QualType getElemQualType() const
bool isCompositeArray() const
Checks if the descriptor is of an array of composites.
Definition Descriptor.h:256
const ValueDecl * asValueDecl() const
Definition Descriptor.h:214
static constexpr unsigned MaxArrayElemBytes
Maximum number of bytes to be used for array elements.
Definition Descriptor.h:148
QualType getType() const
const Decl * asDecl() const
Definition Descriptor.h:210
static constexpr MetadataSize InlineDescMD
Definition Descriptor.h:144
unsigned getElemSize() const
returns the size of an element when the structure is viewed as an array.
Definition Descriptor.h:244
bool isPrimitiveArray() const
Checks if the descriptor is of an array of primitives.
Definition Descriptor.h:254
const VarDecl * asVarDecl() const
Definition Descriptor.h:218
PrimType getPrimType() const
Definition Descriptor.h:236
bool isRecord() const
Checks if the descriptor is of a record.
Definition Descriptor.h:268
const Record *const ElemRecord
Pointer to the record, if block contains records.
Definition Descriptor.h:153
const Expr * asExpr() const
Definition Descriptor.h:211
bool isArray() const
Checks if the descriptor is of an array.
Definition Descriptor.h:266
Mapping from primitive types to their representation.
Definition PrimType.h:134