clang 10.0.0svn
SemaChecking.cpp
1 //===- SemaChecking.cpp - Extra Semantic Checking -------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements extra semantic analysis beyond what is enforced
10 // by the C type system.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include "clang/AST/APValue.h"
15 #include "clang/AST/ASTContext.h"
16 #include "clang/AST/Attr.h"
17 #include "clang/AST/AttrIterator.h"
18 #include "clang/AST/CharUnits.h"
19 #include "clang/AST/Decl.h"
20 #include "clang/AST/DeclBase.h"
21 #include "clang/AST/DeclCXX.h"
22 #include "clang/AST/DeclObjC.h"
25 #include "clang/AST/Expr.h"
26 #include "clang/AST/ExprCXX.h"
27 #include "clang/AST/ExprObjC.h"
28 #include "clang/AST/ExprOpenMP.h"
29 #include "clang/AST/FormatString.h"
30 #include "clang/AST/NSAPI.h"
33 #include "clang/AST/Stmt.h"
34 #include "clang/AST/TemplateBase.h"
35 #include "clang/AST/Type.h"
36 #include "clang/AST/TypeLoc.h"
39 #include "clang/Basic/CharInfo.h"
40 #include "clang/Basic/Diagnostic.h"
42 #include "clang/Basic/LLVM.h"
49 #include "clang/Basic/Specifiers.h"
50 #include "clang/Basic/SyncScope.h"
53 #include "clang/Basic/TargetInfo.h"
54 #include "clang/Basic/TypeTraits.h"
55 #include "clang/Lex/Lexer.h" // TODO: Extract static functions to fix layering.
57 #include "clang/Sema/Lookup.h"
58 #include "clang/Sema/Ownership.h"
59 #include "clang/Sema/Scope.h"
60 #include "clang/Sema/ScopeInfo.h"
61 #include "clang/Sema/Sema.h"
63 #include "llvm/ADT/APFloat.h"
64 #include "llvm/ADT/APInt.h"
65 #include "llvm/ADT/APSInt.h"
66 #include "llvm/ADT/ArrayRef.h"
67 #include "llvm/ADT/DenseMap.h"
68 #include "llvm/ADT/FoldingSet.h"
69 #include "llvm/ADT/None.h"
70 #include "llvm/ADT/Optional.h"
71 #include "llvm/ADT/STLExtras.h"
72 #include "llvm/ADT/SmallBitVector.h"
73 #include "llvm/ADT/SmallPtrSet.h"
74 #include "llvm/ADT/SmallString.h"
75 #include "llvm/ADT/SmallVector.h"
76 #include "llvm/ADT/StringRef.h"
77 #include "llvm/ADT/StringSwitch.h"
78 #include "llvm/ADT/Triple.h"
79 #include "llvm/Support/AtomicOrdering.h"
80 #include "llvm/Support/Casting.h"
81 #include "llvm/Support/Compiler.h"
82 #include "llvm/Support/ConvertUTF.h"
83 #include "llvm/Support/ErrorHandling.h"
84 #include "llvm/Support/Format.h"
85 #include "llvm/Support/Locale.h"
86 #include "llvm/Support/MathExtras.h"
87 #include "llvm/Support/SaveAndRestore.h"
88 #include "llvm/Support/raw_ostream.h"
89 #include <algorithm>
90 #include <cassert>
91 #include <cstddef>
92 #include <cstdint>
93 #include <functional>
94 #include <limits>
95 #include <string>
96 #include <tuple>
97 #include <utility>
98 
99 using namespace clang;
100 using namespace sema;
101 
102 SourceLocation Sema::getLocationOfStringLiteralByte(const StringLiteral *SL,
103  unsigned ByteNo) const {
104  return SL->getLocationOfByte(ByteNo, getSourceManager(), LangOpts,
105  Context.getTargetInfo());
106 }
107 
108 /// Checks that a call expression's argument count is the desired number.
109 /// This is useful when doing custom type-checking. Returns true on error.
110 static bool checkArgCount(Sema &S, CallExpr *call, unsigned desiredArgCount) {
111  unsigned argCount = call->getNumArgs();
112  if (argCount == desiredArgCount) return false;
113 
114  if (argCount < desiredArgCount)
115  return S.Diag(call->getEndLoc(), diag::err_typecheck_call_too_few_args)
116  << 0 /*function call*/ << desiredArgCount << argCount
117  << call->getSourceRange();
118 
119  // Highlight all the excess arguments.
120  SourceRange range(call->getArg(desiredArgCount)->getBeginLoc(),
121  call->getArg(argCount - 1)->getEndLoc());
122 
123  return S.Diag(range.getBegin(), diag::err_typecheck_call_too_many_args)
124  << 0 /*function call*/ << desiredArgCount << argCount
125  << call->getArg(1)->getSourceRange();
126 }
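// Editorial note (not part of the original source): checkArgCount is the common
// entry point for fixed-arity builtins below; the "true on error" convention
// lets callers simply `return true;` to abort further checking. Illustrative
// example for a builtin declared to take two arguments:
//
//   __builtin_annotation(x)           // too few:  err_typecheck_call_too_few_args
//   __builtin_annotation(x, "t", 1)   // too many: err_typecheck_call_too_many_args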
127 
128 /// Check that the first argument to __builtin_annotation is an integer
129 /// and the second argument is a non-wide string literal.
130 static bool SemaBuiltinAnnotation(Sema &S, CallExpr *TheCall) {
131  if (checkArgCount(S, TheCall, 2))
132  return true;
133 
134  // First argument should be an integer.
135  Expr *ValArg = TheCall->getArg(0);
136  QualType Ty = ValArg->getType();
137  if (!Ty->isIntegerType()) {
138  S.Diag(ValArg->getBeginLoc(), diag::err_builtin_annotation_first_arg)
139  << ValArg->getSourceRange();
140  return true;
141  }
142 
143  // Second argument should be a constant string.
144  Expr *StrArg = TheCall->getArg(1)->IgnoreParenCasts();
145  StringLiteral *Literal = dyn_cast<StringLiteral>(StrArg);
146  if (!Literal || !Literal->isAscii()) {
147  S.Diag(StrArg->getBeginLoc(), diag::err_builtin_annotation_second_arg)
148  << StrArg->getSourceRange();
149  return true;
150  }
151 
152  TheCall->setType(Ty);
153  return false;
154 }
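// Illustrative usage (editorial addition, variable names hypothetical): the
// checks above accept an integer first argument and a narrow string literal
// second argument, e.g.
//
//   int t = __builtin_annotation(v, "tag");   // OK, result has v's integer type
//   __builtin_annotation(1.0f, "tag");        // rejected: first arg not an integer
//   __builtin_annotation(v, L"tag");          // rejected: wide string literal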
155 
156 static bool SemaBuiltinMSVCAnnotation(Sema &S, CallExpr *TheCall) {
157  // We need at least one argument.
158  if (TheCall->getNumArgs() < 1) {
159  S.Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
160  << 0 << 1 << TheCall->getNumArgs()
161  << TheCall->getCallee()->getSourceRange();
162  return true;
163  }
164 
165  // All arguments should be wide string literals.
166  for (Expr *Arg : TheCall->arguments()) {
167  auto *Literal = dyn_cast<StringLiteral>(Arg->IgnoreParenCasts());
168  if (!Literal || !Literal->isWide()) {
169  S.Diag(Arg->getBeginLoc(), diag::err_msvc_annotation_wide_str)
170  << Arg->getSourceRange();
171  return true;
172  }
173  }
174 
175  return false;
176 }
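// Illustrative usage (editorial addition): MSVC's __annotation takes one or
// more wide string literals, e.g.
//
//   __annotation(L"probe", L"category");   // OK
//   __annotation("narrow");                // rejected: err_msvc_annotation_wide_str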
177 
178 /// Check that the argument to __builtin_addressof is a glvalue, and set the
179 /// result type to the corresponding pointer type.
180 static bool SemaBuiltinAddressof(Sema &S, CallExpr *TheCall) {
181  if (checkArgCount(S, TheCall, 1))
182  return true;
183 
184  ExprResult Arg(TheCall->getArg(0));
185  QualType ResultType = S.CheckAddressOfOperand(Arg, TheCall->getBeginLoc());
186  if (ResultType.isNull())
187  return true;
188 
189  TheCall->setArg(0, Arg.get());
190  TheCall->setType(ResultType);
191  return false;
192 }
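// Illustrative usage (editorial addition): __builtin_addressof mirrors
// std::addressof and bypasses any overloaded operator&, e.g.
//
//   struct S { S *operator&() = delete; } s;
//   S *p = __builtin_addressof(s);   // OK; plain &s would be ill-formed here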
193 
194 /// Check the number of arguments and set the result type to
195 /// the argument type.
196 static bool SemaBuiltinPreserveAI(Sema &S, CallExpr *TheCall) {
197  if (checkArgCount(S, TheCall, 1))
198  return true;
199 
200  TheCall->setType(TheCall->getArg(0)->getType());
201  return false;
202 }
203 
204 static bool SemaBuiltinOverflow(Sema &S, CallExpr *TheCall) {
205  if (checkArgCount(S, TheCall, 3))
206  return true;
207 
208  // First two arguments should be integers.
209  for (unsigned I = 0; I < 2; ++I) {
210  ExprResult Arg = TheCall->getArg(I);
211  QualType Ty = Arg.get()->getType();
212  if (!Ty->isIntegerType()) {
213  S.Diag(Arg.get()->getBeginLoc(), diag::err_overflow_builtin_must_be_int)
214  << Ty << Arg.get()->getSourceRange();
215  return true;
216  }
217  InitializedEntity Entity = InitializedEntity::InitializeParameter(
218  S.getASTContext(), Ty, /*consume*/ false);
219  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
220  if (Arg.isInvalid())
221  return true;
222  TheCall->setArg(I, Arg.get());
223  }
224 
225  // Third argument should be a pointer to a non-const integer.
226  // IRGen correctly handles volatile, restrict, and address spaces, and
227  // the other qualifiers aren't possible.
228  {
229  ExprResult Arg = TheCall->getArg(2);
230  QualType Ty = Arg.get()->getType();
231  const auto *PtrTy = Ty->getAs<PointerType>();
232  if (!(PtrTy && PtrTy->getPointeeType()->isIntegerType() &&
233  !PtrTy->getPointeeType().isConstQualified())) {
234  S.Diag(Arg.get()->getBeginLoc(),
235  diag::err_overflow_builtin_must_be_ptr_int)
236  << Ty << Arg.get()->getSourceRange();
237  return true;
238  }
239  InitializedEntity Entity = InitializedEntity::InitializeParameter(
240  S.getASTContext(), Ty, /*consume*/ false);
241  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
242  if (Arg.isInvalid())
243  return true;
244  TheCall->setArg(2, Arg.get());
245  }
246  return false;
247 }
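// Illustrative usage (editorial addition, variable names hypothetical): the
// overflow builtins take two integer operands and a pointer to a non-const
// integer result, e.g.
//
//   int res;
//   bool ovf = __builtin_add_overflow(a, b, &res);     // OK
//   __builtin_add_overflow(1.0, b, &res);              // rejected: not an integer
//   __builtin_add_overflow(a, b, (const int *)&res);   // rejected: const pointee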
248 
249 static bool SemaBuiltinCallWithStaticChain(Sema &S, CallExpr *BuiltinCall) {
250  if (checkArgCount(S, BuiltinCall, 2))
251  return true;
252 
253  SourceLocation BuiltinLoc = BuiltinCall->getBeginLoc();
254  Expr *Builtin = BuiltinCall->getCallee()->IgnoreImpCasts();
255  Expr *Call = BuiltinCall->getArg(0);
256  Expr *Chain = BuiltinCall->getArg(1);
257 
258  if (Call->getStmtClass() != Stmt::CallExprClass) {
259  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_not_call)
260  << Call->getSourceRange();
261  return true;
262  }
263 
264  auto CE = cast<CallExpr>(Call);
265  if (CE->getCallee()->getType()->isBlockPointerType()) {
266  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_block_call)
267  << Call->getSourceRange();
268  return true;
269  }
270 
271  const Decl *TargetDecl = CE->getCalleeDecl();
272  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(TargetDecl))
273  if (FD->getBuiltinID()) {
274  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_builtin_call)
275  << Call->getSourceRange();
276  return true;
277  }
278 
279  if (isa<CXXPseudoDestructorExpr>(CE->getCallee()->IgnoreParens())) {
280  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_pdtor_call)
281  << Call->getSourceRange();
282  return true;
283  }
284 
285  ExprResult ChainResult = S.UsualUnaryConversions(Chain);
286  if (ChainResult.isInvalid())
287  return true;
288  if (!ChainResult.get()->getType()->isPointerType()) {
289  S.Diag(BuiltinLoc, diag::err_second_argument_to_cwsc_not_pointer)
290  << Chain->getSourceRange();
291  return true;
292  }
293 
294  QualType ReturnTy = CE->getCallReturnType(S.Context);
295  QualType ArgTys[2] = { ReturnTy, ChainResult.get()->getType() };
296  QualType BuiltinTy = S.Context.getFunctionType(
297  ReturnTy, ArgTys, FunctionProtoType::ExtProtoInfo());
298  QualType BuiltinPtrTy = S.Context.getPointerType(BuiltinTy);
299 
300  Builtin =
301  S.ImpCastExprToType(Builtin, BuiltinPtrTy, CK_BuiltinFnToFnPtr).get();
302 
303  BuiltinCall->setType(CE->getType());
304  BuiltinCall->setValueKind(CE->getValueKind());
305  BuiltinCall->setObjectKind(CE->getObjectKind());
306  BuiltinCall->setCallee(Builtin);
307  BuiltinCall->setArg(1, ChainResult.get());
308 
309  return false;
310 }
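// Illustrative usage (editorial addition, names hypothetical): the first
// argument must be a plain call (not a block, builtin, or pseudo-destructor
// call) and the second a pointer that is passed as the static chain, e.g.
//
//   void *chain = get_chain();                              // hypothetical helper
//   int r = __builtin_call_with_static_chain(f(1, 2), chain);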
311 
312 /// Check a call to BuiltinID for buffer overflows. If BuiltinID is a
313 /// __builtin_*_chk function, then use the object size argument specified in the
314 /// source. Otherwise, infer the object size using __builtin_object_size.
315 void Sema::checkFortifiedBuiltinMemoryFunction(FunctionDecl *FD,
316  CallExpr *TheCall) {
317  // FIXME: There are some more useful checks we could be doing here:
318  // - Analyze the format string of sprintf to see how much of buffer is used.
319  // - Evaluate strlen of strcpy arguments, use as object size.
320 
321  if (TheCall->isValueDependent() || TheCall->isTypeDependent() ||
322  isConstantEvaluated())
323  return;
324 
325  unsigned BuiltinID = FD->getBuiltinID(/*ConsiderWrappers=*/true);
326  if (!BuiltinID)
327  return;
328 
329  unsigned DiagID = 0;
330  bool IsChkVariant = false;
331  unsigned SizeIndex, ObjectIndex;
332  switch (BuiltinID) {
333  default:
334  return;
335  case Builtin::BI__builtin___memcpy_chk:
336  case Builtin::BI__builtin___memmove_chk:
337  case Builtin::BI__builtin___memset_chk:
338  case Builtin::BI__builtin___strlcat_chk:
339  case Builtin::BI__builtin___strlcpy_chk:
340  case Builtin::BI__builtin___strncat_chk:
341  case Builtin::BI__builtin___strncpy_chk:
342  case Builtin::BI__builtin___stpncpy_chk:
343  case Builtin::BI__builtin___memccpy_chk: {
344  DiagID = diag::warn_builtin_chk_overflow;
345  IsChkVariant = true;
346  SizeIndex = TheCall->getNumArgs() - 2;
347  ObjectIndex = TheCall->getNumArgs() - 1;
348  break;
349  }
350 
351  case Builtin::BI__builtin___snprintf_chk:
352  case Builtin::BI__builtin___vsnprintf_chk: {
353  DiagID = diag::warn_builtin_chk_overflow;
354  IsChkVariant = true;
355  SizeIndex = 1;
356  ObjectIndex = 3;
357  break;
358  }
359 
360  case Builtin::BIstrncat:
361  case Builtin::BI__builtin_strncat:
362  case Builtin::BIstrncpy:
363  case Builtin::BI__builtin_strncpy:
364  case Builtin::BIstpncpy:
365  case Builtin::BI__builtin_stpncpy: {
366  // Whether these functions overflow depends on the runtime strlen of the
367  // string, not just the buffer size, so emitting the "always overflow"
368  // diagnostic isn't quite right. We should still diagnose passing a buffer
369  // size larger than the destination buffer though; this is a runtime abort
370  // in _FORTIFY_SOURCE mode, and is quite suspicious otherwise.
371  DiagID = diag::warn_fortify_source_size_mismatch;
372  SizeIndex = TheCall->getNumArgs() - 1;
373  ObjectIndex = 0;
374  break;
375  }
376 
377  case Builtin::BImemcpy:
378  case Builtin::BI__builtin_memcpy:
379  case Builtin::BImemmove:
380  case Builtin::BI__builtin_memmove:
381  case Builtin::BImemset:
382  case Builtin::BI__builtin_memset: {
383  DiagID = diag::warn_fortify_source_overflow;
384  SizeIndex = TheCall->getNumArgs() - 1;
385  ObjectIndex = 0;
386  break;
387  }
388  case Builtin::BIsnprintf:
389  case Builtin::BI__builtin_snprintf:
390  case Builtin::BIvsnprintf:
391  case Builtin::BI__builtin_vsnprintf: {
392  DiagID = diag::warn_fortify_source_size_mismatch;
393  SizeIndex = 1;
394  ObjectIndex = 0;
395  break;
396  }
397  }
398 
399  llvm::APSInt ObjectSize;
400  // For __builtin___*_chk, the object size is explicitly provided by the caller
401  // (usually using __builtin_object_size). Use that value to check this call.
402  if (IsChkVariant) {
403  Expr::EvalResult Result;
404  Expr *SizeArg = TheCall->getArg(ObjectIndex);
405  if (!SizeArg->EvaluateAsInt(Result, getASTContext()))
406  return;
407  ObjectSize = Result.Val.getInt();
408 
409  // Otherwise, try to evaluate an imaginary call to __builtin_object_size.
410  } else {
411  // If the parameter has a pass_object_size attribute, then we should use its
412  // (potentially) more strict checking mode. Otherwise, conservatively assume
413  // type 0.
414  int BOSType = 0;
415  if (const auto *POS =
416  FD->getParamDecl(ObjectIndex)->getAttr<PassObjectSizeAttr>())
417  BOSType = POS->getType();
418 
419  Expr *ObjArg = TheCall->getArg(ObjectIndex);
420  uint64_t Result;
421  if (!ObjArg->tryEvaluateObjectSize(Result, getASTContext(), BOSType))
422  return;
423  // Get the object size in the target's size_t width.
424  const TargetInfo &TI = getASTContext().getTargetInfo();
425  unsigned SizeTypeWidth = TI.getTypeWidth(TI.getSizeType());
426  ObjectSize = llvm::APSInt::getUnsigned(Result).extOrTrunc(SizeTypeWidth);
427  }
428 
429  // Evaluate the number of bytes of the object that this call will use.
430  Expr::EvalResult Result;
431  Expr *UsedSizeArg = TheCall->getArg(SizeIndex);
432  if (!UsedSizeArg->EvaluateAsInt(Result, getASTContext()))
433  return;
434  llvm::APSInt UsedSize = Result.Val.getInt();
435 
436  if (UsedSize.ule(ObjectSize))
437  return;
438 
439  StringRef FunctionName = getASTContext().BuiltinInfo.getName(BuiltinID);
440  // Skim off the details of whichever builtin was called to produce a better
441 // diagnostic, as it's unlikely that the user wrote the __builtin explicitly.
442  if (IsChkVariant) {
443  FunctionName = FunctionName.drop_front(std::strlen("__builtin___"));
444  FunctionName = FunctionName.drop_back(std::strlen("_chk"));
445  } else if (FunctionName.startswith("__builtin_")) {
446  FunctionName = FunctionName.drop_front(std::strlen("__builtin_"));
447  }
448 
449  DiagRuntimeBehavior(TheCall->getBeginLoc(), TheCall,
450  PDiag(DiagID)
451  << FunctionName << ObjectSize.toString(/*Radix=*/10)
452  << UsedSize.toString(/*Radix=*/10));
453 }
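// Illustrative diagnostics (editorial addition, names hypothetical): with a
// known destination size, a call that provably writes past the object is
// flagged here, e.g.
//
//   char buf[4];
//   __builtin___memcpy_chk(buf, src, 8, __builtin_object_size(buf, 0));
//     // -> warn_builtin_chk_overflow, reported against "memcpy"
//   memcpy(buf, src, 8);   // -> warn_fortify_source_overflow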
454 
455 static bool SemaBuiltinSEHScopeCheck(Sema &SemaRef, CallExpr *TheCall,
456  Scope::ScopeFlags NeededScopeFlags,
457  unsigned DiagID) {
458  // Scopes aren't available during instantiation. Fortunately, builtin
459  // functions cannot be template args so they cannot be formed through template
460  // instantiation. Therefore checking once during the parse is sufficient.
461  if (SemaRef.inTemplateInstantiation())
462  return false;
463 
464  Scope *S = SemaRef.getCurScope();
465  while (S && !S->isSEHExceptScope())
466  S = S->getParent();
467  if (!S || !(S->getFlags() & NeededScopeFlags)) {
468  auto *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
469  SemaRef.Diag(TheCall->getExprLoc(), DiagID)
470  << DRE->getDecl()->getIdentifier();
471  return true;
472  }
473 
474  return false;
475 }
476 
477 static inline bool isBlockPointer(Expr *Arg) {
478  return Arg->getType()->isBlockPointerType();
479 }
480 
481 /// OpenCL C v2.0, s6.13.17.2 - Checks that the block parameters are all local
482 /// void*, which is a requirement of device side enqueue.
483 static bool checkOpenCLBlockArgs(Sema &S, Expr *BlockArg) {
484  const BlockPointerType *BPT =
485  cast<BlockPointerType>(BlockArg->getType().getCanonicalType());
486  ArrayRef<QualType> Params =
487  BPT->getPointeeType()->castAs<FunctionProtoType>()->getParamTypes();
488  unsigned ArgCounter = 0;
489  bool IllegalParams = false;
490  // Iterate through the block parameters until either one is found that is not
491  // a local void*, or the block is valid.
492  for (ArrayRef<QualType>::iterator I = Params.begin(), E = Params.end();
493  I != E; ++I, ++ArgCounter) {
494  if (!(*I)->isPointerType() || !(*I)->getPointeeType()->isVoidType() ||
495  (*I)->getPointeeType().getQualifiers().getAddressSpace() !=
496  LangAS::opencl_local) {
497  // Get the location of the error. If a block literal has been passed
498  // (BlockExpr) then we can point straight to the offending argument,
499  // else we just point to the variable reference.
500  SourceLocation ErrorLoc;
501  if (isa<BlockExpr>(BlockArg)) {
502  BlockDecl *BD = cast<BlockExpr>(BlockArg)->getBlockDecl();
503  ErrorLoc = BD->getParamDecl(ArgCounter)->getBeginLoc();
504  } else if (isa<DeclRefExpr>(BlockArg)) {
505  ErrorLoc = cast<DeclRefExpr>(BlockArg)->getBeginLoc();
506  }
507  S.Diag(ErrorLoc,
508  diag::err_opencl_enqueue_kernel_blocks_non_local_void_args);
509  IllegalParams = true;
510  }
511  }
512 
513  return IllegalParams;
514 }
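// Illustrative (editorial addition, names hypothetical): for device-side
// enqueue, every block parameter must be a 'local void *', e.g.
//
//   enqueue_kernel(q, flags, ndr, ^(local void *p) { /* ... */ }, 16);   // OK
//   enqueue_kernel(q, flags, ndr, ^(global int *p) { /* ... */ }, 16);   // rejected here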
515 
516 static bool checkOpenCLSubgroupExt(Sema &S, CallExpr *Call) {
517  if (!S.getOpenCLOptions().isEnabled("cl_khr_subgroups")) {
518  S.Diag(Call->getBeginLoc(), diag::err_opencl_requires_extension)
519  << 1 << Call->getDirectCallee() << "cl_khr_subgroups";
520  return true;
521  }
522  return false;
523 }
524 
525 static bool SemaOpenCLBuiltinNDRangeAndBlock(Sema &S, CallExpr *TheCall) {
526  if (checkArgCount(S, TheCall, 2))
527  return true;
528 
529  if (checkOpenCLSubgroupExt(S, TheCall))
530  return true;
531 
532  // First argument is an ndrange_t type.
533  Expr *NDRangeArg = TheCall->getArg(0);
534  if (NDRangeArg->getType().getUnqualifiedType().getAsString() != "ndrange_t") {
535  S.Diag(NDRangeArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
536  << TheCall->getDirectCallee() << "'ndrange_t'";
537  return true;
538  }
539 
540  Expr *BlockArg = TheCall->getArg(1);
541  if (!isBlockPointer(BlockArg)) {
542  S.Diag(BlockArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
543  << TheCall->getDirectCallee() << "block";
544  return true;
545  }
546  return checkOpenCLBlockArgs(S, BlockArg);
547 }
548 
549 /// OpenCL C v2.0, s6.13.17.6 - Check the argument to the
550 /// get_kernel_work_group_size
551 /// and get_kernel_preferred_work_group_size_multiple builtin functions.
552 static bool SemaOpenCLBuiltinKernelWorkGroupSize(Sema &S, CallExpr *TheCall) {
553  if (checkArgCount(S, TheCall, 1))
554  return true;
555 
556  Expr *BlockArg = TheCall->getArg(0);
557  if (!isBlockPointer(BlockArg)) {
558  S.Diag(BlockArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
559  << TheCall->getDirectCallee() << "block";
560  return true;
561  }
562  return checkOpenCLBlockArgs(S, BlockArg);
563 }
564 
565 /// Diagnose integer type and any valid implicit conversion to it.
566 static bool checkOpenCLEnqueueIntType(Sema &S, Expr *E,
567  const QualType &IntType);
568 
569 static bool checkOpenCLEnqueueLocalSizeArgs(Sema &S, CallExpr *TheCall,
570  unsigned Start, unsigned End) {
571  bool IllegalParams = false;
572  for (unsigned I = Start; I <= End; ++I)
573  IllegalParams |= checkOpenCLEnqueueIntType(S, TheCall->getArg(I),
574  S.Context.getSizeType());
575  return IllegalParams;
576 }
577 
578 /// OpenCL v2.0, s6.13.17.1 - Check that sizes are provided for all
579 /// 'local void*' parameters of the passed block.
580 static bool checkOpenCLEnqueueVariadicArgs(Sema &S, CallExpr *TheCall,
581  Expr *BlockArg,
582  unsigned NumNonVarArgs) {
583  const BlockPointerType *BPT =
584  cast<BlockPointerType>(BlockArg->getType().getCanonicalType());
585  unsigned NumBlockParams =
586  BPT->getPointeeType()->castAs<FunctionProtoType>()->getNumParams();
587  unsigned TotalNumArgs = TheCall->getNumArgs();
588 
589  // For each argument passed to the block, a corresponding uint needs to
590  // be passed to describe the size of the local memory.
591  if (TotalNumArgs != NumBlockParams + NumNonVarArgs) {
592  S.Diag(TheCall->getBeginLoc(),
593  diag::err_opencl_enqueue_kernel_local_size_args);
594  return true;
595  }
596 
597  // Check that the sizes of the local memory are specified by integers.
598  return checkOpenCLEnqueueLocalSizeArgs(S, TheCall, NumNonVarArgs,
599  TotalNumArgs - 1);
600 }
601 
602 /// OpenCL C v2.0, s6.13.17 - Enqueue kernel function contains four different
603 /// overload formats specified in Table 6.13.17.1.
604 /// int enqueue_kernel(queue_t queue,
605 /// kernel_enqueue_flags_t flags,
606 /// const ndrange_t ndrange,
607 /// void (^block)(void))
608 /// int enqueue_kernel(queue_t queue,
609 /// kernel_enqueue_flags_t flags,
610 /// const ndrange_t ndrange,
611 /// uint num_events_in_wait_list,
612 /// clk_event_t *event_wait_list,
613 /// clk_event_t *event_ret,
614 /// void (^block)(void))
615 /// int enqueue_kernel(queue_t queue,
616 /// kernel_enqueue_flags_t flags,
617 /// const ndrange_t ndrange,
618 /// void (^block)(local void*, ...),
619 /// uint size0, ...)
620 /// int enqueue_kernel(queue_t queue,
621 /// kernel_enqueue_flags_t flags,
622 /// const ndrange_t ndrange,
623 /// uint num_events_in_wait_list,
624 /// clk_event_t *event_wait_list,
625 /// clk_event_t *event_ret,
626 /// void (^block)(local void*, ...),
627 /// uint size0, ...)
628 static bool SemaOpenCLBuiltinEnqueueKernel(Sema &S, CallExpr *TheCall) {
629  unsigned NumArgs = TheCall->getNumArgs();
630 
631  if (NumArgs < 4) {
632  S.Diag(TheCall->getBeginLoc(),
633  diag::err_typecheck_call_too_few_args_at_least)
634  << 0 << 4 << NumArgs;
635  return true;
636  }
637 
638  Expr *Arg0 = TheCall->getArg(0);
639  Expr *Arg1 = TheCall->getArg(1);
640  Expr *Arg2 = TheCall->getArg(2);
641  Expr *Arg3 = TheCall->getArg(3);
642 
643  // First argument always needs to be a queue_t type.
644  if (!Arg0->getType()->isQueueT()) {
645  S.Diag(TheCall->getArg(0)->getBeginLoc(),
646  diag::err_opencl_builtin_expected_type)
647  << TheCall->getDirectCallee() << S.Context.OCLQueueTy;
648  return true;
649  }
650 
651  // Second argument always needs to be a kernel_enqueue_flags_t enum value.
652  if (!Arg1->getType()->isIntegerType()) {
653  S.Diag(TheCall->getArg(1)->getBeginLoc(),
654  diag::err_opencl_builtin_expected_type)
655  << TheCall->getDirectCallee() << "'kernel_enqueue_flags_t' (i.e. uint)";
656  return true;
657  }
658 
659  // Third argument is always an ndrange_t type.
660  if (Arg2->getType().getUnqualifiedType().getAsString() != "ndrange_t") {
661  S.Diag(TheCall->getArg(2)->getBeginLoc(),
662  diag::err_opencl_builtin_expected_type)
663  << TheCall->getDirectCallee() << "'ndrange_t'";
664  return true;
665  }
666 
667  // With four arguments, there is only one form that the function could be
668  // called in: no events and no variable arguments.
669  if (NumArgs == 4) {
670  // check that the last argument is the right block type.
671  if (!isBlockPointer(Arg3)) {
672  S.Diag(Arg3->getBeginLoc(), diag::err_opencl_builtin_expected_type)
673  << TheCall->getDirectCallee() << "block";
674  return true;
675  }
676  // we have a block type, check the prototype
677  const BlockPointerType *BPT =
678  cast<BlockPointerType>(Arg3->getType().getCanonicalType());
679  if (BPT->getPointeeType()->castAs<FunctionProtoType>()->getNumParams() > 0) {
680  S.Diag(Arg3->getBeginLoc(),
681  diag::err_opencl_enqueue_kernel_blocks_no_args);
682  return true;
683  }
684  return false;
685  }
686  // we can have block + varargs.
687  if (isBlockPointer(Arg3))
688  return (checkOpenCLBlockArgs(S, Arg3) ||
689  checkOpenCLEnqueueVariadicArgs(S, TheCall, Arg3, 4));
690  // last two cases with either exactly 7 args or 7 args and varargs.
691  if (NumArgs >= 7) {
692  // check common block argument.
693  Expr *Arg6 = TheCall->getArg(6);
694  if (!isBlockPointer(Arg6)) {
695  S.Diag(Arg6->getBeginLoc(), diag::err_opencl_builtin_expected_type)
696  << TheCall->getDirectCallee() << "block";
697  return true;
698  }
699  if (checkOpenCLBlockArgs(S, Arg6))
700  return true;
701 
702  // Fourth argument has to be an integer type.
703  if (!Arg3->getType()->isIntegerType()) {
704  S.Diag(TheCall->getArg(3)->getBeginLoc(),
705  diag::err_opencl_builtin_expected_type)
706  << TheCall->getDirectCallee() << "integer";
707  return true;
708  }
709  // check remaining common arguments.
710  Expr *Arg4 = TheCall->getArg(4);
711  Expr *Arg5 = TheCall->getArg(5);
712 
713  // Fifth argument is always passed as a pointer to clk_event_t.
714  if (!Arg4->isNullPointerConstant(S.Context,
715  Expr::NPC_ValueDependentIsNotNull) &&
716  !Arg4->getType()->getPointeeOrArrayElementType()->isClkEventT()) {
717  S.Diag(TheCall->getArg(4)->getBeginLoc(),
718  diag::err_opencl_builtin_expected_type)
719  << TheCall->getDirectCallee()
720  << S.Context.getPointerType(S.Context.OCLClkEventTy);
721  return true;
722  }
723 
724  // Sixth argument is always passed as a pointer to clk_event_t.
725  if (!Arg5->isNullPointerConstant(S.Context,
726  Expr::NPC_ValueDependentIsNotNull) &&
727  !(Arg5->getType()->isPointerType() &&
728  Arg5->getType()->getPointeeType()->isClkEventT())) {
729  S.Diag(TheCall->getArg(5)->getBeginLoc(),
730  diag::err_opencl_builtin_expected_type)
731  << TheCall->getDirectCallee()
732  << S.Context.getPointerType(S.Context.OCLClkEventTy);
733  return true;
734  }
735 
736  if (NumArgs == 7)
737  return false;
738 
739  return checkOpenCLEnqueueVariadicArgs(S, TheCall, Arg6, 7);
740  }
741 
742  // None of the specific cases was detected, so give the generic error.
743  S.Diag(TheCall->getBeginLoc(),
744  diag::err_opencl_enqueue_kernel_incorrect_args);
745  return true;
746 }
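// Illustrative (editorial addition, names hypothetical): the event-list
// overload checked above looks like
//
//   clk_event_t evt;
//   enqueue_kernel(q, CLK_ENQUEUE_FLAGS_WAIT_KERNEL, ndr,
//                  0, NULL, &evt, ^(void) { /* ... */ });   // 7-argument form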
747 
748 /// Returns OpenCL access qual.
749 static OpenCLAccessAttr *getOpenCLArgAccess(const Decl *D) {
750  return D->getAttr<OpenCLAccessAttr>();
751 }
752 
753 /// Returns true if the first argument is not a pipe or has the wrong access qualifier.
754 static bool checkOpenCLPipeArg(Sema &S, CallExpr *Call) {
755  const Expr *Arg0 = Call->getArg(0);
756  // First argument type should always be pipe.
757  if (!Arg0->getType()->isPipeType()) {
758  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_first_arg)
759  << Call->getDirectCallee() << Arg0->getSourceRange();
760  return true;
761  }
762  OpenCLAccessAttr *AccessQual =
763  getOpenCLArgAccess(cast<DeclRefExpr>(Arg0)->getDecl());
764  // Validates the access qualifier is compatible with the call.
765  // OpenCL v2.0 s6.13.16 - The access qualifiers for pipe should only be
766  // read_only and write_only, and assumed to be read_only if no qualifier is
767  // specified.
768  switch (Call->getDirectCallee()->getBuiltinID()) {
769  case Builtin::BIread_pipe:
770  case Builtin::BIreserve_read_pipe:
771  case Builtin::BIcommit_read_pipe:
772  case Builtin::BIwork_group_reserve_read_pipe:
773  case Builtin::BIsub_group_reserve_read_pipe:
774  case Builtin::BIwork_group_commit_read_pipe:
775  case Builtin::BIsub_group_commit_read_pipe:
776  if (!(!AccessQual || AccessQual->isReadOnly())) {
777  S.Diag(Arg0->getBeginLoc(),
778  diag::err_opencl_builtin_pipe_invalid_access_modifier)
779  << "read_only" << Arg0->getSourceRange();
780  return true;
781  }
782  break;
783  case Builtin::BIwrite_pipe:
784  case Builtin::BIreserve_write_pipe:
785  case Builtin::BIcommit_write_pipe:
786  case Builtin::BIwork_group_reserve_write_pipe:
787  case Builtin::BIsub_group_reserve_write_pipe:
788  case Builtin::BIwork_group_commit_write_pipe:
789  case Builtin::BIsub_group_commit_write_pipe:
790  if (!(AccessQual && AccessQual->isWriteOnly())) {
791  S.Diag(Arg0->getBeginLoc(),
792  diag::err_opencl_builtin_pipe_invalid_access_modifier)
793  << "write_only" << Arg0->getSourceRange();
794  return true;
795  }
796  break;
797  default:
798  break;
799  }
800  return false;
801 }
802 
803 /// Returns true if pipe element type is different from the pointer.
804 static bool checkOpenCLPipePacketType(Sema &S, CallExpr *Call, unsigned Idx) {
805  const Expr *Arg0 = Call->getArg(0);
806  const Expr *ArgIdx = Call->getArg(Idx);
807  const PipeType *PipeTy = cast<PipeType>(Arg0->getType());
808  const QualType EltTy = PipeTy->getElementType();
809  const PointerType *ArgTy = ArgIdx->getType()->getAs<PointerType>();
810  // The Idx argument should be a pointer and the type of the pointer and
811  // the type of pipe element should also be the same.
812  if (!ArgTy ||
813  !S.Context.hasSameType(
814  EltTy, ArgTy->getPointeeType()->getCanonicalTypeInternal())) {
815  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
816  << Call->getDirectCallee() << S.Context.getPointerType(EltTy)
817  << ArgIdx->getType() << ArgIdx->getSourceRange();
818  return true;
819  }
820  return false;
821 }
822 
823 // Performs semantic analysis for the read/write_pipe call.
824 // \param S Reference to the semantic analyzer.
825 // \param Call A pointer to the builtin call.
826 // \return True if a semantic error has been found, false otherwise.
827 static bool SemaBuiltinRWPipe(Sema &S, CallExpr *Call) {
828  // OpenCL v2.0 s6.13.16.2 - The built-in read/write
829  // functions have two forms.
830  switch (Call->getNumArgs()) {
831  case 2:
832  if (checkOpenCLPipeArg(S, Call))
833  return true;
834  // The call with 2 arguments should be
835  // read/write_pipe(pipe T, T*).
836  // Check packet type T.
837  if (checkOpenCLPipePacketType(S, Call, 1))
838  return true;
839  break;
840 
841  case 4: {
842  if (checkOpenCLPipeArg(S, Call))
843  return true;
844  // The call with 4 arguments should be
845  // read/write_pipe(pipe T, reserve_id_t, uint, T*).
846  // Check reserve_id_t.
847  if (!Call->getArg(1)->getType()->isReserveIDT()) {
848  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
849  << Call->getDirectCallee() << S.Context.OCLReserveIDTy
850  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
851  return true;
852  }
853 
854  // Check the index.
855  const Expr *Arg2 = Call->getArg(2);
856  if (!Arg2->getType()->isIntegerType() &&
857  !Arg2->getType()->isUnsignedIntegerType()) {
858  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
859  << Call->getDirectCallee() << S.Context.UnsignedIntTy
860  << Arg2->getType() << Arg2->getSourceRange();
861  return true;
862  }
863 
864  // Check packet type T.
865  if (checkOpenCLPipePacketType(S, Call, 3))
866  return true;
867  } break;
868  default:
869  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_arg_num)
870  << Call->getDirectCallee() << Call->getSourceRange();
871  return true;
872  }
873 
874  return false;
875 }
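// Illustrative (editorial addition, names hypothetical): the two forms
// accepted above are
//
//   read_pipe(p, &val);                // pipe T, T*
//   read_pipe(p, rid, index, &val);    // pipe T, reserve_id_t, uint, T*
//
// where &val must point to the pipe's packet type T.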
876 
877 // Performs a semantic analysis on the {work_group_/sub_group_
878 // /_}reserve_{read/write}_pipe
879 // \param S Reference to the semantic analyzer.
880 // \param Call The call to the builtin function to be analyzed.
881 // \return True if a semantic error was found, false otherwise.
882 static bool SemaBuiltinReserveRWPipe(Sema &S, CallExpr *Call) {
883  if (checkArgCount(S, Call, 2))
884  return true;
885 
886  if (checkOpenCLPipeArg(S, Call))
887  return true;
888 
889  // Check the reserve size.
890  if (!Call->getArg(1)->getType()->isIntegerType() &&
891  !Call->getArg(1)->getType()->isUnsignedIntegerType()) {
892  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
893  << Call->getDirectCallee() << S.Context.UnsignedIntTy
894  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
895  return true;
896  }
897 
898  // Since the return type of the reserve_read/write_pipe built-in functions
899  // is reserve_id_t, which is not defined in the builtin def file, we use int
900  // as the return type and need to override the return type of these functions.
901  Call->setType(S.Context.OCLReserveIDTy);
902 
903  return false;
904 }
905 
906 // Performs a semantic analysis on {work_group_/sub_group_
907 // /_}commit_{read/write}_pipe
908 // \param S Reference to the semantic analyzer.
909 // \param Call The call to the builtin function to be analyzed.
910 // \return True if a semantic error was found, false otherwise.
911 static bool SemaBuiltinCommitRWPipe(Sema &S, CallExpr *Call) {
912  if (checkArgCount(S, Call, 2))
913  return true;
914 
915  if (checkOpenCLPipeArg(S, Call))
916  return true;
917 
918  // Check reserve_id_t.
919  if (!Call->getArg(1)->getType()->isReserveIDT()) {
920  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
921  << Call->getDirectCallee() << S.Context.OCLReserveIDTy
922  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
923  return true;
924  }
925 
926  return false;
927 }
928 
929 // Performs a semantic analysis on the call to built-in Pipe
930 // Query Functions.
931 // \param S Reference to the semantic analyzer.
932 // \param Call The call to the builtin function to be analyzed.
933 // \return True if a semantic error was found, false otherwise.
934 static bool SemaBuiltinPipePackets(Sema &S, CallExpr *Call) {
935  if (checkArgCount(S, Call, 1))
936  return true;
937 
938  if (!Call->getArg(0)->getType()->isPipeType()) {
939  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_first_arg)
940  << Call->getDirectCallee() << Call->getArg(0)->getSourceRange();
941  return true;
942  }
943 
944  return false;
945 }
946 
947 // OpenCL v2.0 s6.13.9 - Address space qualifier functions.
948 // Performs semantic analysis for the to_global/local/private call.
949 // \param S Reference to the semantic analyzer.
950 // \param BuiltinID ID of the builtin function.
951 // \param Call A pointer to the builtin call.
952 // \return True if a semantic error has been found, false otherwise.
953 static bool SemaOpenCLBuiltinToAddr(Sema &S, unsigned BuiltinID,
954  CallExpr *Call) {
955  if (Call->getNumArgs() != 1) {
956  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_to_addr_arg_num)
957  << Call->getDirectCallee() << Call->getSourceRange();
958  return true;
959  }
960 
961  auto RT = Call->getArg(0)->getType();
962  if (!RT->isPointerType() || RT->getPointeeType()
963  .getAddressSpace() == LangAS::opencl_constant) {
964  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_to_addr_invalid_arg)
965  << Call->getArg(0) << Call->getDirectCallee() << Call->getSourceRange();
966  return true;
967  }
968 
969  if (RT->getPointeeType().getAddressSpace() != LangAS::opencl_generic) {
970  S.Diag(Call->getArg(0)->getBeginLoc(),
971  diag::warn_opencl_generic_address_space_arg)
972  << Call->getDirectCallee()->getNameInfo().getAsString()
973  << Call->getArg(0)->getSourceRange();
974  }
975 
976  RT = RT->getPointeeType();
977  auto Qual = RT.getQualifiers();
978  switch (BuiltinID) {
979  case Builtin::BIto_global:
980  Qual.setAddressSpace(LangAS::opencl_global);
981  break;
982  case Builtin::BIto_local:
983  Qual.setAddressSpace(LangAS::opencl_local);
984  break;
985  case Builtin::BIto_private:
986  Qual.setAddressSpace(LangAS::opencl_private);
987  break;
988  default:
989  llvm_unreachable("Invalid builtin function");
990  }
991  Call->setType(S.Context.getPointerType(S.Context.getQualifiedType(
992  RT.getUnqualifiedType(), Qual)));
993 
994  return false;
995 }
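// Illustrative (editorial addition, names hypothetical): the argument must be
// a pointer outside the constant address space, and the result type is
// re-qualified with the target address space, e.g.
//
//   int *gp = get_ptr();              // generic pointer in OpenCL 2.0
//   global int *g = to_global(gp);    // OK: result re-qualified as global
//   to_global((constant int *)c);     // rejected: constant address space argument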
996 
997 static ExprResult SemaBuiltinLaunder(Sema &S, CallExpr *TheCall) {
998  if (checkArgCount(S, TheCall, 1))
999  return ExprError();
1000 
1001  // Compute __builtin_launder's parameter type from the argument.
1002  // The parameter type is:
1003  // * The type of the argument if it's not an array or function type,
1004  // Otherwise,
1005  // * The decayed argument type.
1006  QualType ParamTy = [&]() {
1007  QualType ArgTy = TheCall->getArg(0)->getType();
1008  if (const ArrayType *Ty = ArgTy->getAsArrayTypeUnsafe())
1009  return S.Context.getPointerType(Ty->getElementType());
1010  if (ArgTy->isFunctionType()) {
1011  return S.Context.getPointerType(ArgTy);
1012  }
1013  return ArgTy;
1014  }();
1015 
1016  TheCall->setType(ParamTy);
1017 
1018  auto DiagSelect = [&]() -> llvm::Optional<unsigned> {
1019  if (!ParamTy->isPointerType())
1020  return 0;
1021  if (ParamTy->isFunctionPointerType())
1022  return 1;
1023  if (ParamTy->isVoidPointerType())
1024  return 2;
1025  return llvm::Optional<unsigned>{};
1026  }();
1027  if (DiagSelect.hasValue()) {
1028  S.Diag(TheCall->getBeginLoc(), diag::err_builtin_launder_invalid_arg)
1029  << DiagSelect.getValue() << TheCall->getSourceRange();
1030  return ExprError();
1031  }
1032 
1033  // We either have an incomplete class type, or we have a class template
1034  // whose instantiation has not been forced. Example:
1035  //
1036  // template <class T> struct Foo { T value; };
1037  // Foo<int> *p = nullptr;
1038  // auto *d = __builtin_launder(p);
1039  if (S.RequireCompleteType(TheCall->getBeginLoc(), ParamTy->getPointeeType(),
1040  diag::err_incomplete_type))
1041  return ExprError();
1042 
1043  assert(ParamTy->getPointeeType()->isObjectType() &&
1044  "Unhandled non-object pointer case");
1045 
1046  InitializedEntity Entity =
1047  InitializedEntity::InitializeParameter(S.Context, ParamTy, false);
1048  ExprResult Arg =
1049  S.PerformCopyInitialization(Entity, SourceLocation(), TheCall->getArg(0));
1050  if (Arg.isInvalid())
1051  return ExprError();
1052  TheCall->setArg(0, Arg.get());
1053 
1054  return TheCall;
1055 }
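// Illustrative usage (editorial addition, names hypothetical): __builtin_launder
// requires a pointer to a complete object type, e.g.
//
//   struct A { const int n; };
//   A *p = get_a();
//   A *q = __builtin_launder(p);         // OK, same type as the argument
//   __builtin_launder((void *)p);        // rejected: void pointer
//   __builtin_launder(&some_function);   // rejected: function pointer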
1056 
1057 // Emit an error and return true if the current architecture is not in the list
1058 // of supported architectures.
1059 static bool
1060 CheckBuiltinTargetSupport(Sema &S, unsigned BuiltinID, CallExpr *TheCall,
1061  ArrayRef<llvm::Triple::ArchType> SupportedArchs) {
1062  llvm::Triple::ArchType CurArch =
1063  S.getASTContext().getTargetInfo().getTriple().getArch();
1064  if (llvm::is_contained(SupportedArchs, CurArch))
1065  return false;
1066  S.Diag(TheCall->getBeginLoc(), diag::err_builtin_target_unsupported)
1067  << TheCall->getSourceRange();
1068  return true;
1069 }
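// Illustrative (editorial addition): for example, on an x86 target
//
//   unsigned char r = _interlockedbittestandset_acq(&val, 1);
//
// would be rejected below via err_builtin_target_unsupported, since the
// _acq/_rel/_nf variants are restricted to ARM, Thumb, and AArch64.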
1070 
1071 ExprResult
1072 Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
1073  CallExpr *TheCall) {
1074  ExprResult TheCallResult(TheCall);
1075 
1076  // Find out if any arguments are required to be integer constant expressions.
1077  unsigned ICEArguments = 0;
1078  ASTContext::GetBuiltinTypeError Error;
1079  Context.GetBuiltinType(BuiltinID, Error, &ICEArguments);
1080  if (Error != ASTContext::GE_None)
1081  ICEArguments = 0; // Don't diagnose previously diagnosed errors.
1082 
1083  // If any arguments are required to be ICE's, check and diagnose.
1084  for (unsigned ArgNo = 0; ICEArguments != 0; ++ArgNo) {
1085  // Skip arguments not required to be ICE's.
1086  if ((ICEArguments & (1 << ArgNo)) == 0) continue;
1087 
1088  llvm::APSInt Result;
1089  if (SemaBuiltinConstantArg(TheCall, ArgNo, Result))
1090  return true;
1091  ICEArguments &= ~(1 << ArgNo);
1092  }
1093 
1094  switch (BuiltinID) {
1095  case Builtin::BI__builtin___CFStringMakeConstantString:
1096  assert(TheCall->getNumArgs() == 1 &&
1097  "Wrong # arguments to builtin CFStringMakeConstantString");
1098  if (CheckObjCString(TheCall->getArg(0)))
1099  return ExprError();
1100  break;
1101  case Builtin::BI__builtin_ms_va_start:
1102  case Builtin::BI__builtin_stdarg_start:
1103  case Builtin::BI__builtin_va_start:
1104  if (SemaBuiltinVAStart(BuiltinID, TheCall))
1105  return ExprError();
1106  break;
1107  case Builtin::BI__va_start: {
1108  switch (Context.getTargetInfo().getTriple().getArch()) {
1109  case llvm::Triple::aarch64:
1110  case llvm::Triple::arm:
1111  case llvm::Triple::thumb:
1112  if (SemaBuiltinVAStartARMMicrosoft(TheCall))
1113  return ExprError();
1114  break;
1115  default:
1116  if (SemaBuiltinVAStart(BuiltinID, TheCall))
1117  return ExprError();
1118  break;
1119  }
1120  break;
1121  }
1122 
1123  // The acquire, release, and no fence variants are ARM and AArch64 only.
1124  case Builtin::BI_interlockedbittestandset_acq:
1125  case Builtin::BI_interlockedbittestandset_rel:
1126  case Builtin::BI_interlockedbittestandset_nf:
1127  case Builtin::BI_interlockedbittestandreset_acq:
1128  case Builtin::BI_interlockedbittestandreset_rel:
1129  case Builtin::BI_interlockedbittestandreset_nf:
1130  if (CheckBuiltinTargetSupport(
1131  *this, BuiltinID, TheCall,
1132  {llvm::Triple::arm, llvm::Triple::thumb, llvm::Triple::aarch64}))
1133  return ExprError();
1134  break;
1135 
1136  // The 64-bit bittest variants are x64, ARM, and AArch64 only.
1137  case Builtin::BI_bittest64:
1138  case Builtin::BI_bittestandcomplement64:
1139  case Builtin::BI_bittestandreset64:
1140  case Builtin::BI_bittestandset64:
1141  case Builtin::BI_interlockedbittestandreset64:
1142  case Builtin::BI_interlockedbittestandset64:
1143  if (CheckBuiltinTargetSupport(*this, BuiltinID, TheCall,
1144  {llvm::Triple::x86_64, llvm::Triple::arm,
1145  llvm::Triple::thumb, llvm::Triple::aarch64}))
1146  return ExprError();
1147  break;
1148 
1149  case Builtin::BI__builtin_isgreater:
1150  case Builtin::BI__builtin_isgreaterequal:
1151  case Builtin::BI__builtin_isless:
1152  case Builtin::BI__builtin_islessequal:
1153  case Builtin::BI__builtin_islessgreater:
1154  case Builtin::BI__builtin_isunordered:
1155  if (SemaBuiltinUnorderedCompare(TheCall))
1156  return ExprError();
1157  break;
1158  case Builtin::BI__builtin_fpclassify:
1159  if (SemaBuiltinFPClassification(TheCall, 6))
1160  return ExprError();
1161  break;
1162  case Builtin::BI__builtin_isfinite:
1163  case Builtin::BI__builtin_isinf:
1164  case Builtin::BI__builtin_isinf_sign:
1165  case Builtin::BI__builtin_isnan:
1166  case Builtin::BI__builtin_isnormal:
1167  case Builtin::BI__builtin_signbit:
1168  case Builtin::BI__builtin_signbitf:
1169  case Builtin::BI__builtin_signbitl:
1170  if (SemaBuiltinFPClassification(TheCall, 1))
1171  return ExprError();
1172  break;
1173  case Builtin::BI__builtin_shufflevector:
1174  return SemaBuiltinShuffleVector(TheCall);
1175  // TheCall will be freed by the smart pointer here, but that's fine, since
1176  // SemaBuiltinShuffleVector guts it, but then doesn't release it.
1177  case Builtin::BI__builtin_prefetch:
1178  if (SemaBuiltinPrefetch(TheCall))
1179  return ExprError();
1180  break;
1181  case Builtin::BI__builtin_alloca_with_align:
1182  if (SemaBuiltinAllocaWithAlign(TheCall))
1183  return ExprError();
1184  LLVM_FALLTHROUGH;
1185  case Builtin::BI__builtin_alloca:
1186  Diag(TheCall->getBeginLoc(), diag::warn_alloca)
1187  << TheCall->getDirectCallee();
1188  break;
1189  case Builtin::BI__assume:
1190  case Builtin::BI__builtin_assume:
1191  if (SemaBuiltinAssume(TheCall))
1192  return ExprError();
1193  break;
1194  case Builtin::BI__builtin_assume_aligned:
1195  if (SemaBuiltinAssumeAligned(TheCall))
1196  return ExprError();
1197  break;
1198  case Builtin::BI__builtin_dynamic_object_size:
1199  case Builtin::BI__builtin_object_size:
1200  if (SemaBuiltinConstantArgRange(TheCall, 1, 0, 3))
1201  return ExprError();
1202  break;
1203  case Builtin::BI__builtin_longjmp:
1204  if (SemaBuiltinLongjmp(TheCall))
1205  return ExprError();
1206  break;
1207  case Builtin::BI__builtin_setjmp:
1208  if (SemaBuiltinSetjmp(TheCall))
1209  return ExprError();
1210  break;
1211  case Builtin::BI_setjmp:
1212  case Builtin::BI_setjmpex:
1213  if (checkArgCount(*this, TheCall, 1))
1214  return true;
1215  break;
1216  case Builtin::BI__builtin_classify_type:
1217  if (checkArgCount(*this, TheCall, 1)) return true;
1218  TheCall->setType(Context.IntTy);
1219  break;
1220  case Builtin::BI__builtin_constant_p: {
1221  if (checkArgCount(*this, TheCall, 1)) return true;
1222  ExprResult Arg = DefaultFunctionArrayLvalueConversion(TheCall->getArg(0));
1223  if (Arg.isInvalid()) return true;
1224  TheCall->setArg(0, Arg.get());
1225  TheCall->setType(Context.IntTy);
1226  break;
1227  }
1228  case Builtin::BI__builtin_launder:
1229  return SemaBuiltinLaunder(*this, TheCall);
1230  case Builtin::BI__sync_fetch_and_add:
1231  case Builtin::BI__sync_fetch_and_add_1:
1232  case Builtin::BI__sync_fetch_and_add_2:
1233  case Builtin::BI__sync_fetch_and_add_4:
1234  case Builtin::BI__sync_fetch_and_add_8:
1235  case Builtin::BI__sync_fetch_and_add_16:
1236  case Builtin::BI__sync_fetch_and_sub:
1237  case Builtin::BI__sync_fetch_and_sub_1:
1238  case Builtin::BI__sync_fetch_and_sub_2:
1239  case Builtin::BI__sync_fetch_and_sub_4:
1240  case Builtin::BI__sync_fetch_and_sub_8:
1241  case Builtin::BI__sync_fetch_and_sub_16:
1242  case Builtin::BI__sync_fetch_and_or:
1243  case Builtin::BI__sync_fetch_and_or_1:
1244  case Builtin::BI__sync_fetch_and_or_2:
1245  case Builtin::BI__sync_fetch_and_or_4:
1246  case Builtin::BI__sync_fetch_and_or_8:
1247  case Builtin::BI__sync_fetch_and_or_16:
1248  case Builtin::BI__sync_fetch_and_and:
1249  case Builtin::BI__sync_fetch_and_and_1:
1250  case Builtin::BI__sync_fetch_and_and_2:
1251  case Builtin::BI__sync_fetch_and_and_4:
1252  case Builtin::BI__sync_fetch_and_and_8:
1253  case Builtin::BI__sync_fetch_and_and_16:
1254  case Builtin::BI__sync_fetch_and_xor:
1255  case Builtin::BI__sync_fetch_and_xor_1:
1256  case Builtin::BI__sync_fetch_and_xor_2:
1257  case Builtin::BI__sync_fetch_and_xor_4:
1258  case Builtin::BI__sync_fetch_and_xor_8:
1259  case Builtin::BI__sync_fetch_and_xor_16:
1260  case Builtin::BI__sync_fetch_and_nand:
1261  case Builtin::BI__sync_fetch_and_nand_1:
1262  case Builtin::BI__sync_fetch_and_nand_2:
1263  case Builtin::BI__sync_fetch_and_nand_4:
1264  case Builtin::BI__sync_fetch_and_nand_8:
1265  case Builtin::BI__sync_fetch_and_nand_16:
1266  case Builtin::BI__sync_add_and_fetch:
1267  case Builtin::BI__sync_add_and_fetch_1:
1268  case Builtin::BI__sync_add_and_fetch_2:
1269  case Builtin::BI__sync_add_and_fetch_4:
1270  case Builtin::BI__sync_add_and_fetch_8:
1271  case Builtin::BI__sync_add_and_fetch_16:
1272  case Builtin::BI__sync_sub_and_fetch:
1273  case Builtin::BI__sync_sub_and_fetch_1:
1274  case Builtin::BI__sync_sub_and_fetch_2:
1275  case Builtin::BI__sync_sub_and_fetch_4:
1276  case Builtin::BI__sync_sub_and_fetch_8:
1277  case Builtin::BI__sync_sub_and_fetch_16:
1278  case Builtin::BI__sync_and_and_fetch:
1279  case Builtin::BI__sync_and_and_fetch_1:
1280  case Builtin::BI__sync_and_and_fetch_2:
1281  case Builtin::BI__sync_and_and_fetch_4:
1282  case Builtin::BI__sync_and_and_fetch_8:
1283  case Builtin::BI__sync_and_and_fetch_16:
1284  case Builtin::BI__sync_or_and_fetch:
1285  case Builtin::BI__sync_or_and_fetch_1:
1286  case Builtin::BI__sync_or_and_fetch_2:
1287  case Builtin::BI__sync_or_and_fetch_4:
1288  case Builtin::BI__sync_or_and_fetch_8:
1289  case Builtin::BI__sync_or_and_fetch_16:
1290  case Builtin::BI__sync_xor_and_fetch:
1291  case Builtin::BI__sync_xor_and_fetch_1:
1292  case Builtin::BI__sync_xor_and_fetch_2:
1293  case Builtin::BI__sync_xor_and_fetch_4:
1294  case Builtin::BI__sync_xor_and_fetch_8:
1295  case Builtin::BI__sync_xor_and_fetch_16:
1296  case Builtin::BI__sync_nand_and_fetch:
1297  case Builtin::BI__sync_nand_and_fetch_1:
1298  case Builtin::BI__sync_nand_and_fetch_2:
1299  case Builtin::BI__sync_nand_and_fetch_4:
1300  case Builtin::BI__sync_nand_and_fetch_8:
1301  case Builtin::BI__sync_nand_and_fetch_16:
1302  case Builtin::BI__sync_val_compare_and_swap:
1303  case Builtin::BI__sync_val_compare_and_swap_1:
1304  case Builtin::BI__sync_val_compare_and_swap_2:
1305  case Builtin::BI__sync_val_compare_and_swap_4:
1306  case Builtin::BI__sync_val_compare_and_swap_8:
1307  case Builtin::BI__sync_val_compare_and_swap_16:
1308  case Builtin::BI__sync_bool_compare_and_swap:
1309  case Builtin::BI__sync_bool_compare_and_swap_1:
1310  case Builtin::BI__sync_bool_compare_and_swap_2:
1311  case Builtin::BI__sync_bool_compare_and_swap_4:
1312  case Builtin::BI__sync_bool_compare_and_swap_8:
1313  case Builtin::BI__sync_bool_compare_and_swap_16:
1314  case Builtin::BI__sync_lock_test_and_set:
1315  case Builtin::BI__sync_lock_test_and_set_1:
1316  case Builtin::BI__sync_lock_test_and_set_2:
1317  case Builtin::BI__sync_lock_test_and_set_4:
1318  case Builtin::BI__sync_lock_test_and_set_8:
1319  case Builtin::BI__sync_lock_test_and_set_16:
1320  case Builtin::BI__sync_lock_release:
1321  case Builtin::BI__sync_lock_release_1:
1322  case Builtin::BI__sync_lock_release_2:
1323  case Builtin::BI__sync_lock_release_4:
1324  case Builtin::BI__sync_lock_release_8:
1325  case Builtin::BI__sync_lock_release_16:
1326  case Builtin::BI__sync_swap:
1327  case Builtin::BI__sync_swap_1:
1328  case Builtin::BI__sync_swap_2:
1329  case Builtin::BI__sync_swap_4:
1330  case Builtin::BI__sync_swap_8:
1331  case Builtin::BI__sync_swap_16:
1332  return SemaBuiltinAtomicOverloaded(TheCallResult);
1333  case Builtin::BI__sync_synchronize:
1334  Diag(TheCall->getBeginLoc(), diag::warn_atomic_implicit_seq_cst)
1335  << TheCall->getCallee()->getSourceRange();
1336  break;
1337  case Builtin::BI__builtin_nontemporal_load:
1338  case Builtin::BI__builtin_nontemporal_store:
1339  return SemaBuiltinNontemporalOverloaded(TheCallResult);
1340 #define BUILTIN(ID, TYPE, ATTRS)
1341 #define ATOMIC_BUILTIN(ID, TYPE, ATTRS) \
1342  case Builtin::BI##ID: \
1343  return SemaAtomicOpsOverloaded(TheCallResult, AtomicExpr::AO##ID);
1344 #include "clang/Basic/Builtins.def"
1345  case Builtin::BI__annotation:
1346  if (SemaBuiltinMSVCAnnotation(*this, TheCall))
1347  return ExprError();
1348  break;
1349  case Builtin::BI__builtin_annotation:
1350  if (SemaBuiltinAnnotation(*this, TheCall))
1351  return ExprError();
1352  break;
1353  case Builtin::BI__builtin_addressof:
1354  if (SemaBuiltinAddressof(*this, TheCall))
1355  return ExprError();
1356  break;
1357  case Builtin::BI__builtin_add_overflow:
1358  case Builtin::BI__builtin_sub_overflow:
1359  case Builtin::BI__builtin_mul_overflow:
1360  if (SemaBuiltinOverflow(*this, TheCall))
1361  return ExprError();
1362  break;
1363  case Builtin::BI__builtin_operator_new:
1364  case Builtin::BI__builtin_operator_delete: {
1365  bool IsDelete = BuiltinID == Builtin::BI__builtin_operator_delete;
1366  ExprResult Res =
1367  SemaBuiltinOperatorNewDeleteOverloaded(TheCallResult, IsDelete);
1368  if (Res.isInvalid())
1369  CorrectDelayedTyposInExpr(TheCallResult.get());
1370  return Res;
1371  }
1372  case Builtin::BI__builtin_dump_struct: {
1373  // We first want to ensure we are called with 2 arguments
1374  if (checkArgCount(*this, TheCall, 2))
1375  return ExprError();
1376  // Ensure that the first argument is of type 'struct XX *'
1377  const Expr *PtrArg = TheCall->getArg(0)->IgnoreParenImpCasts();
1378  const QualType PtrArgType = PtrArg->getType();
1379  if (!PtrArgType->isPointerType() ||
1380  !PtrArgType->getPointeeType()->isRecordType()) {
1381  Diag(PtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1382  << PtrArgType << "structure pointer" << 1 << 0 << 3 << 1 << PtrArgType
1383  << "structure pointer";
1384  return ExprError();
1385  }
1386 
1387  // Ensure that the second argument is of type 'FunctionType'
1388  const Expr *FnPtrArg = TheCall->getArg(1)->IgnoreImpCasts();
1389  const QualType FnPtrArgType = FnPtrArg->getType();
1390  if (!FnPtrArgType->isPointerType()) {
1391  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1392  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3 << 2
1393  << FnPtrArgType << "'int (*)(const char *, ...)'";
1394  return ExprError();
1395  }
1396 
1397  const auto *FuncType =
1398  FnPtrArgType->getPointeeType()->getAs<FunctionType>();
1399 
1400  if (!FuncType) {
1401  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1402  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3 << 2
1403  << FnPtrArgType << "'int (*)(const char *, ...)'";
1404  return ExprError();
1405  }
1406 
1407  if (const auto *FT = dyn_cast<FunctionProtoType>(FuncType)) {
1408  if (!FT->getNumParams()) {
1409  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1410  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3
1411  << 2 << FnPtrArgType << "'int (*)(const char *, ...)'";
1412  return ExprError();
1413  }
1414  QualType PT = FT->getParamType(0);
1415  if (!FT->isVariadic() || FT->getReturnType() != Context.IntTy ||
1416  !PT->isPointerType() || !PT->getPointeeType()->isCharType() ||
1417  !PT->getPointeeType().isConstQualified()) {
1418  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1419  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3
1420  << 2 << FnPtrArgType << "'int (*)(const char *, ...)'";
1421  return ExprError();
1422  }
1423  }
1424 
1425  TheCall->setType(Context.IntTy);
1426  break;
1427  }
1428  case Builtin::BI__builtin_preserve_access_index:
1429  if (SemaBuiltinPreserveAI(*this, TheCall))
1430  return ExprError();
1431  break;
1432  case Builtin::BI__builtin_call_with_static_chain:
1433  if (SemaBuiltinCallWithStaticChain(*this, TheCall))
1434  return ExprError();
1435  break;
1436  case Builtin::BI__exception_code:
1437  case Builtin::BI_exception_code:
1438  if (SemaBuiltinSEHScopeCheck(*this, TheCall, Scope::SEHExceptScope,
1439  diag::err_seh___except_block))
1440  return ExprError();
1441  break;
1442  case Builtin::BI__exception_info:
1443  case Builtin::BI_exception_info:
1444  if (SemaBuiltinSEHScopeCheck(*this, TheCall, Scope::SEHFilterScope,
1445  diag::err_seh___except_filter))
1446  return ExprError();
1447  break;
1448  case Builtin::BI__GetExceptionInfo:
1449  if (checkArgCount(*this, TheCall, 1))
1450  return ExprError();
1451 
1452  if (CheckCXXThrowOperand(
1453  TheCall->getBeginLoc(),
1454  Context.getExceptionObjectType(FDecl->getParamDecl(0)->getType()),
1455  TheCall))
1456  return ExprError();
1457 
1458  TheCall->setType(Context.VoidPtrTy);
1459  break;
1460  // OpenCL v2.0, s6.13.16 - Pipe functions
1461  case Builtin::BIread_pipe:
1462  case Builtin::BIwrite_pipe:
1463  // Since those two functions are declared with var args, we need a semantic
1464  // check for the argument.
1465  if (SemaBuiltinRWPipe(*this, TheCall))
1466  return ExprError();
1467  break;
1468  case Builtin::BIreserve_read_pipe:
1469  case Builtin::BIreserve_write_pipe:
1470  case Builtin::BIwork_group_reserve_read_pipe:
1471  case Builtin::BIwork_group_reserve_write_pipe:
1472  if (SemaBuiltinReserveRWPipe(*this, TheCall))
1473  return ExprError();
1474  break;
1475  case Builtin::BIsub_group_reserve_read_pipe:
1476  case Builtin::BIsub_group_reserve_write_pipe:
1477  if (checkOpenCLSubgroupExt(*this, TheCall) ||
1478  SemaBuiltinReserveRWPipe(*this, TheCall))
1479  return ExprError();
1480  break;
1481  case Builtin::BIcommit_read_pipe:
1482  case Builtin::BIcommit_write_pipe:
1483  case Builtin::BIwork_group_commit_read_pipe:
1484  case Builtin::BIwork_group_commit_write_pipe:
1485  if (SemaBuiltinCommitRWPipe(*this, TheCall))
1486  return ExprError();
1487  break;
1488  case Builtin::BIsub_group_commit_read_pipe:
1489  case Builtin::BIsub_group_commit_write_pipe:
1490  if (checkOpenCLSubgroupExt(*this, TheCall) ||
1491  SemaBuiltinCommitRWPipe(*this, TheCall))
1492  return ExprError();
1493  break;
1494  case Builtin::BIget_pipe_num_packets:
1495  case Builtin::BIget_pipe_max_packets:
1496  if (SemaBuiltinPipePackets(*this, TheCall))
1497  return ExprError();
1498  break;
1499  case Builtin::BIto_global:
1500  case Builtin::BIto_local:
1501  case Builtin::BIto_private:
1502  if (SemaOpenCLBuiltinToAddr(*this, BuiltinID, TheCall))
1503  return ExprError();
1504  break;
1505  // OpenCL v2.0, s6.13.17 - Enqueue kernel functions.
1506  case Builtin::BIenqueue_kernel:
1507  if (SemaOpenCLBuiltinEnqueueKernel(*this, TheCall))
1508  return ExprError();
1509  break;
1510  case Builtin::BIget_kernel_work_group_size:
1511  case Builtin::BIget_kernel_preferred_work_group_size_multiple:
1512  if (SemaOpenCLBuiltinKernelWorkGroupSize(*this, TheCall))
1513  return ExprError();
1514  break;
1515  case Builtin::BIget_kernel_max_sub_group_size_for_ndrange:
1516  case Builtin::BIget_kernel_sub_group_count_for_ndrange:
1517  if (SemaOpenCLBuiltinNDRangeAndBlock(*this, TheCall))
1518  return ExprError();
1519  break;
1520  case Builtin::BI__builtin_os_log_format:
1521  case Builtin::BI__builtin_os_log_format_buffer_size:
1522  if (SemaBuiltinOSLogFormat(TheCall))
1523  return ExprError();
1524  break;
1525  }
1526 
1527  // Since the target-specific builtins for each arch overlap, only check those
1528  // of the arch we are compiling for.
1529  if (Context.BuiltinInfo.isTSBuiltin(BuiltinID)) {
1530  switch (Context.getTargetInfo().getTriple().getArch()) {
1531  case llvm::Triple::arm:
1532  case llvm::Triple::armeb:
1533  case llvm::Triple::thumb:
1534  case llvm::Triple::thumbeb:
1535  if (CheckARMBuiltinFunctionCall(BuiltinID, TheCall))
1536  return ExprError();
1537  break;
1538  case llvm::Triple::aarch64:
1539  case llvm::Triple::aarch64_be:
1540  if (CheckAArch64BuiltinFunctionCall(BuiltinID, TheCall))
1541  return ExprError();
1542  break;
1543  case llvm::Triple::bpfeb:
1544  case llvm::Triple::bpfel:
1545  if (CheckBPFBuiltinFunctionCall(BuiltinID, TheCall))
1546  return ExprError();
1547  break;
1548  case llvm::Triple::hexagon:
1549  if (CheckHexagonBuiltinFunctionCall(BuiltinID, TheCall))
1550  return ExprError();
1551  break;
1552  case llvm::Triple::mips:
1553  case llvm::Triple::mipsel:
1554  case llvm::Triple::mips64:
1555  case llvm::Triple::mips64el:
1556  if (CheckMipsBuiltinFunctionCall(BuiltinID, TheCall))
1557  return ExprError();
1558  break;
1559  case llvm::Triple::systemz:
1560  if (CheckSystemZBuiltinFunctionCall(BuiltinID, TheCall))
1561  return ExprError();
1562  break;
1563  case llvm::Triple::x86:
1564  case llvm::Triple::x86_64:
1565  if (CheckX86BuiltinFunctionCall(BuiltinID, TheCall))
1566  return ExprError();
1567  break;
1568  case llvm::Triple::ppc:
1569  case llvm::Triple::ppc64:
1570  case llvm::Triple::ppc64le:
1571  if (CheckPPCBuiltinFunctionCall(BuiltinID, TheCall))
1572  return ExprError();
1573  break;
1574  default:
1575  break;
1576  }
1577  }
1578 
1579  return TheCallResult;
1580 }
1581 
1582 // Get the valid immediate range for the specified NEON type code.
1583 static unsigned RFT(unsigned t, bool shift = false, bool ForceQuad = false) {
1584  NeonTypeFlags Type(t);
1585  int IsQuad = ForceQuad ? true : Type.isQuad();
1586  switch (Type.getEltType()) {
1587  case NeonTypeFlags::Int8:
1588  case NeonTypeFlags::Poly8:
1589  return shift ? 7 : (8 << IsQuad) - 1;
1590  case NeonTypeFlags::Int16:
1591  case NeonTypeFlags::Poly16:
1592  return shift ? 15 : (4 << IsQuad) - 1;
1593  case NeonTypeFlags::Int32:
1594  return shift ? 31 : (2 << IsQuad) - 1;
1595  case NeonTypeFlags::Int64:
1596  case NeonTypeFlags::Poly64:
1597  return shift ? 63 : (1 << IsQuad) - 1;
1598  case NeonTypeFlags::Poly128:
1599  return shift ? 127 : (1 << IsQuad) - 1;
1600  case NeonTypeFlags::Float16:
1601  assert(!shift && "cannot shift float types!");
1602  return (4 << IsQuad) - 1;
1603  case NeonTypeFlags::Float32:
1604  assert(!shift && "cannot shift float types!");
1605  return (2 << IsQuad) - 1;
1606  case NeonTypeFlags::Float64:
1607  assert(!shift && "cannot shift float types!");
1608  return (1 << IsQuad) - 1;
1609  }
1610  llvm_unreachable("Invalid NeonTypeFlag!");
1611 }
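// Worked example (not part of the original source): for an Int8 element in a
// 64-bit (non-quad) vector there are 8 lanes, so the largest legal lane index
// is (8 << 0) - 1 = 7; the 128-bit (quad) form doubles that to
// (8 << 1) - 1 = 15. With shift = true the function instead returns the widest
// legal shift amount for the element width, e.g. 7 for an 8-bit element.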
1612 
1613 /// getNeonEltType - Return the QualType corresponding to the elements of
1614 /// the vector type specified by the NeonTypeFlags. This is used to check
1615 /// the pointer arguments for Neon load/store intrinsics.
1616 static QualType getNeonEltType(NeonTypeFlags Flags, ASTContext &Context,
1617  bool IsPolyUnsigned, bool IsInt64Long) {
1618  switch (Flags.getEltType()) {
1619  case NeonTypeFlags::Int8:
1620  return Flags.isUnsigned() ? Context.UnsignedCharTy : Context.SignedCharTy;
1621  case NeonTypeFlags::Int16:
1622  return Flags.isUnsigned() ? Context.UnsignedShortTy : Context.ShortTy;
1623  case NeonTypeFlags::Int32:
1624  return Flags.isUnsigned() ? Context.UnsignedIntTy : Context.IntTy;
1625  case NeonTypeFlags::Int64:
1626  if (IsInt64Long)
1627  return Flags.isUnsigned() ? Context.UnsignedLongTy : Context.LongTy;
1628  else
1629  return Flags.isUnsigned() ? Context.UnsignedLongLongTy
1630  : Context.LongLongTy;
1631  case NeonTypeFlags::Poly8:
1632  return IsPolyUnsigned ? Context.UnsignedCharTy : Context.SignedCharTy;
1633  case NeonTypeFlags::Poly16:
1634  return IsPolyUnsigned ? Context.UnsignedShortTy : Context.ShortTy;
1635  case NeonTypeFlags::Poly64:
1636  if (IsInt64Long)
1637  return Context.UnsignedLongTy;
1638  else
1639  return Context.UnsignedLongLongTy;
1640  case NeonTypeFlags::Poly128:
1641  break;
1642  case NeonTypeFlags::Float16:
1643  return Context.HalfTy;
1644  case NeonTypeFlags::Float32:
1645  return Context.FloatTy;
1646  case NeonTypeFlags::Float64:
1647  return Context.DoubleTy;
1648  }
1649  llvm_unreachable("Invalid NeonTypeFlag!");
1650 }
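// Illustrative mapping (hypothetical, not taken from this file): an unsigned
// Int32 type code yields Context.UnsignedIntTy, so a load intrinsic overloaded
// on that code expects (after the const added by the caller) a
// 'const unsigned int *' pointer argument. On LP64 AArch64 targets
// IsInt64Long is true, so Int64/Poly64 map to 'long' rather than 'long long'.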
1651 
1652 bool Sema::CheckNeonBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
1653  llvm::APSInt Result;
1654  uint64_t mask = 0;
1655  unsigned TV = 0;
1656  int PtrArgNum = -1;
1657  bool HasConstPtr = false;
1658  switch (BuiltinID) {
1659 #define GET_NEON_OVERLOAD_CHECK
1660 #include "clang/Basic/arm_neon.inc"
1661 #include "clang/Basic/arm_fp16.inc"
1662 #undef GET_NEON_OVERLOAD_CHECK
1663  }
1664 
1665  // For NEON intrinsics which are overloaded on vector element type, validate
1666  // the immediate which specifies which variant to emit.
1667  unsigned ImmArg = TheCall->getNumArgs()-1;
1668  if (mask) {
1669  if (SemaBuiltinConstantArg(TheCall, ImmArg, Result))
1670  return true;
1671 
1672  TV = Result.getLimitedValue(64);
1673  if ((TV > 63) || (mask & (1ULL << TV)) == 0)
1674  return Diag(TheCall->getBeginLoc(), diag::err_invalid_neon_type_code)
1675  << TheCall->getArg(ImmArg)->getSourceRange();
1676  }
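  // Illustrative sketch (assumptions about the generated table, not from this
  // file's text): GET_NEON_OVERLOAD_CHECK fills 'mask' with one bit per legal
  // type code for the builtin. If, say, only the unsigned 32-bit and 64-bit
  // variants exist, mask has exactly those two bits set, and a call whose
  // trailing type-code immediate selects any other element type is rejected
  // above with err_invalid_neon_type_code.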
1677 
1678  if (PtrArgNum >= 0) {
1679  // Check that pointer arguments have the specified type.
1680  Expr *Arg = TheCall->getArg(PtrArgNum);
1681  if (ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg))
1682  Arg = ICE->getSubExpr();
1683  ExprResult RHS = DefaultFunctionArrayLvalueConversion(Arg);
1684  QualType RHSTy = RHS.get()->getType();
1685 
1686  llvm::Triple::ArchType Arch = Context.getTargetInfo().getTriple().getArch();
1687  bool IsPolyUnsigned = Arch == llvm::Triple::aarch64 ||
1688  Arch == llvm::Triple::aarch64_be;
1689  bool IsInt64Long =
1690  Context.getTargetInfo().getInt64Type() == TargetInfo::SignedLong;
1691  QualType EltTy =
1692  getNeonEltType(NeonTypeFlags(TV), Context, IsPolyUnsigned, IsInt64Long);
1693  if (HasConstPtr)
1694  EltTy = EltTy.withConst();
1695  QualType LHSTy = Context.getPointerType(EltTy);
1696  AssignConvertType ConvTy;
1697  ConvTy = CheckSingleAssignmentConstraints(LHSTy, RHS);
1698  if (RHS.isInvalid())
1699  return true;
1700  if (DiagnoseAssignmentResult(ConvTy, Arg->getBeginLoc(), LHSTy, RHSTy,
1701  RHS.get(), AA_Assigning))
1702  return true;
1703  }
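  // Illustrative example (hypothetical, not taken from this file): if the type
  // code selects Float32 for a load intrinsic, the expected pointer type is
  // 'const float *' (HasConstPtr supplies the const); passing e.g. an 'int *'
  // fails CheckSingleAssignmentConstraints and produces the usual
  // incompatible-pointer assignment diagnostic via DiagnoseAssignmentResult.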
1704 
1705  // For NEON intrinsics which take an immediate value as part of the
1706  // instruction, range check them here.
1707  unsigned i = 0, l = 0, u = 0;
1708  switch (BuiltinID) {
1709  default:
1710  return false;
1711  #define GET_NEON_IMMEDIATE_CHECK
1712  #include "clang/Basic/arm_neon.inc"
1713  #include "clang/Basic/arm_fp16.inc"
1714  #undef GET_NEON_IMMEDIATE_CHECK
1715  }
1716 
1717  return SemaBuiltinConstantArgRange(TheCall, i, l, u + l);
1718 }
1719 
1720 bool Sema::CheckARMBuiltinExclusiveCall(unsigned BuiltinID, CallExpr *TheCall,
1721  unsigned MaxWidth) {
1722  assert((BuiltinID == ARM::BI__builtin_arm_ldrex ||
1723  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1724  BuiltinID == ARM::BI__builtin_arm_strex ||
1725  BuiltinID == ARM::BI__builtin_arm_stlex ||
1726  BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1727  BuiltinID == AArch64::BI__builtin_arm_ldaex ||
1728  BuiltinID == AArch64::BI__builtin_arm_strex ||
1729  BuiltinID == AArch64::BI__builtin_arm_stlex) &&
1730  "unexpected ARM builtin");
1731  bool IsLdrex = BuiltinID == ARM::BI__builtin_arm_ldrex ||
1732  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1733  BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1734  BuiltinID == AArch64::BI__builtin_arm_ldaex;
1735 
1736  DeclRefExpr *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
1737 
1738  // Ensure that we have the proper number of arguments.
1739  if (checkArgCount(*this, TheCall, IsLdrex ? 1 : 2))
1740  return true;
1741 
1742  // Inspect the pointer argument of the atomic builtin. This should always be
1743  // a pointer type, whose element is an integral scalar or pointer type.
1744  // Because it is a pointer type, we don't have to worry about any implicit
1745  // casts here.
1746  Expr *PointerArg = TheCall->getArg(IsLdrex ? 0 : 1);
1747  ExprResult PointerArgRes = DefaultFunctionArrayLvalueConversion(PointerArg);
1748  if (PointerArgRes.isInvalid())
1749  return true;
1750  PointerArg = PointerArgRes.get();
1751 
1752  const PointerType *pointerType = PointerArg->getType()->getAs<PointerType>();
1753  if (!pointerType) {
1754  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer)
1755  << PointerArg->getType() << PointerArg->getSourceRange();
1756  return true;
1757  }
1758 
1759  // ldrex takes a "const volatile T*" and strex takes a "volatile T*". Our next
1760  // task is to insert the appropriate casts into the AST. First work out just
1761  // what the appropriate type is.
1762  QualType ValType = pointerType->getPointeeType();
1763  QualType AddrType = ValType.getUnqualifiedType().withVolatile();
1764  if (IsLdrex)
1765  AddrType.addConst();
1766 
1767  // Issue a warning if the cast is dodgy.
1768  CastKind CastNeeded = CK_NoOp;
1769  if (!AddrType.isAtLeastAsQualifiedAs(ValType)) {
1770  CastNeeded = CK_BitCast;
1771  Diag(DRE->getBeginLoc(), diag::ext_typecheck_convert_discards_qualifiers)
1772  << PointerArg->getType() << Context.getPointerType(AddrType)
1773  << AA_Passing << PointerArg->getSourceRange();
1774  }
1775 
1776  // Finally, do the cast and replace the argument with the corrected version.
1777  AddrType = Context.getPointerType(AddrType);
1778  PointerArgRes = ImpCastExprToType(PointerArg, AddrType, CastNeeded);
1779  if (PointerArgRes.isInvalid())
1780  return true;
1781  PointerArg = PointerArgRes.get();
1782 
1783  TheCall->setArg(IsLdrex ? 0 : 1, PointerArg);
1784 
1785  // In general, we allow ints, floats and pointers to be loaded and stored.
1786  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
1787  !ValType->isBlockPointerType() && !ValType->isFloatingType()) {
1788  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer_intfltptr)
1789  << PointerArg->getType() << PointerArg->getSourceRange();
1790  return true;
1791  }
1792 
1793  // But ARM doesn't have instructions to deal with 128-bit versions.
1794  if (Context.getTypeSize(ValType) > MaxWidth) {
1795  assert(MaxWidth == 64 && "Diagnostic unexpectedly inaccurate");
1796  Diag(DRE->getBeginLoc(), diag::err_atomic_exclusive_builtin_pointer_size)
1797  << PointerArg->getType() << PointerArg->getSourceRange();
1798  return true;
1799  }
1800 
1801  switch (ValType.getObjCLifetime()) {
1802  case Qualifiers::OCL_None:
1803  case Qualifiers::OCL_ExplicitNone:
1804  // okay
1805  break;
1806 
1807  case Qualifiers::OCL_Weak:
1808  case Qualifiers::OCL_Strong:
1809  case Qualifiers::OCL_Autoreleasing:
1810  Diag(DRE->getBeginLoc(), diag::err_arc_atomic_ownership)
1811  << ValType << PointerArg->getSourceRange();
1812  return true;
1813  }
1814 
1815  if (IsLdrex) {
1816  TheCall->setType(ValType);
1817  return false;
1818  }
1819 
1820  // Initialize the argument to be stored.
1821  ExprResult ValArg = TheCall->getArg(0);
1822  InitializedEntity Entity = InitializedEntity::InitializeParameter(
1823  Context, ValType, /*consume*/ false);
1824  ValArg = PerformCopyInitialization(Entity, SourceLocation(), ValArg);
1825  if (ValArg.isInvalid())
1826  return true;
1827  TheCall->setArg(0, ValArg.get());
1828 
1829  // __builtin_arm_strex always returns an int. It's marked as such in the .def,
1830  // but the custom checker bypasses all default analysis.
1831  TheCall->setType(Context.IntTy);
1832  return false;
1833 }
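// Illustrative usage (hypothetical, not taken from this file): the exclusive
// builtins checked above typically appear in a retry loop such as
//   int *p = ...;
//   int old, failed;
//   do {
//     old = __builtin_arm_ldrex(p);             // result type comes from *p
//     failed = __builtin_arm_strex(old + 1, p); // returns int, 0 on success
//   } while (failed);
// which exercises both the pointer re-typing and the setType(Context.IntTy)
// performed above.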
1834 
1835 bool Sema::CheckARMBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
1836  if (BuiltinID == ARM::BI__builtin_arm_ldrex ||
1837  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1838  BuiltinID == ARM::BI__builtin_arm_strex ||
1839  BuiltinID == ARM::BI__builtin_arm_stlex) {
1840  return CheckARMBuiltinExclusiveCall(BuiltinID, TheCall, 64);
1841  }
1842 
1843  if (BuiltinID == ARM::BI__builtin_arm_prefetch) {
1844  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
1845  SemaBuiltinConstantArgRange(TheCall, 2, 0, 1);
1846  }
1847 
1848  if (BuiltinID == ARM::BI__builtin_arm_rsr64 ||
1849  BuiltinID == ARM::BI__builtin_arm_wsr64)
1850  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 3, false);
1851 
1852  if (BuiltinID == ARM::BI__builtin_arm_rsr ||
1853  BuiltinID == ARM::BI__builtin_arm_rsrp ||
1854  BuiltinID == ARM::BI__builtin_arm_wsr ||
1855  BuiltinID == ARM::BI__builtin_arm_wsrp)
1856  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1857 
1858  if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
1859  return true;
1860 
1861  // For intrinsics which take an immediate value as part of the instruction,
1862  // range check them here.
1863  // FIXME: VFP Intrinsics should error if VFP not present.
1864  switch (BuiltinID) {
1865  default: return false;
1866  case ARM::BI__builtin_arm_ssat:
1867  return SemaBuiltinConstantArgRange(TheCall, 1, 1, 32);
1868  case ARM::BI__builtin_arm_usat:
1869  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 31);
1870  case ARM::BI__builtin_arm_ssat16:
1871  return SemaBuiltinConstantArgRange(TheCall, 1, 1, 16);
1872  case ARM::BI__builtin_arm_usat16:
1873  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 15);
1874  case ARM::BI__builtin_arm_vcvtr_f:
1875  case ARM::BI__builtin_arm_vcvtr_d:
1876  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1);
1877  case ARM::BI__builtin_arm_dmb:
1878  case ARM::BI__builtin_arm_dsb:
1879  case ARM::BI__builtin_arm_isb:
1880  case ARM::BI__builtin_arm_dbg:
1881  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 15);
1882  }
1883 }
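// Illustrative example (hypothetical, not taken from this file), matching the
// SemaBuiltinConstantArgRange(TheCall, 1, 1, 32) check above:
//   int y = __builtin_arm_ssat(x, 8);  // OK, saturates to a signed 8-bit range
//   int z = __builtin_arm_ssat(x, 0);  // rejected: argument must be in [1, 32]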
1884 
1885 bool Sema::CheckAArch64BuiltinFunctionCall(unsigned BuiltinID,
1886  CallExpr *TheCall) {
1887  if (BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1888  BuiltinID == AArch64::BI__builtin_arm_ldaex ||
1889  BuiltinID == AArch64::BI__builtin_arm_strex ||
1890  BuiltinID == AArch64::BI__builtin_arm_stlex) {
1891  return CheckARMBuiltinExclusiveCall(BuiltinID, TheCall, 128);
1892  }
1893 
1894  if (BuiltinID == AArch64::BI__builtin_arm_prefetch) {
1895  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
1896  SemaBuiltinConstantArgRange(TheCall, 2, 0, 2) ||
1897  SemaBuiltinConstantArgRange(TheCall, 3, 0, 1) ||
1898  SemaBuiltinConstantArgRange(TheCall, 4, 0, 1);
1899  }
1900 
1901  if (BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
1902  BuiltinID == AArch64::BI__builtin_arm_wsr64)
1903  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1904 
1905  // Memory Tagging Extensions (MTE) Intrinsics
1906  if (BuiltinID == AArch64::BI__builtin_arm_irg ||
1907  BuiltinID == AArch64::BI__builtin_arm_addg ||
1908  BuiltinID == AArch64::BI__builtin_arm_gmi ||
1909  BuiltinID == AArch64::BI__builtin_arm_ldg ||
1910  BuiltinID == AArch64::BI__builtin_arm_stg ||
1911  BuiltinID == AArch64::BI__builtin_arm_subp) {
1912  return SemaBuiltinARMMemoryTaggingCall(BuiltinID, TheCall);
1913  }
1914 
1915  if (BuiltinID == AArch64::BI__builtin_arm_rsr ||
1916  BuiltinID == AArch64::BI__builtin_arm_rsrp ||
1917  BuiltinID == AArch64::BI__builtin_arm_wsr ||
1918  BuiltinID == AArch64::BI__builtin_arm_wsrp)
1919  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1920 
1921  // Only check the valid encoding range. Any constant in this range would be
1922  // converted to a register of the form S1_2_C3_C4_5. Let the hardware throw
1923  // an exception for incorrect registers. This matches MSVC behavior.
1924  if (BuiltinID == AArch64::BI_ReadStatusReg ||
1925  BuiltinID == AArch64::BI_WriteStatusReg)
1926  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 0x7fff);
1927 
1928  if (BuiltinID == AArch64::BI__getReg)
1929  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 31);
1930 
1931  if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
1932  return true;
1933 
1934  // For intrinsics which take an immediate value as part of the instruction,
1935  // range check them here.
1936  unsigned i = 0, l = 0, u = 0;
1937  switch (BuiltinID) {
1938  default: return false;
1939  case AArch64::BI__builtin_arm_dmb:
1940  case AArch64::BI__builtin_arm_dsb:
1941  case AArch64::BI__builtin_arm_isb: l = 0; u = 15; break;
1942  case AArch64::BI__builtin_arm_tcancel: l = 0; u = 65535; break;
1943  }
1944 
1945  return SemaBuiltinConstantArgRange(TheCall, i, l, u + l);
1946 }
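// Illustrative example (hypothetical, not taken from this file): the
// dmb/dsb/isb range check above accepts the architectural barrier options
// 0..15, e.g.
//   __builtin_arm_dmb(0xF);   // DMB SY, full-system barrier
// while a non-constant or out-of-range argument is diagnosed.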
1947 
1948 bool Sema::CheckBPFBuiltinFunctionCall(unsigned BuiltinID,
1949  CallExpr *TheCall) {
1950  assert(BuiltinID == BPF::BI__builtin_preserve_field_info &&
1951  "unexpected BPF builtin");
1952 
1953  if (checkArgCount(*this, TheCall, 2))
1954  return true;
1955 
1956  // The first argument needs to be a record field access.
1957  // If it is an array element access, we delay the decision
1958  // to the BPF backend to check whether the access is a
1959  // field access or not.
1960  Expr *Arg = TheCall->getArg(0);
1961  if (Arg->getType()->getAsPlaceholderType() ||
1962  (Arg->IgnoreParens()->getObjectKind() != OK_BitField &&
1963  !dyn_cast<MemberExpr>(Arg->IgnoreParens()) &&
1964  !dyn_cast<ArraySubscriptExpr>(Arg->IgnoreParens()))) {
1965  Diag(Arg->getBeginLoc(), diag::err_preserve_field_info_not_field)
1966  << 1 << Arg->getSourceRange();
1967  return true;
1968  }
1969 
1970  // The second argument needs to be a constant int.
1971  llvm::APSInt Value;
1972  if (!TheCall->getArg(1)->isIntegerConstantExpr(Value, Context)) {
1973  Diag(Arg->getBeginLoc(), diag::err_preserve_field_info_not_const)
1974  << 2 << Arg->getSourceRange();
1975  return true;
1976  }
1977 
1978  TheCall->setType(Context.UnsignedIntTy);
1979  return false;
1980 }
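// Illustrative BPF CO-RE usage (hypothetical, not taken from this file):
//   unsigned off = __builtin_preserve_field_info(task->pid, 0);
// where the first argument is a field access and the second is a constant
// integer kind interpreted by the BPF backend (0 is understood to request the
// field's byte offset), exactly the two constraints checked above.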
1981 
1982 bool Sema::CheckHexagonBuiltinCpu(unsigned BuiltinID, CallExpr *TheCall) {
1983  struct BuiltinAndString {
1984  unsigned BuiltinID;
1985  const char *Str;
1986  };
1987 
1988  static BuiltinAndString ValidCPU[] = {
1989  { Hexagon::BI__builtin_HEXAGON_A6_vcmpbeq_notany, "v65,v66" },
1990  { Hexagon::BI__builtin_HEXAGON_A6_vminub_RdP, "v62,v65,v66" },
1991  { Hexagon::BI__builtin_HEXAGON_F2_dfadd, "v66" },
1992  { Hexagon::BI__builtin_HEXAGON_F2_dfsub, "v66" },
1993  { Hexagon::BI__builtin_HEXAGON_M2_mnaci, "v66" },
1994  { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffb, "v62,v65,v66" },
1995  { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffub, "v62,v65,v66" },
1996  { Hexagon::BI__builtin_HEXAGON_S2_mask, "v66" },
1997  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_acc, "v60,v62,v65,v66" },
1998  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_and, "v60,v62,v65,v66" },
1999  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_nac, "v60,v62,v65,v66" },
2000  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_or, "v60,v62,v65,v66" },
2001  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p, "v60,v62,v65,v66" },
2002  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_xacc, "v60,v62,v65,v66" },
2003  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_acc, "v60,v62,v65,v66" },
2004  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_and, "v60,v62,v65,v66" },
2005  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_nac, "v60,v62,v65,v66" },
2006  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_or, "v60,v62,v65,v66" },
2007  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r, "v60,v62,v65,v66" },
2008  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_xacc, "v60,v62,v65,v66" },
2009  { Hexagon::BI__builtin_HEXAGON_S6_vsplatrbp, "v62,v65,v66" },
2010  { Hexagon::BI__builtin_HEXAGON_S6_vtrunehb_ppp, "v62,v65,v66" },
2011  { Hexagon::BI__builtin_HEXAGON_S6_vtrunohb_ppp, "v62,v65,v66" },
2012  };
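  // Note (assumption, not part of the original source text): each entry above
  // pairs a builtin with the comma-separated Hexagon core versions (v60..v66)
  // on which it is available; this list is presumably matched against the
  // target CPU so that builtins used on an unsupported core are diagnosed.
  // The ValidHVX table below plays the same role for HVX builtins.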
2013 
2014  static BuiltinAndString ValidHVX[] = {
2015  { Hexagon::BI__builtin_HEXAGON_V6_hi, "v60,v62,v65,v66" },
2016  { Hexagon::BI__builtin_HEXAGON_V6_hi_128B, "v60,v62,v65,v66" },
2017  { Hexagon::BI__builtin_HEXAGON_V6_lo, "v60,v62,v65,v66" },
2018  { Hexagon::BI__builtin_HEXAGON_V6_lo_128B, "v60,v62,v65,v66" },
2019  { Hexagon::BI__builtin_HEXAGON_V6_extractw, "v60,v62,v65,v66" },
2020  { Hexagon::BI__builtin_HEXAGON_V6_extractw_128B, "v60,v62,v65,v66" },
2021  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb, "v62,v65,v66" },
2022  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb_128B, "v62,v65,v66" },
2023  { Hexagon::BI__builtin_HEXAGON_V6_lvsplath, "v62,v65,v66" },
2024  { Hexagon::BI__builtin_HEXAGON_V6_lvsplath_128B, "v62,v65,v66" },
2025  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw, "v60,v62,v65,v66" },
2026  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw_128B, "v60,v62,v65,v66" },
2027  { Hexagon::BI__builtin_HEXAGON_V6_pred_and, "v60,v62,v65,v66" },
2028  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_128B, "v60,v62,v65,v66" },
2029  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n, "v60,v62,v65,v66" },
2030  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n_128B, "v60,v62,v65,v66" },
2031  { Hexagon::BI__builtin_HEXAGON_V6_pred_not, "v60,v62,v65,v66" },
2032  { Hexagon::BI__builtin_HEXAGON_V6_pred_not_128B, "v60,v62,v65,v66" },
2033  { Hexagon::BI__builtin_HEXAGON_V6_pred_or, "v60,v62,v65,v66" },
2034  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_128B, "v60,v62,v65,v66" },
2035  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n, "v60,v62,v65,v66" },
2036  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n_128B, "v60,v62,v65,v66" },
2037  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2, "v60,v62,v65,v66" },
2038  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2_128B, "v60,v62,v65,v66" },
2039  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2, "v62,v65,v66" },
2040  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2_128B, "v62,v65,v66" },
2041  { Hexagon::BI__builtin_HEXAGON_V6_pred_xor, "v60,v62,v65,v66" },
2042  { Hexagon::BI__builtin_HEXAGON_V6_pred_xor_128B, "v60,v62,v65,v66" },
2043  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh, "v62,v65,v66" },
2044  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh_128B, "v62,v65,v66" },
2045  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw, "v62,v65,v66" },
2046  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw_128B, "v62,v65,v66" },
2047  { Hexagon::BI__builtin_HEXAGON_V6_vabsb, "v65,v66" },
2048  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_128B, "v65,v66" },
2049  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat, "v65,v66" },
2050  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat_128B, "v65,v66" },
2051  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh, "v60,v62,v65,v66" },
2052  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh_128B, "v60,v62,v65,v66" },
2053  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub, "v60,v62,v65,v66" },
2054  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub_128B, "v60,v62,v65,v66" },
2055  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh, "v60,v62,v65,v66" },
2056  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh_128B, "v60,v62,v65,v66" },
2057  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw, "v60,v62,v65,v66" },
2058  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw_128B, "v60,v62,v65,v66" },
2059  { Hexagon::BI__builtin_HEXAGON_V6_vabsh, "v60,v62,v65,v66" },
2060  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_128B, "v60,v62,v65,v66" },
2061  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat, "v60,v62,v65,v66" },
2062  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat_128B, "v60,v62,v65,v66" },
2063  { Hexagon::BI__builtin_HEXAGON_V6_vabsw, "v60,v62,v65,v66" },
2064  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_128B, "v60,v62,v65,v66" },
2065  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat, "v60,v62,v65,v66" },
2066  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat_128B, "v60,v62,v65,v66" },
2067  { Hexagon::BI__builtin_HEXAGON_V6_vaddb, "v60,v62,v65,v66" },
2068  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_128B, "v60,v62,v65,v66" },
2069  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv, "v60,v62,v65,v66" },
2070  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv_128B, "v60,v62,v65,v66" },
2071  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat, "v62,v65,v66" },
2072  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_128B, "v62,v65,v66" },
2073  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv, "v62,v65,v66" },
2074  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv_128B, "v62,v65,v66" },
2075  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry, "v62,v65,v66" },
2076  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry_128B, "v62,v65,v66" },
2077  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat, "v66" },
2078  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat_128B, "v66" },
2079  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh, "v62,v65,v66" },
2080  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh_128B, "v62,v65,v66" },
2081  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw, "v62,v65,v66" },
2082  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw_128B, "v62,v65,v66" },
2083  { Hexagon::BI__builtin_HEXAGON_V6_vaddh, "v60,v62,v65,v66" },
2084  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_128B, "v60,v62,v65,v66" },
2085  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv, "v60,v62,v65,v66" },
2086  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv_128B, "v60,v62,v65,v66" },
2087  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat, "v60,v62,v65,v66" },
2088  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_128B, "v60,v62,v65,v66" },
2089  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv, "v60,v62,v65,v66" },
2090  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv_128B, "v60,v62,v65,v66" },
2091  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw, "v60,v62,v65,v66" },
2092  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_128B, "v60,v62,v65,v66" },
2093  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc, "v62,v65,v66" },
2094  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc_128B, "v62,v65,v66" },
2095  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh, "v60,v62,v65,v66" },
2096  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_128B, "v60,v62,v65,v66" },
2097  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc, "v62,v65,v66" },
2098  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc_128B, "v62,v65,v66" },
2099  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat, "v60,v62,v65,v66" },
2100  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_128B, "v60,v62,v65,v66" },
2101  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv, "v60,v62,v65,v66" },
2102  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv_128B, "v60,v62,v65,v66" },
2103  { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat, "v62,v65,v66" },
2104  { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat_128B, "v62,v65,v66" },
2105  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat, "v60,v62,v65,v66" },
2106  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_128B, "v60,v62,v65,v66" },
2107  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv, "v60,v62,v65,v66" },
2108  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv_128B, "v60,v62,v65,v66" },
2109  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw, "v60,v62,v65,v66" },
2110  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_128B, "v60,v62,v65,v66" },
2111  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc, "v62,v65,v66" },
2112  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc_128B, "v62,v65,v66" },
2113  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat, "v62,v65,v66" },
2114  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_128B, "v62,v65,v66" },
2115  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv, "v62,v65,v66" },
2116  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv_128B, "v62,v65,v66" },
2117  { Hexagon::BI__builtin_HEXAGON_V6_vaddw, "v60,v62,v65,v66" },
2118  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_128B, "v60,v62,v65,v66" },
2119  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv, "v60,v62,v65,v66" },
2120  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv_128B, "v60,v62,v65,v66" },
2121  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat, "v60,v62,v65,v66" },
2122  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_128B, "v60,v62,v65,v66" },
2123  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv, "v60,v62,v65,v66" },
2124  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv_128B, "v60,v62,v65,v66" },
2125  { Hexagon::BI__builtin_HEXAGON_V6_valignb, "v60,v62,v65,v66" },
2126  { Hexagon::BI__builtin_HEXAGON_V6_valignb_128B, "v60,v62,v65,v66" },
2127  { Hexagon::BI__builtin_HEXAGON_V6_valignbi, "v60,v62,v65,v66" },
2128  { Hexagon::BI__builtin_HEXAGON_V6_valignbi_128B, "v60,v62,v65,v66" },
2129  { Hexagon::BI__builtin_HEXAGON_V6_vand, "v60,v62,v65,v66" },
2130  { Hexagon::BI__builtin_HEXAGON_V6_vand_128B, "v60,v62,v65,v66" },
2131  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt, "v62,v65,v66" },
2132  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_128B, "v62,v65,v66" },
2133  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc, "v62,v65,v66" },
2134  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc_128B, "v62,v65,v66" },
2135  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt, "v60,v62,v65,v66" },
2136  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_128B, "v60,v62,v65,v66" },
2137  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc, "v60,v62,v65,v66" },
2138  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc_128B, "v60,v62,v65,v66" },
2139  { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv, "v62,v65,v66" },
2140  { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv_128B, "v62,v65,v66" },
2141  { Hexagon::BI__builtin_HEXAGON_V6_vandvqv, "v62,v65,v66" },
2142  { Hexagon::BI__builtin_HEXAGON_V6_vandvqv_128B, "v62,v65,v66" },
2143  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt, "v60,v62,v65,v66" },
2144  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_128B, "v60,v62,v65,v66" },
2145  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc, "v60,v62,v65,v66" },
2146  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc_128B, "v60,v62,v65,v66" },
2147  { Hexagon::BI__builtin_HEXAGON_V6_vaslh, "v60,v62,v65,v66" },
2148  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_128B, "v60,v62,v65,v66" },
2149  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc, "v65,v66" },
2150  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc_128B, "v65,v66" },
2151  { Hexagon::BI__builtin_HEXAGON_V6_vaslhv, "v60,v62,v65,v66" },
2152  { Hexagon::BI__builtin_HEXAGON_V6_vaslhv_128B, "v60,v62,v65,v66" },
2153  { Hexagon::BI__builtin_HEXAGON_V6_vaslw, "v60,v62,v65,v66" },
2154  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_128B, "v60,v62,v65,v66" },
2155  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc, "v60,v62,v65,v66" },
2156  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc_128B, "v60,v62,v65,v66" },
2157  { Hexagon::BI__builtin_HEXAGON_V6_vaslwv, "v60,v62,v65,v66" },
2158  { Hexagon::BI__builtin_HEXAGON_V6_vaslwv_128B, "v60,v62,v65,v66" },
2159  { Hexagon::BI__builtin_HEXAGON_V6_vasrh, "v60,v62,v65,v66" },
2160  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_128B, "v60,v62,v65,v66" },
2161  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc, "v65,v66" },
2162  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc_128B, "v65,v66" },
2163  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat, "v60,v62,v65,v66" },
2164  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat_128B, "v60,v62,v65,v66" },
2165  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat, "v62,v65,v66" },
2166  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat_128B, "v62,v65,v66" },
2167  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat, "v60,v62,v65,v66" },
2168  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat_128B, "v60,v62,v65,v66" },
2169  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat, "v60,v62,v65,v66" },
2170  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat_128B, "v60,v62,v65,v66" },
2171  { Hexagon::BI__builtin_HEXAGON_V6_vasrhv, "v60,v62,v65,v66" },
2172  { Hexagon::BI__builtin_HEXAGON_V6_vasrhv_128B, "v60,v62,v65,v66" },
2173  { Hexagon::BI__builtin_HEXAGON_V6_vasr_into, "v66" },
2174  { Hexagon::BI__builtin_HEXAGON_V6_vasr_into_128B, "v66" },
2175  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat, "v65,v66" },
2176  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat_128B, "v65,v66" },
2177  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat, "v65,v66" },
2178  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat_128B, "v65,v66" },
2179  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat, "v62,v65,v66" },
2180  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat_128B, "v62,v65,v66" },
2181  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat, "v65,v66" },
2182  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat_128B, "v65,v66" },
2183  { Hexagon::BI__builtin_HEXAGON_V6_vasrw, "v60,v62,v65,v66" },
2184  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_128B, "v60,v62,v65,v66" },
2185  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc, "v60,v62,v65,v66" },
2186  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc_128B, "v60,v62,v65,v66" },
2187  { Hexagon::BI__builtin_HEXAGON_V6_vasrwh, "v60,v62,v65,v66" },
2188  { Hexagon::BI__builtin_HEXAGON_V6_vasrwh_128B, "v60,v62,v65,v66" },
2189  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat, "v60,v62,v65,v66" },
2190  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat_128B, "v60,v62,v65,v66" },
2191  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat, "v60,v62,v65,v66" },
2192  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat_128B, "v60,v62,v65,v66" },
2193  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat, "v62,v65,v66" },
2194  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat_128B, "v62,v65,v66" },
2195  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat, "v60,v62,v65,v66" },
2196  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat_128B, "v60,v62,v65,v66" },
2197  { Hexagon::BI__builtin_HEXAGON_V6_vasrwv, "v60,v62,v65,v66" },
2198  { Hexagon::BI__builtin_HEXAGON_V6_vasrwv_128B, "v60,v62,v65,v66" },
2199  { Hexagon::BI__builtin_HEXAGON_V6_vassign, "v60,v62,v65,v66" },
2200  { Hexagon::BI__builtin_HEXAGON_V6_vassign_128B, "v60,v62,v65,v66" },
2201  { Hexagon::BI__builtin_HEXAGON_V6_vassignp, "v60,v62,v65,v66" },
2202  { Hexagon::BI__builtin_HEXAGON_V6_vassignp_128B, "v60,v62,v65,v66" },
2203  { Hexagon::BI__builtin_HEXAGON_V6_vavgb, "v65,v66" },
2204  { Hexagon::BI__builtin_HEXAGON_V6_vavgb_128B, "v65,v66" },
2205  { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd, "v65,v66" },
2206  { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd_128B, "v65,v66" },
2207  { Hexagon::BI__builtin_HEXAGON_V6_vavgh, "v60,v62,v65,v66" },
2208  { Hexagon::BI__builtin_HEXAGON_V6_vavgh_128B, "v60,v62,v65,v66" },
2209  { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd, "v60,v62,v65,v66" },
2210  { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd_128B, "v60,v62,v65,v66" },
2211  { Hexagon::BI__builtin_HEXAGON_V6_vavgub, "v60,v62,v65,v66" },
2212  { Hexagon::BI__builtin_HEXAGON_V6_vavgub_128B, "v60,v62,v65,v66" },
2213  { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd, "v60,v62,v65,v66" },
2214  { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd_128B, "v60,v62,v65,v66" },
2215  { Hexagon::BI__builtin_HEXAGON_V6_vavguh, "v60,v62,v65,v66" },
2216  { Hexagon::BI__builtin_HEXAGON_V6_vavguh_128B, "v60,v62,v65,v66" },
2217  { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd, "v60,v62,v65,v66" },
2218  { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd_128B, "v60,v62,v65,v66" },
2219  { Hexagon::BI__builtin_HEXAGON_V6_vavguw, "v65,v66" },
2220  { Hexagon::BI__builtin_HEXAGON_V6_vavguw_128B, "v65,v66" },
2221  { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd, "v65,v66" },
2222  { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd_128B, "v65,v66" },
2223  { Hexagon::BI__builtin_HEXAGON_V6_vavgw, "v60,v62,v65,v66" },
2224  { Hexagon::BI__builtin_HEXAGON_V6_vavgw_128B, "v60,v62,v65,v66" },
2225  { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd, "v60,v62,v65,v66" },
2226  { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd_128B, "v60,v62,v65,v66" },
2227  { Hexagon::BI__builtin_HEXAGON_V6_vcl0h, "v60,v62,v65,v66" },
2228  { Hexagon::BI__builtin_HEXAGON_V6_vcl0h_128B, "v60,v62,v65,v66" },
2229  { Hexagon::BI__builtin_HEXAGON_V6_vcl0w, "v60,v62,v65,v66" },
2230  { Hexagon::BI__builtin_HEXAGON_V6_vcl0w_128B, "v60,v62,v65,v66" },
2231  { Hexagon::BI__builtin_HEXAGON_V6_vcombine, "v60,v62,v65,v66" },
2232  { Hexagon::BI__builtin_HEXAGON_V6_vcombine_128B, "v60,v62,v65,v66" },
2233  { Hexagon::BI__builtin_HEXAGON_V6_vd0, "v60,v62,v65,v66" },
2234  { Hexagon::BI__builtin_HEXAGON_V6_vd0_128B, "v60,v62,v65,v66" },
2235  { Hexagon::BI__builtin_HEXAGON_V6_vdd0, "v65,v66" },
2236  { Hexagon::BI__builtin_HEXAGON_V6_vdd0_128B, "v65,v66" },
2237  { Hexagon::BI__builtin_HEXAGON_V6_vdealb, "v60,v62,v65,v66" },
2238  { Hexagon::BI__builtin_HEXAGON_V6_vdealb_128B, "v60,v62,v65,v66" },
2239  { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w, "v60,v62,v65,v66" },
2240  { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w_128B, "v60,v62,v65,v66" },
2241  { Hexagon::BI__builtin_HEXAGON_V6_vdealh, "v60,v62,v65,v66" },
2242  { Hexagon::BI__builtin_HEXAGON_V6_vdealh_128B, "v60,v62,v65,v66" },
2243  { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd, "v60,v62,v65,v66" },
2244  { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd_128B, "v60,v62,v65,v66" },
2245  { Hexagon::BI__builtin_HEXAGON_V6_vdelta, "v60,v62,v65,v66" },
2246  { Hexagon::BI__builtin_HEXAGON_V6_vdelta_128B, "v60,v62,v65,v66" },
2247  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus, "v60,v62,v65,v66" },
2248  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_128B, "v60,v62,v65,v66" },
2249  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc, "v60,v62,v65,v66" },
2250  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc_128B, "v60,v62,v65,v66" },
2251  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv, "v60,v62,v65,v66" },
2252  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_128B, "v60,v62,v65,v66" },
2253  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc, "v60,v62,v65,v66" },
2254  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc_128B, "v60,v62,v65,v66" },
2255  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb, "v60,v62,v65,v66" },
2256  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_128B, "v60,v62,v65,v66" },
2257  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc, "v60,v62,v65,v66" },
2258  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc_128B, "v60,v62,v65,v66" },
2259  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv, "v60,v62,v65,v66" },
2260  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_128B, "v60,v62,v65,v66" },
2261  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc, "v60,v62,v65,v66" },
2262  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc_128B, "v60,v62,v65,v66" },
2263  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat, "v60,v62,v65,v66" },
2264  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_128B, "v60,v62,v65,v66" },
2265  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc, "v60,v62,v65,v66" },
2266  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc_128B, "v60,v62,v65,v66" },
2267  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat, "v60,v62,v65,v66" },
2268  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_128B, "v60,v62,v65,v66" },
2269  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc, "v60,v62,v65,v66" },
2270  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc_128B, "v60,v62,v65,v66" },
2271  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat, "v60,v62,v65,v66" },
2272  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_128B, "v60,v62,v65,v66" },
2273  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc, "v60,v62,v65,v66" },
2274  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc_128B, "v60,v62,v65,v66" },
2275  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat, "v60,v62,v65,v66" },
2276  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_128B, "v60,v62,v65,v66" },
2277  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc, "v60,v62,v65,v66" },
2278  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc_128B, "v60,v62,v65,v66" },
2279  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat, "v60,v62,v65,v66" },
2280  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_128B, "v60,v62,v65,v66" },
2281  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc, "v60,v62,v65,v66" },
2282  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc_128B, "v60,v62,v65,v66" },
2283  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh, "v60,v62,v65,v66" },
2284  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_128B, "v60,v62,v65,v66" },
2285  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc, "v60,v62,v65,v66" },
2286  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc_128B, "v60,v62,v65,v66" },
2287  { Hexagon::BI__builtin_HEXAGON_V6_veqb, "v60,v62,v65,v66" },
2288  { Hexagon::BI__builtin_HEXAGON_V6_veqb_128B, "v60,v62,v65,v66" },
2289  { Hexagon::BI__builtin_HEXAGON_V6_veqb_and, "v60,v62,v65,v66" },
2290  { Hexagon::BI__builtin_HEXAGON_V6_veqb_and_128B, "v60,v62,v65,v66" },
2291  { Hexagon::BI__builtin_HEXAGON_V6_veqb_or, "v60,v62,v65,v66" },
2292  { Hexagon::BI__builtin_HEXAGON_V6_veqb_or_128B, "v60,v62,v65,v66" },
2293  { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor, "v60,v62,v65,v66" },
2294  { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor_128B, "v60,v62,v65,v66" },
2295  { Hexagon::BI__builtin_HEXAGON_V6_veqh, "v60,v62,v65,v66" },
2296  { Hexagon::BI__builtin_HEXAGON_V6_veqh_128B, "v60,v62,v65,v66" },
2297  { Hexagon::BI__builtin_HEXAGON_V6_veqh_and, "v60,v62,v65,v66" },
2298  { Hexagon::BI__builtin_HEXAGON_V6_veqh_and_128B, "v60,v62,v65,v66" },
2299  { Hexagon::BI__builtin_HEXAGON_V6_veqh_or, "v60,v62,v65,v66" },
2300  { Hexagon::BI__builtin_HEXAGON_V6_veqh_or_128B, "v60,v62,v65,v66" },
2301  { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor, "v60,v62,v65,v66" },
2302  { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor_128B, "v60,v62,v65,v66" },
2303  { Hexagon::BI__builtin_HEXAGON_V6_veqw, "v60,v62,v65,v66" },
2304  { Hexagon::BI__builtin_HEXAGON_V6_veqw_128B, "v60,v62,v65,v66" },
2305  { Hexagon::BI__builtin_HEXAGON_V6_veqw_and, "v60,v62,v65,v66" },
2306  { Hexagon::BI__builtin_HEXAGON_V6_veqw_and_128B, "v60,v62,v65,v66" },
2307  { Hexagon::BI__builtin_HEXAGON_V6_veqw_or, "v60,v62,v65,v66" },
2308  { Hexagon::BI__builtin_HEXAGON_V6_veqw_or_128B, "v60,v62,v65,v66" },
2309  { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor, "v60,v62,v65,v66" },
2310  { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor_128B, "v60,v62,v65,v66" },
2311  { Hexagon::BI__builtin_HEXAGON_V6_vgtb, "v60,v62,v65,v66" },
2312  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_128B, "v60,v62,v65,v66" },
2313  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and, "v60,v62,v65,v66" },
2314  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and_128B, "v60,v62,v65,v66" },
2315  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or, "v60,v62,v65,v66" },
2316  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or_128B, "v60,v62,v65,v66" },
2317  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor, "v60,v62,v65,v66" },
2318  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor_128B, "v60,v62,v65,v66" },
2319  { Hexagon::BI__builtin_HEXAGON_V6_vgth, "v60,v62,v65,v66" },
2320  { Hexagon::BI__builtin_HEXAGON_V6_vgth_128B, "v60,v62,v65,v66" },
2321  { Hexagon::BI__builtin_HEXAGON_V6_vgth_and, "v60,v62,v65,v66" },
2322  { Hexagon::BI__builtin_HEXAGON_V6_vgth_and_128B, "v60,v62,v65,v66" },
2323  { Hexagon::BI__builtin_HEXAGON_V6_vgth_or, "v60,v62,v65,v66" },
2324  { Hexagon::BI__builtin_HEXAGON_V6_vgth_or_128B, "v60,v62,v65,v66" },
2325  { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor, "v60,v62,v65,v66" },
2326  { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor_128B, "v60,v62,v65,v66" },
2327  { Hexagon::BI__builtin_HEXAGON_V6_vgtub, "v60,v62,v65,v66" },
2328  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_128B, "v60,v62,v65,v66" },
2329  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and, "v60,v62,v65,v66" },
2330  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and_128B, "v60,v62,v65,v66" },
2331  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or, "v60,v62,v65,v66" },
2332  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or_128B, "v60,v62,v65,v66" },
2333  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor, "v60,v62,v65,v66" },
2334  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor_128B, "v60,v62,v65,v66" },
2335  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh, "v60,v62,v65,v66" },
2336  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_128B, "v60,v62,v65,v66" },
2337  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and, "v60,v62,v65,v66" },
2338  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and_128B, "v60,v62,v65,v66" },
2339  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or, "v60,v62,v65,v66" },
2340  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or_128B, "v60,v62,v65,v66" },
2341  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor, "v60,v62,v65,v66" },
2342  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor_128B, "v60,v62,v65,v66" },
2343  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw, "v60,v62,v65,v66" },
2344  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_128B, "v60,v62,v65,v66" },
2345  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and, "v60,v62,v65,v66" },
2346  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and_128B, "v60,v62,v65,v66" },
2347  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or, "v60,v62,v65,v66" },
2348  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or_128B, "v60,v62,v65,v66" },
2349  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor, "v60,v62,v65,v66" },
2350  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor_128B, "v60,v62,v65,v66" },
2351  { Hexagon::BI__builtin_HEXAGON_V6_vgtw, "v60,v62,v65,v66" },
2352  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_128B, "v60,v62,v65,v66" },
2353  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and, "v60,v62,v65,v66" },
2354  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and_128B, "v60,v62,v65,v66" },
2355  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or, "v60,v62,v65,v66" },
2356  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or_128B, "v60,v62,v65,v66" },
2357  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor, "v60,v62,v65,v66" },
2358  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor_128B, "v60,v62,v65,v66" },
2359  { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr, "v60,v62,v65,v66" },
2360  { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr_128B, "v60,v62,v65,v66" },
2361  { Hexagon::BI__builtin_HEXAGON_V6_vlalignb, "v60,v62,v65,v66" },
2362  { Hexagon::BI__builtin_HEXAGON_V6_vlalignb_128B, "v60,v62,v65,v66" },
2363  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi, "v60,v62,v65,v66" },
2364  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi_128B, "v60,v62,v65,v66" },
2365  { Hexagon::BI__builtin_HEXAGON_V6_vlsrb, "v62,v65,v66" },
2366  { Hexagon::BI__builtin_HEXAGON_V6_vlsrb_128B, "v62,v65,v66" },
2367  { Hexagon::BI__builtin_HEXAGON_V6_vlsrh, "v60,v62,v65,v66" },
2368  { Hexagon::BI__builtin_HEXAGON_V6_vlsrh_128B, "v60,v62,v65,v66" },
2369  { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv, "v60,v62,v65,v66" },
2370  { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv_128B, "v60,v62,v65,v66" },
2371  { Hexagon::BI__builtin_HEXAGON_V6_vlsrw, "v60,v62,v65,v66" },
2372  { Hexagon::BI__builtin_HEXAGON_V6_vlsrw_128B, "v60,v62,v65,v66" },
2373  { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv, "v60,v62,v65,v66" },
2374  { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv_128B, "v60,v62,v65,v66" },
2375  { Hexagon::BI__builtin_HEXAGON_V6_vlut4, "v65,v66" },
2376  { Hexagon::BI__builtin_HEXAGON_V6_vlut4_128B, "v65,v66" },
2377  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb, "v60,v62,v65,v66" },
2378  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_128B, "v60,v62,v65,v66" },
2379  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi, "v62,v65,v66" },
2380  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi_128B, "v62,v65,v66" },
2381  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm, "v62,v65,v66" },
2382  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm_128B, "v62,v65,v66" },
2383  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc, "v60,v62,v65,v66" },
2384  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc_128B, "v60,v62,v65,v66" },
2385  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci, "v62,v65,v66" },
2386  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci_128B, "v62,v65,v66" },
2387  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh, "v60,v62,v65,v66" },
2388  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_128B, "v60,v62,v65,v66" },
2389  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi, "v62,v65,v66" },
2390  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi_128B, "v62,v65,v66" },
2391  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm, "v62,v65,v66" },
2392  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm_128B, "v62,v65,v66" },
2393  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc, "v60,v62,v65,v66" },
2394  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc_128B, "v60,v62,v65,v66" },
2395  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci, "v62,v65,v66" },
2396  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci_128B, "v62,v65,v66" },
2397  { Hexagon::BI__builtin_HEXAGON_V6_vmaxb, "v62,v65,v66" },
2398  { Hexagon::BI__builtin_HEXAGON_V6_vmaxb_128B, "v62,v65,v66" },
2399  { Hexagon::BI__builtin_HEXAGON_V6_vmaxh, "v60,v62,v65,v66" },
2400  { Hexagon::BI__builtin_HEXAGON_V6_vmaxh_128B, "v60,v62,v65,v66" },
2401  { Hexagon::BI__builtin_HEXAGON_V6_vmaxub, "v60,v62,v65,v66" },
2402  { Hexagon::BI__builtin_HEXAGON_V6_vmaxub_128B, "v60,v62,v65,v66" },
2403  { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh, "v60,v62,v65,v66" },
2404  { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh_128B, "v60,v62,v65,v66" },
2405  { Hexagon::BI__builtin_HEXAGON_V6_vmaxw, "v60,v62,v65,v66" },
2406  { Hexagon::BI__builtin_HEXAGON_V6_vmaxw_128B, "v60,v62,v65,v66" },
2407  { Hexagon::BI__builtin_HEXAGON_V6_vminb, "v62,v65,v66" },
2408  { Hexagon::BI__builtin_HEXAGON_V6_vminb_128B, "v62,v65,v66" },
2409  { Hexagon::BI__builtin_HEXAGON_V6_vminh, "v60,v62,v65,v66" },
2410  { Hexagon::BI__builtin_HEXAGON_V6_vminh_128B, "v60,v62,v65,v66" },
2411  { Hexagon::BI__builtin_HEXAGON_V6_vminub, "v60,v62,v65,v66" },
2412  { Hexagon::BI__builtin_HEXAGON_V6_vminub_128B, "v60,v62,v65,v66" },
2413  { Hexagon::BI__builtin_HEXAGON_V6_vminuh, "v60,v62,v65,v66" },
2414  { Hexagon::BI__builtin_HEXAGON_V6_vminuh_128B, "v60,v62,v65,v66" },
2415  { Hexagon::BI__builtin_HEXAGON_V6_vminw, "v60,v62,v65,v66" },
2416  { Hexagon::BI__builtin_HEXAGON_V6_vminw_128B, "v60,v62,v65,v66" },
2417  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus, "v60,v62,v65,v66" },
2418  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_128B, "v60,v62,v65,v66" },
2419  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc, "v60,v62,v65,v66" },
2420  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc_128B, "v60,v62,v65,v66" },
2421  { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv, "v60,v62,v65,v66" },
2422  { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv_128B, "v60,v62,v65,v66" },
2423  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu, "v65,v66" },
2424  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_128B, "v65,v66" },
2425  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc, "v65,v66" },
2426  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc_128B, "v65,v66" },
2427  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv, "v60,v62,v65,v66" },
2428  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv_128B, "v60,v62,v65,v66" },
2429  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb, "v60,v62,v65,v66" },
2430  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_128B, "v60,v62,v65,v66" },
2431  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc, "v60,v62,v65,v66" },
2432  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc_128B, "v60,v62,v65,v66" },
2433  { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat, "v65,v66" },
2434  { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat_128B, "v65,v66" },
2435  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb, "v62,v65,v66" },
2436  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_128B, "v62,v65,v66" },
2437  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc, "v62,v65,v66" },
2438  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc_128B, "v62,v65,v66" },
2439  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat, "v65,v66" },
2440  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat_128B, "v65,v66" },
2441  { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat, "v65,v66" },
2442  { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat_128B, "v65,v66" },
2443  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus, "v60,v62,v65,v66" },
2444  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_128B, "v60,v62,v65,v66" },
2445  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc, "v60,v62,v65,v66" },
2446  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc_128B, "v60,v62,v65,v66" },
2447  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv, "v60,v62,v65,v66" },
2448  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_128B, "v60,v62,v65,v66" },
2449  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc, "v60,v62,v65,v66" },
2450  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc_128B, "v60,v62,v65,v66" },
2451  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv, "v60,v62,v65,v66" },
2452  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_128B, "v60,v62,v65,v66" },
2453  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc, "v60,v62,v65,v66" },
2454  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc_128B, "v60,v62,v65,v66" },
2455  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh, "v60,v62,v65,v66" },
2456  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_128B, "v60,v62,v65,v66" },
2457  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64, "v62,v65,v66" },
2458  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64_128B, "v62,v65,v66" },
2459  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh, "v60,v62,v65,v66" },
2460  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_128B, "v60,v62,v65,v66" },
2461  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc, "v65,v66" },
2462  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc_128B, "v65,v66" },
2463  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc, "v60,v62,v65,v66" },
2464  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc_128B, "v60,v62,v65,v66" },
2465  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs, "v60,v62,v65,v66" },
2466  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs_128B, "v60,v62,v65,v66" },
2467  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss, "v60,v62,v65,v66" },
2468  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss_128B, "v60,v62,v65,v66" },
2469  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus, "v60,v62,v65,v66" },
2470  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_128B, "v60,v62,v65,v66" },
2471  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc, "v60,v62,v65,v66" },
2472  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc_128B, "v60,v62,v65,v66" },
2473  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv, "v60,v62,v65,v66" },
2474  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_128B, "v60,v62,v65,v66" },
2475  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc, "v60,v62,v65,v66" },
2476  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc_128B, "v60,v62,v65,v66" },
2477  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs, "v60,v62,v65,v66" },
2478  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs_128B, "v60,v62,v65,v66" },
2479  { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh, "v60,v62,v65,v66" },
2480  { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh_128B, "v60,v62,v65,v66" },
2481  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc, "v60,v62,v65,v66" },
2482  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc_128B, "v60,v62,v65,v66" },
2483  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh, "v60,v62,v65,v66" },
2484  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_128B, "v60,v62,v65,v66" },
2485  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc, "v60,v62,v65,v66" },
2486  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc_128B, "v60,v62,v65,v66" },
2487  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih, "v60,v62,v65,v66" },
2488  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_128B, "v60,v62,v65,v66" },
2489  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc, "v60,v62,v65,v66" },
2490  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc_128B, "v60,v62,v65,v66" },
2491  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb, "v60,v62,v65,v66" },
2492  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_128B, "v60,v62,v65,v66" },
2493  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc, "v60,v62,v65,v66" },
2494  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc_128B, "v60,v62,v65,v66" },
2495  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh, "v60,v62,v65,v66" },
2496  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh_128B, "v60,v62,v65,v66" },
2497  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb, "v60,v62,v65,v66" },
2498  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_128B, "v60,v62,v65,v66" },
2499  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc, "v60,v62,v65,v66" },
2500  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc_128B, "v60,v62,v65,v66" },
2501  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh, "v60,v62,v65,v66" },
2502  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_128B, "v60,v62,v65,v66" },
2503  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc, "v60,v62,v65,v66" },
2504  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc_128B, "v60,v62,v65,v66" },
2505  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub, "v62,v65,v66" },
2506  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_128B, "v62,v65,v66" },
2507  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc, "v62,v65,v66" },
2508  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc_128B, "v62,v65,v66" },
2509  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh, "v60,v62,v65,v66" },
2510  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_128B, "v60,v62,v65,v66" },
2511  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc, "v62,v65,v66" },
2512  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc_128B, "v62,v65,v66" },
2513  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd, "v60,v62,v65,v66" },
2514  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_128B, "v60,v62,v65,v66" },
2515  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc, "v60,v62,v65,v66" },
2516  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc_128B, "v60,v62,v65,v66" },
2517  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc, "v60,v62,v65,v66" },
2518  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc_128B, "v60,v62,v65,v66" },
2519  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub, "v60,v62,v65,v66" },
2520  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_128B, "v60,v62,v65,v66" },
2521  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc, "v60,v62,v65,v66" },
2522  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc_128B, "v60,v62,v65,v66" },
2523  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv, "v60,v62,v65,v66" },
2524  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_128B, "v60,v62,v65,v66" },
2525  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc, "v60,v62,v65,v66" },
2526  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc_128B, "v60,v62,v65,v66" },
2527  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh, "v60,v62,v65,v66" },
2528  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_128B, "v60,v62,v65,v66" },
2529  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc, "v60,v62,v65,v66" },
2530  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc_128B, "v60,v62,v65,v66" },
2531  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe, "v65,v66" },
2532  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_128B, "v65,v66" },
2533  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc, "v65,v66" },
2534  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc_128B, "v65,v66" },
2535  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv, "v60,v62,v65,v66" },
2536  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_128B, "v60,v62,v65,v66" },
2537  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc, "v60,v62,v65,v66" },
2538  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc_128B, "v60,v62,v65,v66" },
2539  { Hexagon::BI__builtin_HEXAGON_V6_vmux, "v60,v62,v65,v66" },
2540  { Hexagon::BI__builtin_HEXAGON_V6_vmux_128B, "v60,v62,v65,v66" },
2541  { Hexagon::BI__builtin_HEXAGON_V6_vnavgb, "v65,v66" },
2542  { Hexagon::BI__builtin_HEXAGON_V6_vnavgb_128B, "v65,v66" },
2543  { Hexagon::BI__builtin_HEXAGON_V6_vnavgh, "v60,v62,v65,v66" },
2544  { Hexagon::BI__builtin_HEXAGON_V6_vnavgh_128B, "v60,v62,v65,v66" },
2545  { Hexagon::BI__builtin_HEXAGON_V6_vnavgub, "v60,v62,v65,v66" },
2546  { Hexagon::BI__builtin_HEXAGON_V6_vnavgub_128B, "v60,v62,v65,v66" },
2547  { Hexagon::BI__builtin_HEXAGON_V6_vnavgw, "v60,v62,v65,v66" },
2548  { Hexagon::BI__builtin_HEXAGON_V6_vnavgw_128B, "v60,v62,v65,v66" },
2549  { Hexagon::BI__builtin_HEXAGON_V6_vnormamth, "v60,v62,v65,v66" },
2550  { Hexagon::BI__builtin_HEXAGON_V6_vnormamth_128B, "v60,v62,v65,v66" },
2551  { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw, "v60,v62,v65,v66" },
2552  { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw_128B, "v60,v62,v65,v66" },
2553  { Hexagon::BI__builtin_HEXAGON_V6_vnot, "v60,v62,v65,v66" },
2554  { Hexagon::BI__builtin_HEXAGON_V6_vnot_128B, "v60,v62,v65,v66" },
2555  { Hexagon::BI__builtin_HEXAGON_V6_vor, "v60,v62,v65,v66" },
2556  { Hexagon::BI__builtin_HEXAGON_V6_vor_128B, "v60,v62,v65,v66" },
2557  { Hexagon::BI__builtin_HEXAGON_V6_vpackeb, "v60,v62,v65,v66" },
2558  { Hexagon::BI__builtin_HEXAGON_V6_vpackeb_128B, "v60,v62,v65,v66" },
2559  { Hexagon::BI__builtin_HEXAGON_V6_vpackeh, "v60,v62,v65,v66" },
2560  { Hexagon::BI__builtin_HEXAGON_V6_vpackeh_128B, "v60,v62,v65,v66" },
2561  { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat, "v60,v62,v65,v66" },
2562  { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat_128B, "v60,v62,v65,v66" },
2563  { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat, "v60,v62,v65,v66" },
2564  { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat_128B, "v60,v62,v65,v66" },
2565  { Hexagon::BI__builtin_HEXAGON_V6_vpackob, "v60,v62,v65,v66" },
2566  { Hexagon::BI__builtin_HEXAGON_V6_vpackob_128B, "v60,v62,v65,v66" },
2567  { Hexagon::BI__builtin_HEXAGON_V6_vpackoh, "v60,v62,v65,v66" },
2568  { Hexagon::BI__builtin_HEXAGON_V6_vpackoh_128B, "v60,v62,v65,v66" },
2569  { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat, "v60,v62,v65,v66" },
2570  { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat_128B, "v60,v62,v65,v66" },
2571  { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat, "v60,v62,v65,v66" },
2572  { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat_128B, "v60,v62,v65,v66" },
2573  { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth, "v60,v62,v65,v66" },
2574  { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth_128B, "v60,v62,v65,v66" },
2575  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb, "v65,v66" },
2576  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb_128B, "v65,v66" },
2577  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh, "v65,v66" },
2578  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh_128B, "v65,v66" },
2579  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw, "v65,v66" },
2580  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw_128B, "v65,v66" },
2581  { Hexagon::BI__builtin_HEXAGON_V6_vrdelta, "v60,v62,v65,v66" },
2582  { Hexagon::BI__builtin_HEXAGON_V6_vrdelta_128B, "v60,v62,v65,v66" },
2583  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt, "v65" },
2584  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_128B, "v65" },
2585  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc, "v65" },
2586  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc_128B, "v65" },
2587  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus, "v60,v62,v65,v66" },
2588  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_128B, "v60,v62,v65,v66" },
2589  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc, "v60,v62,v65,v66" },
2590  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc_128B, "v60,v62,v65,v66" },
2591  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi, "v60,v62,v65,v66" },
2592  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_128B, "v60,v62,v65,v66" },
2593  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc, "v60,v62,v65,v66" },
2594  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc_128B, "v60,v62,v65,v66" },
2595  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv, "v60,v62,v65,v66" },
2596  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_128B, "v60,v62,v65,v66" },
2597  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc, "v60,v62,v65,v66" },
2598  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc_128B, "v60,v62,v65,v66" },
2599  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv, "v60,v62,v65,v66" },
2600  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_128B, "v60,v62,v65,v66" },
2601  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc, "v60,v62,v65,v66" },
2602  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc_128B, "v60,v62,v65,v66" },
2603  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub, "v60,v62,v65,v66" },
2604  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_128B, "v60,v62,v65,v66" },
2605  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc, "v60,v62,v65,v66" },
2606  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc_128B, "v60,v62,v65,v66" },
2607  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi, "v60,v62,v65,v66" },
2608  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_128B, "v60,v62,v65,v66" },
2609  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc, "v60,v62,v65,v66" },
2610  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc_128B, "v60,v62,v65,v66" },
2611  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt, "v65" },
2612  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_128B, "v65" },
2613  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc, "v65" },
2614  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc_128B, "v65" },
2615  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv, "v60,v62,v65,v66" },
2616  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_128B, "v60,v62,v65,v66" },
2617  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc, "v60,v62,v65,v66" },
2618  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc_128B, "v60,v62,v65,v66" },
2619  { Hexagon::BI__builtin_HEXAGON_V6_vror, "v60,v62,v65,v66" },
2620  { Hexagon::BI__builtin_HEXAGON_V6_vror_128B, "v60,v62,v65,v66" },
2621  { Hexagon::BI__builtin_HEXAGON_V6_vrotr, "v66" },
2622  { Hexagon::BI__builtin_HEXAGON_V6_vrotr_128B, "v66" },
2623  { Hexagon::BI__builtin_HEXAGON_V6_vroundhb, "v60,v62,v65,v66" },
2624  { Hexagon::BI__builtin_HEXAGON_V6_vroundhb_128B, "v60,v62,v65,v66" },
2625  { Hexagon::BI__builtin_HEXAGON_V6_vroundhub, "v60,v62,v65,v66" },
2626  { Hexagon::BI__builtin_HEXAGON_V6_vroundhub_128B, "v60,v62,v65,v66" },
2627  { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub, "v62,v65,v66" },
2628  { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub_128B, "v62,v65,v66" },
2629  { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh, "v62,v65,v66" },
2630  { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh_128B, "v62,v65,v66" },
2631  { Hexagon::BI__builtin_HEXAGON_V6_vroundwh, "v60,v62,v65,v66" },
2632  { Hexagon::BI__builtin_HEXAGON_V6_vroundwh_128B, "v60,v62,v65,v66" },
2633  { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh, "v60,v62,v65,v66" },
2634  { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh_128B, "v60,v62,v65,v66" },
2635  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi, "v60,v62,v65,v66" },
2636  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_128B, "v60,v62,v65,v66" },
2637  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc, "v60,v62,v65,v66" },
2638  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc_128B, "v60,v62,v65,v66" },
2639  { Hexagon::BI__builtin_HEXAGON_V6_vsatdw, "v66" },
2640  { Hexagon::BI__builtin_HEXAGON_V6_vsatdw_128B, "v66" },
2641  { Hexagon::BI__builtin_HEXAGON_V6_vsathub, "v60,v62,v65,v66" },
2642  { Hexagon::BI__builtin_HEXAGON_V6_vsathub_128B, "v60,v62,v65,v66" },
2643  { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh, "v62,v65,v66" },
2644  { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh_128B, "v62,v65,v66" },
2645  { Hexagon::BI__builtin_HEXAGON_V6_vsatwh, "v60,v62,v65,v66" },
2646  { Hexagon::BI__builtin_HEXAGON_V6_vsatwh_128B, "v60,v62,v65,v66" },
2647  { Hexagon::BI__builtin_HEXAGON_V6_vsb, "v60,v62,v65,v66" },
2648  { Hexagon::BI__builtin_HEXAGON_V6_vsb_128B, "v60,v62,v65,v66" },
2649  { Hexagon::BI__builtin_HEXAGON_V6_vsh, "v60,v62,v65,v66" },
2650  { Hexagon::BI__builtin_HEXAGON_V6_vsh_128B, "v60,v62,v65,v66" },
2651  { Hexagon::BI__builtin_HEXAGON_V6_vshufeh, "v60,v62,v65,v66" },
2652  { Hexagon::BI__builtin_HEXAGON_V6_vshufeh_128B, "v60,v62,v65,v66" },
2653  { Hexagon::BI__builtin_HEXAGON_V6_vshuffb, "v60,v62,v65,v66" },
2654  { Hexagon::BI__builtin_HEXAGON_V6_vshuffb_128B, "v60,v62,v65,v66" },
2655  { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb, "v60,v62,v65,v66" },
2656  { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb_128B, "v60,v62,v65,v66" },
2657  { Hexagon::BI__builtin_HEXAGON_V6_vshuffh, "v60,v62,v65,v66" },
2658  { Hexagon::BI__builtin_HEXAGON_V6_vshuffh_128B, "v60,v62,v65,v66" },
2659  { Hexagon::BI__builtin_HEXAGON_V6_vshuffob, "v60,v62,v65,v66" },
2660  { Hexagon::BI__builtin_HEXAGON_V6_vshuffob_128B, "v60,v62,v65,v66" },
2661  { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd, "v60,v62,v65,v66" },
2662  { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd_128B, "v60,v62,v65,v66" },
2663  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb, "v60,v62,v65,v66" },
2664  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb_128B, "v60,v62,v65,v66" },
2665  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh, "v60,v62,v65,v66" },
2666  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh_128B, "v60,v62,v65,v66" },
2667  { Hexagon::BI__builtin_HEXAGON_V6_vshufoh, "v60,v62,v65,v66" },
2668  { Hexagon::BI__builtin_HEXAGON_V6_vshufoh_128B, "v60,v62,v65,v66" },
2669  { Hexagon::BI__builtin_HEXAGON_V6_vsubb, "v60,v62,v65,v66" },
2670  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_128B, "v60,v62,v65,v66" },
2671  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv, "v60,v62,v65,v66" },
2672  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv_128B, "v60,v62,v65,v66" },
2673  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat, "v62,v65,v66" },
2674  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_128B, "v62,v65,v66" },
2675  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv, "v62,v65,v66" },
2676  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv_128B, "v62,v65,v66" },
2677  { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry, "v62,v65,v66" },
2678  { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry_128B, "v62,v65,v66" },
2679  { Hexagon::BI__builtin_HEXAGON_V6_vsubh, "v60,v62,v65,v66" },
2680  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_128B, "v60,v62,v65,v66" },
2681  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv, "v60,v62,v65,v66" },
2682  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv_128B, "v60,v62,v65,v66" },
2683  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat, "v60,v62,v65,v66" },
2684  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_128B, "v60,v62,v65,v66" },
2685  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv, "v60,v62,v65,v66" },
2686  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv_128B, "v60,v62,v65,v66" },
2687  { Hexagon::BI__builtin_HEXAGON_V6_vsubhw, "v60,v62,v65,v66" },
2688  { Hexagon::BI__builtin_HEXAGON_V6_vsubhw_128B, "v60,v62,v65,v66" },
2689  { Hexagon::BI__builtin_HEXAGON_V6_vsububh, "v60,v62,v65,v66" },
2690  { Hexagon::BI__builtin_HEXAGON_V6_vsububh_128B, "v60,v62,v65,v66" },
2691  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat, "v60,v62,v65,v66" },
2692  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_128B, "v60,v62,v65,v66" },
2693  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv, "v60,v62,v65,v66" },
2694  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv_128B, "v60,v62,v65,v66" },
2695  { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat, "v62,v65,v66" },
2696  { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat_128B, "v62,v65,v66" },
2697  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat, "v60,v62,v65,v66" },
2698  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_128B, "v60,v62,v65,v66" },
2699  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv, "v60,v62,v65,v66" },
2700  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv_128B, "v60,v62,v65,v66" },
2701  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw, "v60,v62,v65,v66" },
2702  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw_128B, "v60,v62,v65,v66" },
2703  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat, "v62,v65,v66" },
2704  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_128B, "v62,v65,v66" },
2705  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv, "v62,v65,v66" },
2706  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv_128B, "v62,v65,v66" },
2707  { Hexagon::BI__builtin_HEXAGON_V6_vsubw, "v60,v62,v65,v66" },
2708  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_128B, "v60,v62,v65,v66" },
2709  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv, "v60,v62,v65,v66" },
2710  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv_128B, "v60,v62,v65,v66" },
2711  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat, "v60,v62,v65,v66" },
2712  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_128B, "v60,v62,v65,v66" },
2713  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv, "v60,v62,v65,v66" },
2714  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv_128B, "v60,v62,v65,v66" },
2715  { Hexagon::BI__builtin_HEXAGON_V6_vswap, "v60,v62,v65,v66" },
2716  { Hexagon::BI__builtin_HEXAGON_V6_vswap_128B, "v60,v62,v65,v66" },
2717  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb, "v60,v62,v65,v66" },
2718  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_128B, "v60,v62,v65,v66" },
2719  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc, "v60,v62,v65,v66" },
2720  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc_128B, "v60,v62,v65,v66" },
2721  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus, "v60,v62,v65,v66" },
2722  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_128B, "v60,v62,v65,v66" },
2723  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc, "v60,v62,v65,v66" },
2724  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc_128B, "v60,v62,v65,v66" },
2725  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb, "v60,v62,v65,v66" },
2726  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_128B, "v60,v62,v65,v66" },
2727  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc, "v60,v62,v65,v66" },
2728  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc_128B, "v60,v62,v65,v66" },
2729  { Hexagon::BI__builtin_HEXAGON_V6_vunpackb, "v60,v62,v65,v66" },
2730  { Hexagon::BI__builtin_HEXAGON_V6_vunpackb_128B, "v60,v62,v65,v66" },
2731  { Hexagon::BI__builtin_HEXAGON_V6_vunpackh, "v60,v62,v65,v66" },
2732  { Hexagon::BI__builtin_HEXAGON_V6_vunpackh_128B, "v60,v62,v65,v66" },
2733  { Hexagon::BI__builtin_HEXAGON_V6_vunpackob, "v60,v62,v65,v66" },
2734  { Hexagon::BI__builtin_HEXAGON_V6_vunpackob_128B, "v60,v62,v65,v66" },
2735  { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh, "v60,v62,v65,v66" },
2736  { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh_128B, "v60,v62,v65,v66" },
2737  { Hexagon::BI__builtin_HEXAGON_V6_vunpackub, "v60,v62,v65,v66" },
2738  { Hexagon::BI__builtin_HEXAGON_V6_vunpackub_128B, "v60,v62,v65,v66" },
2739  { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh, "v60,v62,v65,v66" },
2740  { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh_128B, "v60,v62,v65,v66" },
2741  { Hexagon::BI__builtin_HEXAGON_V6_vxor, "v60,v62,v65,v66" },
2742  { Hexagon::BI__builtin_HEXAGON_V6_vxor_128B, "v60,v62,v65,v66" },
2743  { Hexagon::BI__builtin_HEXAGON_V6_vzb, "v60,v62,v65,v66" },
2744  { Hexagon::BI__builtin_HEXAGON_V6_vzb_128B, "v60,v62,v65,v66" },
2745  { Hexagon::BI__builtin_HEXAGON_V6_vzh, "v60,v62,v65,v66" },
2746  { Hexagon::BI__builtin_HEXAGON_V6_vzh_128B, "v60,v62,v65,v66" },
2747  };
2748 
2749  // Sort the tables on first execution so we can binary search them.
2750  auto SortCmp = [](const BuiltinAndString &LHS, const BuiltinAndString &RHS) {
2751  return LHS.BuiltinID < RHS.BuiltinID;
2752  };
2753  static const bool SortOnce =
2754  (llvm::sort(ValidCPU, SortCmp),
2755  llvm::sort(ValidHVX, SortCmp), true);
2756  (void)SortOnce;
2757  auto LowerBoundCmp = [](const BuiltinAndString &BI, unsigned BuiltinID) {
2758  return BI.BuiltinID < BuiltinID;
2759  };
2760 
2761  const TargetInfo &TI = Context.getTargetInfo();
2762 
2763  const BuiltinAndString *FC =
2764  llvm::lower_bound(ValidCPU, BuiltinID, LowerBoundCmp);
2765  if (FC != std::end(ValidCPU) && FC->BuiltinID == BuiltinID) {
2766  const TargetOptions &Opts = TI.getTargetOpts();
2767  StringRef CPU = Opts.CPU;
2768  if (!CPU.empty()) {
2769  assert(CPU.startswith("hexagon") && "Unexpected CPU name");
2770  CPU.consume_front("hexagon");
2771  SmallVector<StringRef, 3> CPUs;
2772  StringRef(FC->Str).split(CPUs, ',');
2773  if (llvm::none_of(CPUs, [CPU](StringRef S) { return S == CPU; }))
2774  return Diag(TheCall->getBeginLoc(),
2775  diag::err_hexagon_builtin_unsupported_cpu);
2776  }
2777  }
2778 
2779  const BuiltinAndString *FH =
2780  llvm::lower_bound(ValidHVX, BuiltinID, LowerBoundCmp);
2781  if (FH != std::end(ValidHVX) && FH->BuiltinID == BuiltinID) {
2782  if (!TI.hasFeature("hvx"))
2783  return Diag(TheCall->getBeginLoc(),
2784  diag::err_hexagon_builtin_requires_hvx);
2785 
2786  SmallVector<StringRef, 3> HVXs;
2787  StringRef(FH->Str).split(HVXs, ',');
2788  bool IsValid = llvm::any_of(HVXs,
2789  [&TI] (StringRef V) {
2790  std::string F = "hvx" + V.str();
2791  return TI.hasFeature(F);
2792  });
2793  if (!IsValid)
2794  return Diag(TheCall->getBeginLoc(),
2795  diag::err_hexagon_builtin_unsupported_hvx);
2796  }
2797 
2798  return false;
2799 }
2800 
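// Illustrative sketch (not from SemaChecking.cpp): the Hexagon tables above
// use a "sort once, then binary search" idiom: a function-local static is
// dynamically initialized exactly once, sorting the table before the first
// lookup, and llvm::lower_bound then locates the candidate entry. The table
// contents and lookup() below are hypothetical; llvm::sort and
// llvm::lower_bound are the same STLExtras helpers used in the code above.
#include "llvm/ADT/STLExtras.h"
#include <iterator>

namespace {
struct IDAndString {
  unsigned ID;
  const char *Str;
};

IDAndString Table[] = {
    {42, "v65,v66"}, {7, "v60,v62"}, {19, "v66"}, // deliberately unsorted
};

const char *lookup(unsigned ID) {
  // Dynamic initialization of a local static runs exactly once (and is
  // thread-safe since C++11), so the table is sorted before any lookup and
  // never re-sorted.
  static const bool SortOnce =
      (llvm::sort(Table,
                  [](const IDAndString &L, const IDAndString &R) {
                    return L.ID < R.ID;
                  }),
       true);
  (void)SortOnce;

  // lower_bound returns the first entry whose ID is not less than the key;
  // the final equality check distinguishes "found" from "absent".
  const IDAndString *F = llvm::lower_bound(
      Table, ID, [](const IDAndString &E, unsigned Key) { return E.ID < Key; });
  return (F != std::end(Table) && F->ID == ID) ? F->Str : nullptr;
}
} // namespace
// Usage: lookup(19) yields "v66"; lookup(5) yields nullptr.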
2801 bool Sema::CheckHexagonBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
2802  struct ArgInfo {
2803  uint8_t OpNum;
2804  bool IsSigned;
2805  uint8_t BitWidth;
2806  uint8_t Align;
2807  };
2808  struct BuiltinInfo {
2809  unsigned BuiltinID;
2810  ArgInfo Infos[2];
2811  };
2812 
2813  static BuiltinInfo Infos[] = {
2814  { Hexagon::BI__builtin_circ_ldd, {{ 3, true, 4, 3 }} },
2815  { Hexagon::BI__builtin_circ_ldw, {{ 3, true, 4, 2 }} },
2816  { Hexagon::BI__builtin_circ_ldh, {{ 3, true, 4, 1 }} },
2817  { Hexagon::BI__builtin_circ_lduh, {{ 3, true, 4, 0 }} },
2818  { Hexagon::BI__builtin_circ_ldb, {{ 3, true, 4, 0 }} },
2819  { Hexagon::BI__builtin_circ_ldub, {{ 3, true, 4, 0 }} },
2820  { Hexagon::BI__builtin_circ_std, {{ 3, true, 4, 3 }} },
2821  { Hexagon::BI__builtin_circ_stw, {{ 3, true, 4, 2 }} },
2822  { Hexagon::BI__builtin_circ_sth, {{ 3, true, 4, 1 }} },
2823  { Hexagon::BI__builtin_circ_sthhi, {{ 3, true, 4, 1 }} },
2824  { Hexagon::BI__builtin_circ_stb, {{ 3, true, 4, 0 }} },
2825 
2826  { Hexagon::BI__builtin_HEXAGON_L2_loadrub_pci, {{ 1, true, 4, 0 }} },
2827  { Hexagon::BI__builtin_HEXAGON_L2_loadrb_pci, {{ 1, true, 4, 0 }} },
2828  { Hexagon::BI__builtin_HEXAGON_L2_loadruh_pci, {{ 1, true, 4, 1 }} },
2829  { Hexagon::BI__builtin_HEXAGON_L2_loadrh_pci, {{ 1, true, 4, 1 }} },
2830  { Hexagon::BI__builtin_HEXAGON_L2_loadri_pci, {{ 1, true, 4, 2 }} },
2831  { Hexagon::BI__builtin_HEXAGON_L2_loadrd_pci, {{ 1, true, 4, 3 }} },
2832  { Hexagon::BI__builtin_HEXAGON_S2_storerb_pci, {{ 1, true, 4, 0 }} },
2833  { Hexagon::BI__builtin_HEXAGON_S2_storerh_pci, {{ 1, true, 4, 1 }} },
2834  { Hexagon::BI__builtin_HEXAGON_S2_storerf_pci, {{ 1, true, 4, 1 }} },
2835  { Hexagon::BI__builtin_HEXAGON_S2_storeri_pci, {{ 1, true, 4, 2 }} },
2836  { Hexagon::BI__builtin_HEXAGON_S2_storerd_pci, {{ 1, true, 4, 3 }} },
2837 
2838  { Hexagon::BI__builtin_HEXAGON_A2_combineii, {{ 1, true, 8, 0 }} },
2839  { Hexagon::BI__builtin_HEXAGON_A2_tfrih, {{ 1, false, 16, 0 }} },
2840  { Hexagon::BI__builtin_HEXAGON_A2_tfril, {{ 1, false, 16, 0 }} },
2841  { Hexagon::BI__builtin_HEXAGON_A2_tfrpi, {{ 0, true, 8, 0 }} },
2842  { Hexagon::BI__builtin_HEXAGON_A4_bitspliti, {{ 1, false, 5, 0 }} },
2843  { Hexagon::BI__builtin_HEXAGON_A4_cmpbeqi, {{ 1, false, 8, 0 }} },
2844  { Hexagon::BI__builtin_HEXAGON_A4_cmpbgti, {{ 1, true, 8, 0 }} },
2845  { Hexagon::BI__builtin_HEXAGON_A4_cround_ri, {{ 1, false, 5, 0 }} },
2846  { Hexagon::BI__builtin_HEXAGON_A4_round_ri, {{ 1, false, 5, 0 }} },
2847  { Hexagon::BI__builtin_HEXAGON_A4_round_ri_sat, {{ 1, false, 5, 0 }} },
2848  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbeqi, {{ 1, false, 8, 0 }} },
2849  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbgti, {{ 1, true, 8, 0 }} },
2850  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbgtui, {{ 1, false, 7, 0 }} },
2851  { Hexagon::BI__builtin_HEXAGON_A4_vcmpheqi, {{ 1, true, 8, 0 }} },
2852  { Hexagon::BI__builtin_HEXAGON_A4_vcmphgti, {{ 1, true, 8, 0 }} },
2853  { Hexagon::BI__builtin_HEXAGON_A4_vcmphgtui, {{ 1, false, 7, 0 }} },
2854  { Hexagon::BI__builtin_HEXAGON_A4_vcmpweqi, {{ 1, true, 8, 0 }} },
2855  { Hexagon::BI__builtin_HEXAGON_A4_vcmpwgti, {{ 1, true, 8, 0 }} },
2856  { Hexagon::BI__builtin_HEXAGON_A4_vcmpwgtui, {{ 1, false, 7, 0 }} },
2857  { Hexagon::BI__builtin_HEXAGON_C2_bitsclri, {{ 1, false, 6, 0 }} },
2858  { Hexagon::BI__builtin_HEXAGON_C2_muxii, {{ 2, true, 8, 0 }} },
2859  { Hexagon::BI__builtin_HEXAGON_C4_nbitsclri, {{ 1, false, 6, 0 }} },
2860  { Hexagon::BI__builtin_HEXAGON_F2_dfclass, {{ 1, false, 5, 0 }} },
2861  { Hexagon::BI__builtin_HEXAGON_F2_dfimm_n, {{ 0, false, 10, 0 }} },
2862  { Hexagon::BI__builtin_HEXAGON_F2_dfimm_p, {{ 0, false, 10, 0 }} },
2863  { Hexagon::BI__builtin_HEXAGON_F2_sfclass, {{ 1, false, 5, 0 }} },
2864  { Hexagon::BI__builtin_HEXAGON_F2_sfimm_n, {{ 0, false, 10, 0 }} },
2865  { Hexagon::BI__builtin_HEXAGON_F2_sfimm_p, {{ 0, false, 10, 0 }} },
2866  { Hexagon::BI__builtin_HEXAGON_M4_mpyri_addi, {{ 2, false, 6, 0 }} },
2867  { Hexagon::BI__builtin_HEXAGON_M4_mpyri_addr_u2, {{ 1, false, 6, 2 }} },
2868  { Hexagon::BI__builtin_HEXAGON_S2_addasl_rrri, {{ 2, false, 3, 0 }} },
2869  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_acc, {{ 2, false, 6, 0 }} },
2870  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_and, {{ 2, false, 6, 0 }} },
2871  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p, {{ 1, false, 6, 0 }} },
2872  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_nac, {{ 2, false, 6, 0 }} },
2873  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_or, {{ 2, false, 6, 0 }} },
2874  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_xacc, {{ 2, false, 6, 0 }} },
2875  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_acc, {{ 2, false, 5, 0 }} },
2876  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_and, {{ 2, false, 5, 0 }} },
2877  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r, {{ 1, false, 5, 0 }} },
2878  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_nac, {{ 2, false, 5, 0 }} },
2879  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_or, {{ 2, false, 5, 0 }} },
2880  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_sat, {{ 1, false, 5, 0 }} },
2881  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_xacc, {{ 2, false, 5, 0 }} },
2882  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_vh, {{ 1, false, 4, 0 }} },
2883  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_vw, {{ 1, false, 5, 0 }} },
2884  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_acc, {{ 2, false, 6, 0 }} },
2885  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_and, {{ 2, false, 6, 0 }} },
2886  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p, {{ 1, false, 6, 0 }} },
2887  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_nac, {{ 2, false, 6, 0 }} },
2888  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_or, {{ 2, false, 6, 0 }} },
2889  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd_goodsyntax,
2890  {{ 1, false, 6, 0 }} },
2891  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd, {{ 1, false, 6, 0 }} },
2892  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_acc, {{ 2, false, 5, 0 }} },
2893  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_and, {{ 2, false, 5, 0 }} },
2894  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r, {{ 1, false, 5, 0 }} },
2895  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_nac, {{ 2, false, 5, 0 }} },
2896  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_or, {{ 2, false, 5, 0 }} },
2897  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax,
2898  {{ 1, false, 5, 0 }} },
2899  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd, {{ 1, false, 5, 0 }} },
2900  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_svw_trun, {{ 1, false, 5, 0 }} },
2901  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_vh, {{ 1, false, 4, 0 }} },
2902  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_vw, {{ 1, false, 5, 0 }} },
2903  { Hexagon::BI__builtin_HEXAGON_S2_clrbit_i, {{ 1, false, 5, 0 }} },
2904  { Hexagon::BI__builtin_HEXAGON_S2_extractu, {{ 1, false, 5, 0 },
2905  { 2, false, 5, 0 }} },
2906  { Hexagon::BI__builtin_HEXAGON_S2_extractup, {{ 1, false, 6, 0 },
2907  { 2, false, 6, 0 }} },
2908  { Hexagon::BI__builtin_HEXAGON_S2_insert, {{ 2, false, 5, 0 },
2909  { 3, false, 5, 0 }} },
2910  { Hexagon::BI__builtin_HEXAGON_S2_insertp, {{ 2, false, 6, 0 },
2911  { 3, false, 6, 0 }} },
2912  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_acc, {{ 2, false, 6, 0 }} },
2913  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_and, {{ 2, false, 6, 0 }} },
2914  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p, {{ 1, false, 6, 0 }} },
2915  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_nac, {{ 2, false, 6, 0 }} },
2916  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_or, {{ 2, false, 6, 0 }} },
2917  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_xacc, {{ 2, false, 6, 0 }} },
2918  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_acc, {{ 2, false, 5, 0 }} },
2919  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_and, {{ 2, false, 5, 0 }} },
2920  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r, {{ 1, false, 5, 0 }} },
2921  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_nac, {{ 2, false, 5, 0 }} },
2922  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_or, {{ 2, false, 5, 0 }} },
2923  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_xacc, {{ 2, false, 5, 0 }} },
2924  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vh, {{ 1, false, 4, 0 }} },
2925  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vw, {{ 1, false, 5, 0 }} },
2926  { Hexagon::BI__builtin_HEXAGON_S2_setbit_i, {{ 1, false, 5, 0 }} },
2927  { Hexagon::BI__builtin_HEXAGON_S2_tableidxb_goodsyntax,
2928  {{ 2, false, 4, 0 },
2929  { 3, false, 5, 0 }} },
2930  { Hexagon::BI__builtin_HEXAGON_S2_tableidxd_goodsyntax,
2931  {{ 2, false, 4, 0 },
2932  { 3, false, 5, 0 }} },
2933  { Hexagon::BI__builtin_HEXAGON_S2_tableidxh_goodsyntax,
2934  {{ 2, false, 4, 0 },
2935  { 3, false, 5, 0 }} },
2936  { Hexagon::BI__builtin_HEXAGON_S2_tableidxw_goodsyntax,
2937  {{ 2, false, 4, 0 },
2938  { 3, false, 5, 0 }} },
2939  { Hexagon::BI__builtin_HEXAGON_S2_togglebit_i, {{ 1, false, 5, 0 }} },
2940  { Hexagon::BI__builtin_HEXAGON_S2_tstbit_i, {{ 1, false, 5, 0 }} },
2941  { Hexagon::BI__builtin_HEXAGON_S2_valignib, {{ 2, false, 3, 0 }} },
2942  { Hexagon::BI__builtin_HEXAGON_S2_vspliceib, {{ 2, false, 3, 0 }} },
2943  { Hexagon::BI__builtin_HEXAGON_S4_addi_asl_ri, {{ 2, false, 5, 0 }} },
2944  { Hexagon::BI__builtin_HEXAGON_S4_addi_lsr_ri, {{ 2, false, 5, 0 }} },
2945  { Hexagon::BI__builtin_HEXAGON_S4_andi_asl_ri, {{ 2, false, 5, 0 }} },
2946  { Hexagon::BI__builtin_HEXAGON_S4_andi_lsr_ri, {{ 2, false, 5, 0 }} },
2947  { Hexagon::BI__builtin_HEXAGON_S4_clbaddi, {{ 1, true , 6, 0 }} },
2948  { Hexagon::BI__builtin_HEXAGON_S4_clbpaddi, {{ 1, true, 6, 0 }} },
2949  { Hexagon::BI__builtin_HEXAGON_S4_extract, {{ 1, false, 5, 0 },
2950  { 2, false, 5, 0 }} },
2951  { Hexagon::BI__builtin_HEXAGON_S4_extractp, {{ 1, false, 6, 0 },
2952  { 2, false, 6, 0 }} },
2953  { Hexagon::BI__builtin_HEXAGON_S4_lsli, {{ 0, true, 6, 0 }} },
2954  { Hexagon::BI__builtin_HEXAGON_S4_ntstbit_i, {{ 1, false, 5, 0 }} },
2955  { Hexagon::BI__builtin_HEXAGON_S4_ori_asl_ri, {{ 2, false, 5, 0 }} },
2956  { Hexagon::BI__builtin_HEXAGON_S4_ori_lsr_ri, {{ 2, false, 5, 0 }} },
2957  { Hexagon::BI__builtin_HEXAGON_S4_subi_asl_ri, {{ 2, false, 5, 0 }} },
2958  { Hexagon::BI__builtin_HEXAGON_S4_subi_lsr_ri, {{ 2, false, 5, 0 }} },
2959  { Hexagon::BI__builtin_HEXAGON_S4_vrcrotate_acc, {{ 3, false, 2, 0 }} },
2960  { Hexagon::BI__builtin_HEXAGON_S4_vrcrotate, {{ 2, false, 2, 0 }} },
2961  { Hexagon::BI__builtin_HEXAGON_S5_asrhub_rnd_sat_goodsyntax,
2962  {{ 1, false, 4, 0 }} },
2963  { Hexagon::BI__builtin_HEXAGON_S5_asrhub_sat, {{ 1, false, 4, 0 }} },
2964  { Hexagon::BI__builtin_HEXAGON_S5_vasrhrnd_goodsyntax,
2965  {{ 1, false, 4, 0 }} },
2966  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p, {{ 1, false, 6, 0 }} },
2967  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_acc, {{ 2, false, 6, 0 }} },
2968  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_and, {{ 2, false, 6, 0 }} },
2969  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_nac, {{ 2, false, 6, 0 }} },
2970  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_or, {{ 2, false, 6, 0 }} },
2971  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_xacc, {{ 2, false, 6, 0 }} },
2972  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r, {{ 1, false, 5, 0 }} },
2973  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_acc, {{ 2, false, 5, 0 }} },
2974  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_and, {{ 2, false, 5, 0 }} },
2975  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_nac, {{ 2, false, 5, 0 }} },
2976  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_or, {{ 2, false, 5, 0 }} },
2977  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_xacc, {{ 2, false, 5, 0 }} },
2978  { Hexagon::BI__builtin_HEXAGON_V6_valignbi, {{ 2, false, 3, 0 }} },
2979  { Hexagon::BI__builtin_HEXAGON_V6_valignbi_128B, {{ 2, false, 3, 0 }} },
2980  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi, {{ 2, false, 3, 0 }} },
2981  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi_128B, {{ 2, false, 3, 0 }} },
2982  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi, {{ 2, false, 1, 0 }} },
2983  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_128B, {{ 2, false, 1, 0 }} },
2984  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc, {{ 3, false, 1, 0 }} },
2985  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc_128B,
2986  {{ 3, false, 1, 0 }} },
2987  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi, {{ 2, false, 1, 0 }} },
2988  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_128B, {{ 2, false, 1, 0 }} },
2989  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc, {{ 3, false, 1, 0 }} },
2990  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc_128B,
2991  {{ 3, false, 1, 0 }} },
2992  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi, {{ 2, false, 1, 0 }} },
2993  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_128B, {{ 2, false, 1, 0 }} },
2994  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc, {{ 3, false, 1, 0 }} },
2995  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc_128B,
2996  {{ 3, false, 1, 0 }} },
2997  };
2998 
2999  // Use a dynamically initialized static to sort the table exactly once on
3000  // first run.
3001  static const bool SortOnce =
3002  (llvm::sort(Infos,
3003  [](const BuiltinInfo &LHS, const BuiltinInfo &RHS) {
3004  return LHS.BuiltinID < RHS.BuiltinID;
3005  }),
3006  true);
3007  (void)SortOnce;
3008 
3009  const BuiltinInfo *F = llvm::partition_point(
3010  Infos, [=](const BuiltinInfo &BI) { return BI.BuiltinID < BuiltinID; });
3011  if (F == std::end(Infos) || F->BuiltinID != BuiltinID)
3012  return false;
3013 
3014  bool Error = false;
3015 
3016  for (const ArgInfo &A : F->Infos) {
3017  // Ignore empty ArgInfo elements.
3018  if (A.BitWidth == 0)
3019  continue;
3020 
3021  int32_t Min = A.IsSigned ? -(1 << (A.BitWidth - 1)) : 0;
3022  int32_t Max = (1 << (A.IsSigned ? A.BitWidth - 1 : A.BitWidth)) - 1;
3023  if (!A.Align) {
3024  Error |= SemaBuiltinConstantArgRange(TheCall, A.OpNum, Min, Max);
3025  } else {
3026  unsigned M = 1 << A.Align;
3027  Min *= M;
3028  Max *= M;
3029  Error |= SemaBuiltinConstantArgRange(TheCall, A.OpNum, Min, Max) |
3030  SemaBuiltinConstantArgMultiple(TheCall, A.OpNum, M);
3031  }
3032  }
3033  return Error;
3034 }
3035 
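// Illustrative sketch (not from SemaChecking.cpp): how one ArgInfo entry is
// turned into a check. For { OpNum=3, IsSigned=true, BitWidth=4, Align=3 }
// (the __builtin_circ_ldd descriptor above) the immediate must be a signed
// 4-bit value scaled by 2^3, i.e. a multiple of 8 in [-64, 56]. The Range
// struct and computeRange() are hypothetical; the arithmetic mirrors the
// loop above.
#include <cstdint>
#include <cstdio>

struct Range { int32_t Min, Max; unsigned Multiple; };

static Range computeRange(bool IsSigned, uint8_t BitWidth, uint8_t Align) {
  // A signed N-bit immediate covers [-2^(N-1), 2^(N-1)-1]; an unsigned one
  // covers [0, 2^N-1].
  int32_t Min = IsSigned ? -(1 << (BitWidth - 1)) : 0;
  int32_t Max = (1 << (IsSigned ? BitWidth - 1 : BitWidth)) - 1;
  unsigned M = 1u << Align;
  if (Align) {
    // A nonzero Align scales the range and additionally requires the value
    // to be a multiple of 2^Align.
    Min *= static_cast<int32_t>(M);
    Max *= static_cast<int32_t>(M);
  }
  return {Min, Max, Align ? M : 1u};
}

int main() {
  Range R = computeRange(/*IsSigned=*/true, /*BitWidth=*/4, /*Align=*/3);
  // Prints: min=-64 max=56 multiple=8
  std::printf("min=%d max=%d multiple=%u\n", static_cast<int>(R.Min),
              static_cast<int>(R.Max), R.Multiple);
  return 0;
}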
3036 bool Sema::CheckHexagonBuiltinFunctionCall(unsigned BuiltinID,
3037  CallExpr *TheCall) {
3038  return CheckHexagonBuiltinCpu(BuiltinID, TheCall) ||
3039  CheckHexagonBuiltinArgument(BuiltinID, TheCall);
3040 }
3041 
3042 
3043 // CheckMipsBuiltinFunctionCall - Checks that the constant value passed to
3044 // the intrinsic is correct. The switch statement is ordered by DSP, then
3045 // MSA. The ordering within DSP is unspecified. MSA is ordered by the data
3046 // format used by the underlying instruction, i.e. df/m, df/n, then by size.
3047 //
3048 // FIXME: The size tests here should instead be tablegen'd along with the
3049 // definitions from include/clang/Basic/BuiltinsMips.def.
3050 // FIXME: GCC is strict on signedness for some of these intrinsics; we should
3051 // be too.
3052 bool Sema::CheckMipsBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
3053  unsigned i = 0, l = 0, u = 0, m = 0;
3054  switch (BuiltinID) {
3055  default: return false;
3056  case Mips::BI__builtin_mips_wrdsp: i = 1; l = 0; u = 63; break;
3057  case Mips::BI__builtin_mips_rddsp: i = 0; l = 0; u = 63; break;
3058  case Mips::BI__builtin_mips_append: i = 2; l = 0; u = 31; break;
3059  case Mips::BI__builtin_mips_balign: i = 2; l = 0; u = 3; break;
3060  case Mips::BI__builtin_mips_precr_sra_ph_w: i = 2; l = 0; u = 31; break;
3061  case Mips::BI__builtin_mips_precr_sra_r_ph_w: i = 2; l = 0; u = 31; break;
3062  case Mips::BI__builtin_mips_prepend: i = 2; l = 0; u = 31; break;
3063  // MSA intrinsics. Instructions (which the intrinsics map to) that use the
3064  // df/m field.
3065  // These intrinsics take an unsigned 3 bit immediate.
3066  case Mips::BI__builtin_msa_bclri_b:
3067  case Mips::BI__builtin_msa_bnegi_b:
3068  case Mips::BI__builtin_msa_bseti_b:
3069  case Mips::BI__builtin_msa_sat_s_b:
3070  case Mips::BI__builtin_msa_sat_u_b:
3071  case Mips::BI__builtin_msa_slli_b:
3072  case Mips::BI__builtin_msa_srai_b:
3073  case Mips::BI__builtin_msa_srari_b:
3074  case Mips::BI__builtin_msa_srli_b:
3075  case Mips::BI__builtin_msa_srlri_b: i = 1; l = 0; u = 7; break;
3076  case Mips::BI__builtin_msa_binsli_b:
3077  case Mips::BI__builtin_msa_binsri_b: i = 2; l = 0; u = 7; break;
3078  // These intrinsics take an unsigned 4 bit immediate.
3079  case Mips::BI__builtin_msa_bclri_h:
3080  case Mips::BI__builtin_msa_bnegi_h:
3081  case Mips::BI__builtin_msa_bseti_h:
3082  case Mips::BI__builtin_msa_sat_s_h:
3083  case Mips::BI__builtin_msa_sat_u_h:
3084  case Mips::BI__builtin_msa_slli_h:
3085  case Mips::BI__builtin_msa_srai_h:
3086  case Mips::BI__builtin_msa_srari_h:
3087  case Mips::BI__builtin_msa_srli_h:
3088  case Mips::BI__builtin_msa_srlri_h: i = 1; l = 0; u = 15; break;
3089  case Mips::BI__builtin_msa_binsli_h:
3090  case Mips::BI__builtin_msa_binsri_h: i = 2; l = 0; u = 15; break;
3091  // These intrinsics take an unsigned 5 bit immediate.
3092  // The first block of intrinsics actually has an unsigned 5 bit field,
3093  // not a df/n field.
3094  case Mips::BI__builtin_msa_cfcmsa:
3095  case Mips::BI__builtin_msa_ctcmsa: i = 0; l = 0; u = 31; break;
3096  case Mips::BI__builtin_msa_clei_u_b:
3097  case Mips::BI__builtin_msa_clei_u_h:
3098  case Mips::BI__builtin_msa_clei_u_w:
3099  case Mips::BI__builtin_msa_clei_u_d:
3100  case Mips::BI__builtin_msa_clti_u_b:
3101  case Mips::BI__builtin_msa_clti_u_h:
3102  case Mips::BI__builtin_msa_clti_u_w:
3103  case Mips::BI__builtin_msa_clti_u_d:
3104  case Mips::BI__builtin_msa_maxi_u_b:
3105  case Mips::BI__builtin_msa_maxi_u_h:
3106  case Mips::BI__builtin_msa_maxi_u_w:
3107  case Mips::BI__builtin_msa_maxi_u_d:
3108  case Mips::BI__builtin_msa_mini_u_b:
3109  case Mips::BI__builtin_msa_mini_u_h:
3110  case Mips::BI__builtin_msa_mini_u_w:
3111  case Mips::BI__builtin_msa_mini_u_d:
3112  case Mips::BI__builtin_msa_addvi_b:
3113  case Mips::BI__builtin_msa_addvi_h:
3114  case Mips::BI__builtin_msa_addvi_w:
3115  case Mips::BI__builtin_msa_addvi_d:
3116  case Mips::BI__builtin_msa_bclri_w:
3117  case Mips::BI__builtin_msa_bnegi_w:
3118  case Mips::BI__builtin_msa_bseti_w:
3119  case Mips::BI__builtin_msa_sat_s_w:
3120  case Mips::BI__builtin_msa_sat_u_w:
3121  case Mips::BI__builtin_msa_slli_w:
3122  case Mips::BI__builtin_msa_srai_w:
3123  case Mips::BI__builtin_msa_srari_w:
3124  case Mips::BI__builtin_msa_srli_w:
3125  case Mips::BI__builtin_msa_srlri_w:
3126  case Mips::BI__builtin_msa_subvi_b:
3127  case Mips::BI__builtin_msa_subvi_h:
3128  case Mips::BI__builtin_msa_subvi_w:
3129  case Mips::BI__builtin_msa_subvi_d: i = 1; l = 0; u = 31; break;
3130  case Mips::BI__builtin_msa_binsli_w:
3131  case Mips::BI__builtin_msa_binsri_w: i = 2; l = 0; u = 31; break;
3132  // These intrinsics take an unsigned 6 bit immediate.
3133  case Mips::BI__builtin_msa_bclri_d:
3134  case Mips::BI__builtin_msa_bnegi_d:
3135  case Mips::BI__builtin_msa_bseti_d:
3136  case Mips::BI__builtin_msa_sat_s_d:
3137  case Mips::BI__builtin_msa_sat_u_d:
3138  case Mips::BI__builtin_msa_slli_d:
3139  case Mips::BI__builtin_msa_srai_d:
3140  case Mips::BI__builtin_msa_srari_d:
3141  case Mips::BI__builtin_msa_srli_d:
3142  case Mips::BI__builtin_msa_srlri_d: i = 1; l = 0; u = 63; break;
3143  case Mips::BI__builtin_msa_binsli_d:
3144  case Mips::BI__builtin_msa_binsri_d: i = 2; l = 0; u = 63; break;
3145  // These intrinsics take a signed 5 bit immediate.
3146  case Mips::BI__builtin_msa_ceqi_b:
3147  case Mips::BI__builtin_msa_ceqi_h:
3148  case Mips::BI__builtin_msa_ceqi_w:
3149  case Mips::BI__builtin_msa_ceqi_d:
3150  case Mips::BI__builtin_msa_clti_s_b:
3151  case Mips::BI__builtin_msa_clti_s_h:
3152  case Mips::BI__builtin_msa_clti_s_w:
3153  case Mips::BI__builtin_msa_clti_s_d:
3154  case Mips::BI__builtin_msa_clei_s_b:
3155  case Mips::BI__builtin_msa_clei_s_h:
3156  case Mips::BI__builtin_msa_clei_s_w:
3157  case Mips::BI__builtin_msa_clei_s_d:
3158  case Mips::BI__builtin_msa_maxi_s_b:
3159  case Mips::BI__builtin_msa_maxi_s_h:
3160  case Mips::BI__builtin_msa_maxi_s_w:
3161  case Mips::BI__builtin_msa_maxi_s_d:
3162  case Mips::BI__builtin_msa_mini_s_b:
3163  case Mips::BI__builtin_msa_mini_s_h:
3164  case Mips::BI__builtin_msa_mini_s_w:
3165  case Mips::BI__builtin_msa_mini_s_d: i = 1; l = -16; u = 15; break;
3166  // These intrinsics take an unsigned 8 bit immediate.
3167  case Mips::BI__builtin_msa_andi_b:
3168  case Mips::BI__builtin_msa_nori_b:
3169  case Mips::BI__builtin_msa_ori_b:
3170  case Mips::BI__builtin_msa_shf_b:
3171  case Mips::BI__builtin_msa_shf_h:
3172  case Mips::BI__builtin_msa_shf_w:
3173  case Mips::BI__builtin_msa_xori_b: i = 1; l = 0; u = 255; break;
3174  case Mips::BI__builtin_msa_bseli_b:
3175  case Mips::BI__builtin_msa_bmnzi_b:
3176  case Mips::BI__builtin_msa_bmzi_b: i = 2; l = 0; u = 255; break;
3177  // df/n format
3178  // These intrinsics take an unsigned 4 bit immediate.
3179  case Mips::BI__builtin_msa_copy_s_b:
3180  case Mips::BI__builtin_msa_copy_u_b:
3181  case Mips::BI__builtin_msa_insve_b:
3182  case Mips::BI__builtin_msa_splati_b: i = 1; l = 0; u = 15; break;
3183  case Mips::BI__builtin_msa_sldi_b: i = 2; l = 0; u = 15; break;
3184  // These intrinsics take an unsigned 3 bit immediate.
3185  case Mips::BI__builtin_msa_copy_s_h:
3186  case Mips::BI__builtin_msa_copy_u_h:
3187  case Mips::BI__builtin_msa_insve_h:
3188  case Mips::BI__builtin_msa_splati_h: i = 1; l = 0; u = 7; break;
3189  case Mips::BI__builtin_msa_sldi_h: i = 2; l = 0; u = 7; break;
3190  // These intrinsics take an unsigned 2 bit immediate.
3191  case Mips::BI__builtin_msa_copy_s_w:
3192  case Mips::BI__builtin_msa_copy_u_w:
3193  case Mips::BI__builtin_msa_insve_w:
3194  case Mips::BI__builtin_msa_splati_w: i = 1; l = 0; u = 3; break;
3195  case Mips::BI__builtin_msa_sldi_w: i = 2; l = 0; u = 3; break;
3196  // These intrinsics take an unsigned 1 bit immediate.
3197  case Mips::BI__builtin_msa_copy_s_d:
3198  case Mips::BI__builtin_msa_copy_u_d:
3199  case Mips::BI__builtin_msa_insve_d:
3200  case Mips::BI__builtin_msa_splati_d: i = 1; l = 0; u = 1; break;
3201  case Mips::BI__builtin_msa_sldi_d: i = 2; l = 0; u = 1; break;
3202  // Memory offsets and immediate loads.
3203  // These intrinsics take a signed 10 bit immediate.
3204  case Mips::BI__builtin_msa_ldi_b: i = 0; l = -128; u = 255; break;
3205  case Mips::BI__builtin_msa_ldi_h:
3206  case Mips::BI__builtin_msa_ldi_w:
3207  case Mips::BI__builtin_msa_ldi_d: i = 0; l = -512; u = 511; break;
3208  case Mips::BI__builtin_msa_ld_b: i = 1; l = -512; u = 511; m = 1; break;
3209  case Mips::BI__builtin_msa_ld_h: i = 1; l = -1024; u = 1022; m = 2; break;
3210  case Mips::BI__builtin_msa_ld_w: i = 1; l = -2048; u = 2044; m = 4; break;
3211  case Mips::BI__builtin_msa_ld_d: i = 1; l = -4096; u = 4088; m = 8; break;
3212  case Mips::BI__builtin_msa_st_b: i = 2; l = -512; u = 511; m = 1; break;
3213  case Mips::BI__builtin_msa_st_h: i = 2; l = -1024; u = 1022; m = 2; break;
3214  case Mips::BI__builtin_msa_st_w: i = 2; l = -2048; u = 2044; m = 4; break;
3215  case Mips::BI__builtin_msa_st_d: i = 2; l = -4096; u = 4088; m = 8; break;
3216  }
3217 
3218  if (!m)
3219  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3220 
3221  return SemaBuiltinConstantArgRange(TheCall, i, l, u) ||
3222  SemaBuiltinConstantArgMultiple(TheCall, i, m);
3223 }
3224 
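// Illustrative sketch (not from SemaChecking.cpp): what a range/multiple
// pair means for the memory-offset intrinsics at the end of the switch. For
// __builtin_msa_ld_w the table selects i=1, l=-2048, u=2044, m=4: the second
// argument must be a constant in [-2048, 2044] that is also a multiple of 4.
// isValidMsaLdWOffset() is a hypothetical restatement of that predicate.
#include <cstdint>

constexpr bool isValidMsaLdWOffset(int64_t Imm) {
  return Imm >= -2048 && Imm <= 2044 && (Imm % 4) == 0;
}

static_assert(isValidMsaLdWOffset(-2048) && isValidMsaLdWOffset(2044),
              "extremes of the range are accepted");
static_assert(!isValidMsaLdWOffset(2045) && !isValidMsaLdWOffset(2048),
              "misaligned or out-of-range offsets are rejected");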
3225 bool Sema::CheckPPCBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
3226  unsigned i = 0, l = 0, u = 0;
3227  bool Is64BitBltin = BuiltinID == PPC::BI__builtin_divde ||
3228  BuiltinID == PPC::BI__builtin_divdeu ||
3229  BuiltinID == PPC::BI__builtin_bpermd;
3230  bool IsTarget64Bit = Context.getTargetInfo()
3231  .getTypeWidth(Context
3232  .getTargetInfo()
3233  .getIntPtrType()) == 64;
3234  bool IsBltinExtDiv = BuiltinID == PPC::BI__builtin_divwe ||
3235  BuiltinID == PPC::BI__builtin_divweu ||
3236  BuiltinID == PPC::BI__builtin_divde ||
3237  BuiltinID == PPC::BI__builtin_divdeu;
3238 
3239  if (Is64BitBltin && !IsTarget64Bit)
3240  return Diag(TheCall->getBeginLoc(), diag::err_64_bit_builtin_32_bit_tgt)
3241  << TheCall->getSourceRange();
3242 
3243  if ((IsBltinExtDiv && !Context.getTargetInfo().hasFeature("extdiv")) ||
3244  (BuiltinID == PPC::BI__builtin_bpermd &&
3245  !Context.getTargetInfo().hasFeature("bpermd")))
3246  return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
3247  << TheCall->getSourceRange();
3248 
3249  auto SemaVSXCheck = [&](CallExpr *TheCall) -> bool {
3250  if (!Context.getTargetInfo().hasFeature("vsx"))
3251  return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
3252  << TheCall->getSourceRange();
3253  return false;
3254  };
3255 
3256  switch (BuiltinID) {
3257  default: return false;
3258  case PPC::BI__builtin_altivec_crypto_vshasigmaw:
3259  case PPC::BI__builtin_altivec_crypto_vshasigmad:
3260  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
3261  SemaBuiltinConstantArgRange(TheCall, 2, 0, 15);
3262  case PPC::BI__builtin_altivec_dss:
3263  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 3);
3264  case PPC::BI__builtin_tbegin:
3265  case PPC::BI__builtin_tend: i = 0; l = 0; u = 1; break;
3266  case PPC::BI__builtin_tsr: i = 0; l = 0; u = 7; break;
3267  case PPC::BI__builtin_tabortwc:
3268  case PPC::BI__builtin_tabortdc: i = 0; l = 0; u = 31; break;
3269  case PPC::BI__builtin_tabortwci:
3270  case PPC::BI__builtin_tabortdci:
3271  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 31) ||
3272  SemaBuiltinConstantArgRange(TheCall, 2, 0, 31);
3273  case PPC::BI__builtin_altivec_dst:
3274  case PPC::BI__builtin_altivec_dstt:
3275  case PPC::BI__builtin_altivec_dstst:
3276  case PPC::BI__builtin_altivec_dststt:
3277  return SemaBuiltinConstantArgRange(TheCall, 2, 0, 3);
3278  case PPC::BI__builtin_vsx_xxpermdi:
3279  case PPC::BI__builtin_vsx_xxsldwi:
3280  return SemaBuiltinVSX(TheCall);
3281  case PPC::BI__builtin_unpack_vector_int128:
3282  return SemaVSXCheck(TheCall) ||
3283  SemaBuiltinConstantArgRange(TheCall, 1, 0, 1);
3284  case PPC::BI__builtin_pack_vector_int128:
3285  return SemaVSXCheck(TheCall);
3286  }
3287  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3288 }
3289 
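// Illustrative sketch (not from SemaChecking.cpp): the 64-bit-target test
// above is easy to misread through its line breaks; it asks the TargetInfo
// for the bit-width of its intptr_t type and compares it to 64. A
// hypothetical helper restating the same query:
#include "clang/AST/ASTContext.h"
#include "clang/Basic/TargetInfo.h"

static bool isTarget64Bit(const clang::ASTContext &Ctx) {
  const clang::TargetInfo &TI = Ctx.getTargetInfo();
  // getIntPtrType() names the integer type used for intptr_t on this target;
  // getTypeWidth() reports that type's width in bits.
  return TI.getTypeWidth(TI.getIntPtrType()) == 64;
}
// When this is false, __builtin_divde, __builtin_divdeu and __builtin_bpermd
// are rejected with err_64_bit_builtin_32_bit_tgt above.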
3290 bool Sema::CheckSystemZBuiltinFunctionCall(unsigned BuiltinID,
3291  CallExpr *TheCall) {
3292  if (BuiltinID == SystemZ::BI__builtin_tabort) {
3293  Expr *Arg = TheCall->getArg(0);
3294  llvm::APSInt AbortCode(32);
3295  if (Arg->isIntegerConstantExpr(AbortCode, Context) &&
3296  AbortCode.getSExtValue() >= 0 && AbortCode.getSExtValue() < 256)
3297  return Diag(Arg->getBeginLoc(), diag::err_systemz_invalid_tabort_code)
3298  << Arg->getSourceRange();
3299  }
3300 
3301  // For intrinsics which take an immediate value as part of the instruction,
3302  // range check them here.
3303  unsigned i = 0, l = 0, u = 0;
3304  switch (BuiltinID) {
3305  default: return false;
3306  case SystemZ::BI__builtin_s390_lcbb: i = 1; l = 0; u = 15; break;
3307  case SystemZ::BI__builtin_s390_verimb:
3308  case SystemZ::BI__builtin_s390_verimh:
3309  case SystemZ::BI__builtin_s390_verimf:
3310  case SystemZ::BI__builtin_s390_verimg: i = 3; l = 0; u = 255; break;
3311  case SystemZ::BI__builtin_s390_vfaeb:
3312  case SystemZ::BI__builtin_s390_vfaeh:
3313  case SystemZ::BI__builtin_s390_vfaef:
3314  case SystemZ::BI__builtin_s390_vfaebs:
3315  case SystemZ::BI__builtin_s390_vfaehs:
3316  case SystemZ::BI__builtin_s390_vfaefs:
3317  case SystemZ::BI__builtin_s390_vfaezb:
3318  case SystemZ::BI__builtin_s390_vfaezh:
3319  case SystemZ::BI__builtin_s390_vfaezf:
3320  case SystemZ::BI__builtin_s390_vfaezbs:
3321  case SystemZ::BI__builtin_s390_vfaezhs:
3322  case SystemZ::BI__builtin_s390_vfaezfs: i = 2; l = 0; u = 15; break;
3323  case SystemZ::BI__builtin_s390_vfisb:
3324  case SystemZ::BI__builtin_s390_vfidb:
3325  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 15) ||
3326  SemaBuiltinConstantArgRange(TheCall, 2, 0, 15);
3327  case SystemZ::BI__builtin_s390_vftcisb:
3328  case SystemZ::BI__builtin_s390_vftcidb: i = 1; l = 0; u = 4095; break;
3329  case SystemZ::BI__builtin_s390_vlbb: i = 1; l = 0; u = 15; break;
3330  case SystemZ::BI__builtin_s390_vpdi: i = 2; l = 0; u = 15; break;
3331  case SystemZ::BI__builtin_s390_vsldb: i = 2; l = 0; u = 15; break;
3332  case SystemZ::BI__builtin_s390_vstrcb:
3333  case SystemZ::BI__builtin_s390_vstrch:
3334  case SystemZ::BI__builtin_s390_vstrcf:
3335  case SystemZ::BI__builtin_s390_vstrczb:
3336  case SystemZ::BI__builtin_s390_vstrczh:
3337  case SystemZ::BI__builtin_s390_vstrczf:
3338  case SystemZ::BI__builtin_s390_vstrcbs:
3339  case SystemZ::BI__builtin_s390_vstrchs:
3340  case SystemZ::BI__builtin_s390_vstrcfs:
3341  case SystemZ::BI__builtin_s390_vstrczbs:
3342  case SystemZ::BI__builtin_s390_vstrczhs:
3343  case SystemZ::BI__builtin_s390_vstrczfs: i = 3; l = 0; u = 15; break;
3344  case SystemZ::BI__builtin_s390_vmslg: i = 3; l = 0; u = 15; break;
3345  case SystemZ::BI__builtin_s390_vfminsb:
3346  case SystemZ::BI__builtin_s390_vfmaxsb:
3347  case SystemZ::BI__builtin_s390_vfmindb:
3348  case SystemZ::BI__builtin_s390_vfmaxdb: i = 2; l = 0; u = 15; break;
3349  case SystemZ::BI__builtin_s390_vsld: i = 2; l = 0; u = 7; break;
3350  case SystemZ::BI__builtin_s390_vsrd: i = 2; l = 0; u = 7; break;
3351  }
3352  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3353 }
3354 
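// Illustrative sketch (not from SemaChecking.cpp): the __builtin_tabort
// check above only fires when the argument folds to an integer constant in
// [0, 255]; such codes are rejected with err_systemz_invalid_tabort_code,
// while larger constants and non-constant expressions pass through.
// isRejectedTabortCode() is a hypothetical restatement of that rule.
#include <cstdint>

constexpr bool isRejectedTabortCode(int64_t Code) {
  return Code >= 0 && Code < 256;
}

static_assert(isRejectedTabortCode(0) && isRejectedTabortCode(255),
              "constant codes 0..255 are diagnosed");
static_assert(!isRejectedTabortCode(256),
              "256 and above are accepted by this check");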
3355 /// SemaBuiltinCpuSupports - Handle __builtin_cpu_supports(char *).
3356 /// This checks that the target supports __builtin_cpu_supports and
3357 /// that the string argument is constant and valid.
3358 static bool SemaBuiltinCpuSupports(Sema &S, CallExpr *TheCall) {
3359  Expr *Arg = TheCall->getArg(0);
3360 
3361  // Check if the argument is a string literal.
3362  if (!isa<StringLiteral>(Arg->IgnoreParenImpCasts()))
3363  return S.Diag(TheCall->getBeginLoc(), diag::err_expr_not_string_literal)
3364  << Arg->getSourceRange();
3365 
3366  // Check the contents of the string.
3367  StringRef Feature =
3368  cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
3369  if (!S.Context.getTargetInfo().validateCpuSupports(Feature))
3370  return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_supports)
3371  << Arg->getSourceRange();
3372  return false;
3373 }
3374 
3375 /// SemaBuiltinCpuIs - Handle __builtin_cpu_is(char *).
3376 /// This checks that the target supports __builtin_cpu_is and
3377 /// that the string argument is constant and valid.
3378 static bool SemaBuiltinCpuIs(Sema &S, CallExpr *TheCall) {
3379  Expr *Arg = TheCall->getArg(0);
3380 
3381  // Check if the argument is a string literal.
3382  if (!isa<StringLiteral>(Arg->IgnoreParenImpCasts()))
3383  return S.Diag(TheCall->getBeginLoc(), diag::err_expr_not_string_literal)
3384  << Arg->getSourceRange();
3385 
3386  // Check the contents of the string.
3387  StringRef Feature =
3388  cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
3389  if (!S.Context.getTargetInfo().validateCpuIs(Feature))
3390  return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_is)
3391  << Arg->getSourceRange();
3392  return false;
3393 }
3394 
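// Illustrative sketch (not from SemaChecking.cpp): the shape of call these
// two checks accept. The argument must be a string literal (after stripping
// parentheses and implicit casts) whose contents the target recognises. The
// feature name "sse4.2" and the copyFast/copySlow helpers are hypothetical,
// x86-flavoured examples.
void copyFast(void *Dst, const void *Src, unsigned long N);
void copySlow(void *Dst, const void *Src, unsigned long N);

void copy(void *Dst, const void *Src, unsigned long N) {
  if (__builtin_cpu_supports("sse4.2")) // literal validated by the code above
    copyFast(Dst, Src, N);
  else
    copySlow(Dst, Src, N);
}
// Passing a non-literal argument or an unknown feature name is rejected with
// err_expr_not_string_literal or err_invalid_cpu_supports respectively.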
3395 // Check if the rounding mode is legal.
3396 bool Sema::CheckX86BuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
3397  // Indicates if this instruction has rounding control or just SAE.
3398  bool HasRC = false;
3399 
3400  unsigned ArgNum = 0;
3401  switch (BuiltinID) {
3402  default:
3403  return false;
3404  case X86::BI__builtin_ia32_vcvttsd2si32:
3405  case X86::BI__builtin_ia32_vcvttsd2si64:
3406  case X86::BI__builtin_ia32_vcvttsd2usi32:
3407  case X86::BI__builtin_ia32_vcvttsd2usi64:
3408  case X86::BI__builtin_ia32_vcvttss2si32:
3409  case X86::BI__builtin_ia32_vcvttss2si64:
3410  case X86::BI__builtin_ia32_vcvttss2usi32:
3411  case X86::BI__builtin_ia32_vcvttss2usi64:
3412  ArgNum = 1;
3413  break;
3414  case X86::BI__builtin_ia32_maxpd512:
3415  case X86::BI__builtin_ia32_maxps512:
3416  case X86::BI__builtin_ia32_minpd512:
3417  case X86::BI__builtin_ia32_minps512:
3418  ArgNum = 2;
3419  break;
3420  case X86::BI__builtin_ia32_cvtps2pd512_mask:
3421  case X86::BI__builtin_ia32_cvttpd2dq512_mask:
3422  case X86::BI__builtin_ia32_cvttpd2qq512_mask:
3423  case X86::BI__builtin_ia32_cvttpd2udq512_mask:
3424  case X86::BI__builtin_ia32_cvttpd2uqq512_mask:
3425  case X86::BI__builtin_ia32_cvttps2dq512_mask:
3426  case X86::BI__builtin_ia32_cvttps2qq512_mask:
3427  case X86::BI__builtin_ia32_cvttps2udq512_mask:
3428  case X86::BI__builtin_ia32_cvttps2uqq512_mask:
3429  case X86::BI__builtin_ia32_exp2pd_mask:
3430  case X86::BI__builtin_ia32_exp2ps_mask:
3431  case X86::BI__builtin_ia32_getexppd512_mask:
3432  case X86::BI__builtin_ia32_getexpps512_mask:
3433  case X86::BI__builtin_ia32_rcp28pd_mask:
3434  case X86::BI__builtin_ia32_rcp28ps_mask:
3435  case X86::BI__builtin_ia32_rsqrt28pd_mask:
3436  case X86::BI__builtin_ia32_rsqrt28ps_mask:
3437  case X86::BI__builtin_ia32_vcomisd:
3438  case X86::BI__builtin_ia32_vcomiss:
3439  case X86::BI__builtin_ia32_vcvtph2ps512_mask:
3440  ArgNum = 3;
3441  break;
3442  case X86::BI__builtin_ia32_cmppd512_mask:
3443  case X86::BI__builtin_ia32_cmpps512_mask:
3444  case X86::BI__builtin_ia32_cmpsd_mask:
3445  case X86::BI__builtin_ia32_cmpss_mask:
3446  case X86::BI__builtin_ia32_cvtss2sd_round_mask:
3447  case X86::BI__builtin_ia32_getexpsd128_round_mask:
3448  case X86::BI__builtin_ia32_getexpss128_round_mask:
3449  case X86::BI__builtin_ia32_getmantpd512_mask:
3450  case X86::BI__builtin_ia32_getmantps512_mask:
3451  case X86::BI__builtin_ia32_maxsd_round_mask:
3452  case X86::BI__builtin_ia32_maxss_round_mask:
3453  case X86::BI__builtin_ia32_minsd_round_mask:
3454  case X86::BI__builtin_ia32_minss_round_mask:
3455  case X86::BI__builtin_ia32_rcp28sd_round_mask:
3456  case X86::BI__builtin_ia32_rcp28ss_round_mask:
3457  case X86::BI__builtin_ia32_reducepd512_mask:
3458  case X86::BI__builtin_ia32_reduceps512_mask:
3459  case X86::BI__builtin_ia32_rndscalepd_mask:
3460  case X86::BI__builtin_ia32_rndscaleps_mask:
3461  case X86::BI__builtin_ia32_rsqrt28sd_round_mask:
3462  case X86::BI__builtin_ia32_rsqrt28ss_round_mask:
3463  ArgNum = 4;
3464  break;
3465  case X86::BI__builtin_ia32_fixupimmpd512_mask:
3466  case X86::BI__builtin_ia32_fixupimmpd512_maskz:
3467  case X86::BI__builtin_ia32_fixupimmps512_mask:
3468  case X86::BI__builtin_ia32_fixupimmps512_maskz:
3469  case X86::BI__builtin_ia32_fixupimmsd_mask:
3470  case X86::BI__builtin_ia32_fixupimmsd_maskz:
3471  case X86::BI__builtin_ia32_fixupimmss_mask:
3472  case X86::BI__builtin_ia32_fixupimmss_maskz:
3473  case X86::BI__builtin_ia32_getmantsd_round_mask:
3474  case X86::BI__builtin_ia32_getmantss_round_mask:
3475  case X86::BI__builtin_ia32_rangepd512_mask:
3476  case X86::BI__builtin_ia32_rangeps512_mask:
3477  case X86::BI__builtin_ia32_rangesd128_round_mask:
3478  case X86::BI__builtin_ia32_rangess128_round_mask:
3479  case X86::BI__builtin_ia32_reducesd_mask:
3480  case X86::BI__builtin_ia32_reducess_mask:
3481  case X86::BI__builtin_ia32_rndscalesd_round_mask:
3482  case X86::BI__builtin_ia32_rndscaless_round_mask:
3483  ArgNum = 5;
3484  break;
3485  case X86::BI__builtin_ia32_vcvtsd2si64:
3486  case X86::BI__builtin_ia32_vcvtsd2si32:
3487  case X86::BI__builtin_ia32_vcvtsd2usi32:
3488  case X86::BI__builtin_ia32_vcvtsd2usi64:
3489  case X86::BI__builtin_ia32_vcvtss2si32:
3490  case X86::BI__builtin_ia32_vcvtss2si64:
3491  case X86::BI__builtin_ia32_vcvtss2usi32:
3492  case X86::BI__builtin_ia32_vcvtss2usi64:
3493  case X86::BI__builtin_ia32_sqrtpd512:
3494  case X86::BI__builtin_ia32_sqrtps512:
3495  ArgNum = 1;
3496  HasRC = true;
3497  break;
3498  case X86::BI__builtin_ia32_addpd512:
3499  case X86::BI__builtin_ia32_addps512:
3500  case X86::BI__builtin_ia32_divpd512:
3501  case X86::BI__builtin_ia32_divps512:
3502  case X86::BI__builtin_ia32_mulpd512:
3503  case X86::BI__builtin_ia32_mulps512:
3504  case X86::BI__builtin_ia32_subpd512:
3505  case X86::BI__builtin_ia32_subps512:
3506  case X86::BI__builtin_ia32_cvtsi2sd64:
3507  case X86::BI__builtin_ia32_cvtsi2ss32:
3508  case X86::BI__builtin_ia32_cvtsi2ss64:
3509  case X86::BI__builtin_ia32_cvtusi2sd64:
3510  case X86::BI__builtin_ia32_cvtusi2ss32:
3511  case X86::BI__builtin_ia32_cvtusi2ss64:
3512  ArgNum = 2;
3513  HasRC = true;
3514  break;
3515  case X86::BI__builtin_ia32_cvtdq2ps512_mask:
3516  case X86::BI__builtin_ia32_cvtudq2ps512_mask:
3517  case X86::BI__builtin_ia32_cvtpd2ps512_mask:
3518  case X86::BI__builtin_ia32_cvtpd2dq512_mask:
3519  case X86::BI__builtin_ia32_cvtpd2qq512_mask:
3520  case X86::BI__builtin_ia32_cvtpd2udq512_mask:
3521  case X86::BI__builtin_ia32_cvtpd2uqq512_mask:
3522  case X86::BI__builtin_ia32_cvtps2dq512_mask:
3523  case X86::BI__builtin_ia32_cvtps2qq512_mask:
3524  case X86::BI__builtin_ia32_cvtps2udq512_mask:
3525  case X86::BI__builtin_ia32_cvtps2uqq512_mask:
3526  case X86::BI__builtin_ia32_cvtqq2pd512_mask:
3527  case X86::BI__builtin_ia32_cvtqq2ps512_mask:
3528  case X86::BI__builtin_ia32_cvtuqq2pd512_mask:
3529  case X86::BI__builtin_ia32_cvtuqq2ps512_mask:
3530  ArgNum = 3;
3531  HasRC = true;
3532  break;
3533  case X86::BI__builtin_ia32_addss_round_mask:
3534  case X86::BI__builtin_ia32_addsd_round_mask:
3535  case X86::BI__builtin_ia32_divss_round_mask:
3536  case X86::BI__builtin_ia32_divsd_round_mask:
3537  case X86::BI__builtin_ia32_mulss_round_mask:
3538  case X86::BI__builtin_ia32_mulsd_round_mask:
3539  case X86::BI__builtin_ia32_subss_round_mask:
3540  case X86::BI__builtin_ia32_subsd_round_mask:
3541  case X86::BI__builtin_ia32_scalefpd512_mask:
3542  case X86::BI__builtin_ia32_scalefps512_mask:
3543  case X86::BI__builtin_ia32_scalefsd_round_mask:
3544  case X86::BI__builtin_ia32_scalefss_round_mask:
3545  case X86::BI__builtin_ia32_cvtsd2ss_round_mask:
3546  case X86::BI__builtin_ia32_sqrtsd_round_mask:
3547  case X86::BI__builtin_ia32_sqrtss_round_mask:
3548  case X86::BI__builtin_ia32_vfmaddsd3_mask:
3549  case X86::BI__builtin_ia32_vfmaddsd3_maskz:
3550  case X86::BI__builtin_ia32_vfmaddsd3_mask3:
3551  case X86::BI__builtin_ia32_vfmaddss3_mask:
3552  case X86::BI__builtin_ia32_vfmaddss3_maskz:
3553  case X86::BI__builtin_ia32_vfmaddss3_mask3:
3554  case X86::BI__builtin_ia32_vfmaddpd512_mask:
3555  case X86::BI__builtin_ia32_vfmaddpd512_maskz:
3556  case X86::BI__builtin_ia32_vfmaddpd512_mask3:
3557  case X86::BI__builtin_ia32_vfmsubpd512_mask3:
3558  case X86::BI__builtin_ia32_vfmaddps512_mask:
3559  case X86::BI__builtin_ia32_vfmaddps512_maskz:
3560  case X86::BI__builtin_ia32_vfmaddps512_mask3:
3561  case X86::BI__builtin_ia32_vfmsubps512_mask3:
3562  case X86::BI__builtin_ia32_vfmaddsubpd512_mask:
3563  case X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
3564  case X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
3565  case X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
3566  case X86::BI__builtin_ia32_vfmaddsubps512_mask:
3567  case X86::BI__builtin_ia32_vfmaddsubps512_maskz:
3568  case X86::BI__builtin_ia32_vfmaddsubps512_mask3:
3569  case X86::BI__builtin_ia32_vfmsubaddps512_mask3:
3570  ArgNum = 4;
3571  HasRC = true;
3572  break;
3573  }
3574 
3575  llvm::APSInt Result;
3576 
3577  // We can't check the value of a dependent argument.
3578  Expr *Arg = TheCall->getArg(ArgNum);
3579  if (Arg->isTypeDependent() || Arg->isValueDependent())
3580  return false;
3581 
3582  // Check constant-ness first.
3583  if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
3584  return true;
3585 
3586  // Make sure the rounding mode is either ROUND_CUR_DIRECTION or that the
3587  // ROUND_NO_EXC bit is set. If the intrinsic has rounding control (bits 1:0),
3588  // make sure it is only combined with ROUND_NO_EXC. If the intrinsic does not
3589  // have rounding control, allow ROUND_NO_EXC and ROUND_CUR_DIRECTION together.
3590  if (Result == 4/*ROUND_CUR_DIRECTION*/ ||
3591  Result == 8/*ROUND_NO_EXC*/ ||
3592  (!HasRC && Result == 12/*ROUND_CUR_DIRECTION|ROUND_NO_EXC*/) ||
3593  (HasRC && Result.getZExtValue() >= 8 && Result.getZExtValue() <= 11))
3594  return false;
3595 
3596  return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_rounding)
3597  << Arg->getSourceRange();
3598 }
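// For illustration only (hypothetical caller code, not part of this file), using
// the usual <immintrin.h> wrapper _mm512_add_round_pd, the check above behaves
// roughly like this:
//   __m512d ok1 = _mm512_add_round_pd(a, b, _MM_FROUND_CUR_DIRECTION);               // 4: accepted
//   __m512d ok2 = _mm512_add_round_pd(a, b, _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC); // 3|8 == 11: accepted
//   __m512d bad = _mm512_add_round_pd(a, b, _MM_FROUND_TO_ZERO);                     // 3: rejected, RC bits without ROUND_NO_EXC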
3599 
3600 // Check if the gather/scatter scale is legal.
3601 bool Sema::CheckX86BuiltinGatherScatterScale(unsigned BuiltinID,
3602  CallExpr *TheCall) {
3603  unsigned ArgNum = 0;
3604  switch (BuiltinID) {
3605  default:
3606  return false;
3607  case X86::BI__builtin_ia32_gatherpfdpd:
3608  case X86::BI__builtin_ia32_gatherpfdps:
3609  case X86::BI__builtin_ia32_gatherpfqpd:
3610  case X86::BI__builtin_ia32_gatherpfqps:
3611  case X86::BI__builtin_ia32_scatterpfdpd:
3612  case X86::BI__builtin_ia32_scatterpfdps:
3613  case X86::BI__builtin_ia32_scatterpfqpd:
3614  case X86::BI__builtin_ia32_scatterpfqps:
3615  ArgNum = 3;
3616  break;
3617  case X86::BI__builtin_ia32_gatherd_pd:
3618  case X86::BI__builtin_ia32_gatherd_pd256:
3619  case X86::BI__builtin_ia32_gatherq_pd:
3620  case X86::BI__builtin_ia32_gatherq_pd256:
3621  case X86::BI__builtin_ia32_gatherd_ps:
3622  case X86::BI__builtin_ia32_gatherd_ps256:
3623  case X86::BI__builtin_ia32_gatherq_ps:
3624  case X86::BI__builtin_ia32_gatherq_ps256:
3625  case X86::BI__builtin_ia32_gatherd_q:
3626  case X86::BI__builtin_ia32_gatherd_q256:
3627  case X86::BI__builtin_ia32_gatherq_q:
3628  case X86::BI__builtin_ia32_gatherq_q256:
3629  case X86::BI__builtin_ia32_gatherd_d:
3630  case X86::BI__builtin_ia32_gatherd_d256:
3631  case X86::BI__builtin_ia32_gatherq_d:
3632  case X86::BI__builtin_ia32_gatherq_d256:
3633  case X86::BI__builtin_ia32_gather3div2df:
3634  case X86::BI__builtin_ia32_gather3div2di:
3635  case X86::BI__builtin_ia32_gather3div4df:
3636  case X86::BI__builtin_ia32_gather3div4di:
3637  case X86::BI__builtin_ia32_gather3div4sf:
3638  case X86::BI__builtin_ia32_gather3div4si:
3639  case X86::BI__builtin_ia32_gather3div8sf:
3640  case X86::BI__builtin_ia32_gather3div8si:
3641  case X86::BI__builtin_ia32_gather3siv2df:
3642  case X86::BI__builtin_ia32_gather3siv2di:
3643  case X86::BI__builtin_ia32_gather3siv4df:
3644  case X86::BI__builtin_ia32_gather3siv4di:
3645  case X86::BI__builtin_ia32_gather3siv4sf:
3646  case X86::BI__builtin_ia32_gather3siv4si:
3647  case X86::BI__builtin_ia32_gather3siv8sf:
3648  case X86::BI__builtin_ia32_gather3siv8si:
3649  case X86::BI__builtin_ia32_gathersiv8df:
3650  case X86::BI__builtin_ia32_gathersiv16sf:
3651  case X86::BI__builtin_ia32_gatherdiv8df:
3652  case X86::BI__builtin_ia32_gatherdiv16sf:
3653  case X86::BI__builtin_ia32_gathersiv8di:
3654  case X86::BI__builtin_ia32_gathersiv16si:
3655  case X86::BI__builtin_ia32_gatherdiv8di:
3656  case X86::BI__builtin_ia32_gatherdiv16si:
3657  case X86::BI__builtin_ia32_scatterdiv2df:
3658  case X86::BI__builtin_ia32_scatterdiv2di:
3659  case X86::BI__builtin_ia32_scatterdiv4df:
3660  case X86::BI__builtin_ia32_scatterdiv4di:
3661  case X86::BI__builtin_ia32_scatterdiv4sf:
3662  case X86::BI__builtin_ia32_scatterdiv4si:
3663  case X86::BI__builtin_ia32_scatterdiv8sf:
3664  case X86::BI__builtin_ia32_scatterdiv8si:
3665  case X86::BI__builtin_ia32_scattersiv2df:
3666  case X86::BI__builtin_ia32_scattersiv2di:
3667  case X86::BI__builtin_ia32_scattersiv4df:
3668  case X86::BI__builtin_ia32_scattersiv4di:
3669  case X86::BI__builtin_ia32_scattersiv4sf:
3670  case X86::BI__builtin_ia32_scattersiv4si:
3671  case X86::BI__builtin_ia32_scattersiv8sf:
3672  case X86::BI__builtin_ia32_scattersiv8si:
3673  case X86::BI__builtin_ia32_scattersiv8df:
3674  case X86::BI__builtin_ia32_scattersiv16sf:
3675  case X86::BI__builtin_ia32_scatterdiv8df:
3676  case X86::BI__builtin_ia32_scatterdiv16sf:
3677  case X86::BI__builtin_ia32_scattersiv8di:
3678  case X86::BI__builtin_ia32_scattersiv16si:
3679  case X86::BI__builtin_ia32_scatterdiv8di:
3680  case X86::BI__builtin_ia32_scatterdiv16si:
3681  ArgNum = 4;
3682  break;
3683  }
3684 
3685  llvm::APSInt Result;
3686 
3687  // We can't check the value of a dependent argument.
3688  Expr *Arg = TheCall->getArg(ArgNum);
3689  if (Arg->isTypeDependent() || Arg->isValueDependent())
3690  return false;
3691 
3692  // Check constant-ness first.
3693  if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
3694  return true;
3695 
3696  if (Result == 1 || Result == 2 || Result == 4 || Result == 8)
3697  return false;
3698 
3699  return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_scale)
3700  << Arg->getSourceRange();
3701 }
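// For illustration only (hypothetical caller code, not part of this file), using
// the usual <immintrin.h> gather wrapper, the scale immediate must be 1, 2, 4 or 8:
//   __m512d ok  = _mm512_i32gather_pd(vindex, base, 8);  // accepted
//   __m512d bad = _mm512_i32gather_pd(vindex, base, 3);  // rejected with err_x86_builtin_invalid_scale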
3702 
3703 static bool isX86_32Builtin(unsigned BuiltinID) {
3704  // These builtins only work on x86-32 targets.
3705  switch (BuiltinID) {
3706  case X86::BI__builtin_ia32_readeflags_u32:
3707  case X86::BI__builtin_ia32_writeeflags_u32:
3708  return true;
3709  }
3710 
3711  return false;
3712 }
3713 
3714 bool Sema::CheckX86BuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
3715  if (BuiltinID == X86::BI__builtin_cpu_supports)
3716  return SemaBuiltinCpuSupports(*this, TheCall);
3717 
3718  if (BuiltinID == X86::BI__builtin_cpu_is)
3719  return SemaBuiltinCpuIs(*this, TheCall);
3720 
3721  // Check for 32-bit only builtins on a 64-bit target.
3722  const llvm::Triple &TT = Context.getTargetInfo().getTriple();
3723  if (TT.getArch() != llvm::Triple::x86 && isX86_32Builtin(BuiltinID))
3724  return Diag(TheCall->getCallee()->getBeginLoc(),
3725  diag::err_32_bit_builtin_64_bit_tgt);
3726 
 3727  // If the intrinsic has rounding or SAE, make sure it is valid.
3728  if (CheckX86BuiltinRoundingOrSAE(BuiltinID, TheCall))
3729  return true;
3730 
 3731  // If the intrinsic has a gather/scatter scale immediate, make sure it is valid.
3732  if (CheckX86BuiltinGatherScatterScale(BuiltinID, TheCall))
3733  return true;
3734 
3735  // For intrinsics which take an immediate value as part of the instruction,
3736  // range check them here.
3737  int i = 0, l = 0, u = 0;
3738  switch (BuiltinID) {
3739  default:
3740  return false;
3741  case X86::BI__builtin_ia32_vec_ext_v2si:
3742  case X86::BI__builtin_ia32_vec_ext_v2di:
3743  case X86::BI__builtin_ia32_vextractf128_pd256:
3744  case X86::BI__builtin_ia32_vextractf128_ps256:
3745  case X86::BI__builtin_ia32_vextractf128_si256:
3746  case X86::BI__builtin_ia32_extract128i256:
3747  case X86::BI__builtin_ia32_extractf64x4_mask:
3748  case X86::BI__builtin_ia32_extracti64x4_mask:
3749  case X86::BI__builtin_ia32_extractf32x8_mask:
3750  case X86::BI__builtin_ia32_extracti32x8_mask:
3751  case X86::BI__builtin_ia32_extractf64x2_256_mask:
3752  case X86::BI__builtin_ia32_extracti64x2_256_mask:
3753  case X86::BI__builtin_ia32_extractf32x4_256_mask:
3754  case X86::BI__builtin_ia32_extracti32x4_256_mask:
3755  i = 1; l = 0; u = 1;
3756  break;
3757  case X86::BI__builtin_ia32_vec_set_v2di:
3758  case X86::BI__builtin_ia32_vinsertf128_pd256:
3759  case X86::BI__builtin_ia32_vinsertf128_ps256:
3760  case X86::BI__builtin_ia32_vinsertf128_si256:
3761  case X86::BI__builtin_ia32_insert128i256:
3762  case X86::BI__builtin_ia32_insertf32x8:
3763  case X86::BI__builtin_ia32_inserti32x8:
3764  case X86::BI__builtin_ia32_insertf64x4:
3765  case X86::BI__builtin_ia32_inserti64x4:
3766  case X86::BI__builtin_ia32_insertf64x2_256:
3767  case X86::BI__builtin_ia32_inserti64x2_256:
3768  case X86::BI__builtin_ia32_insertf32x4_256:
3769  case X86::BI__builtin_ia32_inserti32x4_256:
3770  i = 2; l = 0; u = 1;
3771  break;
3772  case X86::BI__builtin_ia32_vpermilpd:
3773  case X86::BI__builtin_ia32_vec_ext_v4hi:
3774  case X86::BI__builtin_ia32_vec_ext_v4si:
3775  case X86::BI__builtin_ia32_vec_ext_v4sf:
3776  case X86::BI__builtin_ia32_vec_ext_v4di:
3777  case X86::BI__builtin_ia32_extractf32x4_mask:
3778  case X86::BI__builtin_ia32_extracti32x4_mask:
3779  case X86::BI__builtin_ia32_extractf64x2_512_mask:
3780  case X86::BI__builtin_ia32_extracti64x2_512_mask:
3781  i = 1; l = 0; u = 3;
3782  break;
3783  case X86::BI_mm_prefetch:
3784  case X86::BI__builtin_ia32_vec_ext_v8hi:
3785  case X86::BI__builtin_ia32_vec_ext_v8si:
3786  i = 1; l = 0; u = 7;
3787  break;
3788  case X86::BI__builtin_ia32_sha1rnds4:
3789  case X86::BI__builtin_ia32_blendpd:
3790  case X86::BI__builtin_ia32_shufpd:
3791  case X86::BI__builtin_ia32_vec_set_v4hi:
3792  case X86::BI__builtin_ia32_vec_set_v4si:
3793  case X86::BI__builtin_ia32_vec_set_v4di:
3794  case X86::BI__builtin_ia32_shuf_f32x4_256:
3795  case X86::BI__builtin_ia32_shuf_f64x2_256:
3796  case X86::BI__builtin_ia32_shuf_i32x4_256:
3797  case X86::BI__builtin_ia32_shuf_i64x2_256:
3798  case X86::BI__builtin_ia32_insertf64x2_512:
3799  case X86::BI__builtin_ia32_inserti64x2_512:
3800  case X86::BI__builtin_ia32_insertf32x4:
3801  case X86::BI__builtin_ia32_inserti32x4:
3802  i = 2; l = 0; u = 3;
3803  break;
3804  case X86::BI__builtin_ia32_vpermil2pd:
3805  case X86::BI__builtin_ia32_vpermil2pd256:
3806  case X86::BI__builtin_ia32_vpermil2ps:
3807  case X86::BI__builtin_ia32_vpermil2ps256:
3808  i = 3; l = 0; u = 3;
3809  break;
3810  case X86::BI__builtin_ia32_cmpb128_mask:
3811  case X86::BI__builtin_ia32_cmpw128_mask:
3812  case X86::BI__builtin_ia32_cmpd128_mask:
3813  case X86::BI__builtin_ia32_cmpq128_mask:
3814  case X86::BI__builtin_ia32_cmpb256_mask:
3815  case X86::BI__builtin_ia32_cmpw256_mask:
3816  case X86::BI__builtin_ia32_cmpd256_mask:
3817  case X86::BI__builtin_ia32_cmpq256_mask:
3818  case X86::BI__builtin_ia32_cmpb512_mask:
3819  case X86::BI__builtin_ia32_cmpw512_mask:
3820  case X86::BI__builtin_ia32_cmpd512_mask:
3821  case X86::BI__builtin_ia32_cmpq512_mask:
3822  case X86::BI__builtin_ia32_ucmpb128_mask:
3823  case X86::BI__builtin_ia32_ucmpw128_mask:
3824  case X86::BI__builtin_ia32_ucmpd128_mask:
3825  case X86::BI__builtin_ia32_ucmpq128_mask:
3826  case X86::BI__builtin_ia32_ucmpb256_mask:
3827  case X86::BI__builtin_ia32_ucmpw256_mask:
3828  case X86::BI__builtin_ia32_ucmpd256_mask:
3829  case X86::BI__builtin_ia32_ucmpq256_mask:
3830  case X86::BI__builtin_ia32_ucmpb512_mask:
3831  case X86::BI__builtin_ia32_ucmpw512_mask:
3832  case X86::BI__builtin_ia32_ucmpd512_mask:
3833  case X86::BI__builtin_ia32_ucmpq512_mask:
3834  case X86::BI__builtin_ia32_vpcomub:
3835  case X86::BI__builtin_ia32_vpcomuw:
3836  case X86::BI__builtin_ia32_vpcomud:
3837  case X86::BI__builtin_ia32_vpcomuq:
3838  case X86::BI__builtin_ia32_vpcomb:
3839  case X86::BI__builtin_ia32_vpcomw:
3840  case X86::BI__builtin_ia32_vpcomd:
3841  case X86::BI__builtin_ia32_vpcomq:
3842  case X86::BI__builtin_ia32_vec_set_v8hi:
3843  case X86::BI__builtin_ia32_vec_set_v8si:
3844  i = 2; l = 0; u = 7;
3845  break;
3846  case X86::BI__builtin_ia32_vpermilpd256:
3847  case X86::BI__builtin_ia32_roundps:
3848  case X86::BI__builtin_ia32_roundpd:
3849  case X86::BI__builtin_ia32_roundps256:
3850  case X86::BI__builtin_ia32_roundpd256:
3851  case X86::BI__builtin_ia32_getmantpd128_mask:
3852  case X86::BI__builtin_ia32_getmantpd256_mask:
3853  case X86::BI__builtin_ia32_getmantps128_mask:
3854  case X86::BI__builtin_ia32_getmantps256_mask:
3855  case X86::BI__builtin_ia32_getmantpd512_mask:
3856  case X86::BI__builtin_ia32_getmantps512_mask:
3857  case X86::BI__builtin_ia32_vec_ext_v16qi:
3858  case X86::BI__builtin_ia32_vec_ext_v16hi:
3859  i = 1; l = 0; u = 15;
3860  break;
3861  case X86::BI__builtin_ia32_pblendd128:
3862  case X86::BI__builtin_ia32_blendps:
3863  case X86::BI__builtin_ia32_blendpd256:
3864  case X86::BI__builtin_ia32_shufpd256:
3865  case X86::BI__builtin_ia32_roundss:
3866  case X86::BI__builtin_ia32_roundsd:
3867  case X86::BI__builtin_ia32_rangepd128_mask:
3868  case X86::BI__builtin_ia32_rangepd256_mask:
3869  case X86::BI__builtin_ia32_rangepd512_mask:
3870  case X86::BI__builtin_ia32_rangeps128_mask:
3871  case X86::BI__builtin_ia32_rangeps256_mask:
3872  case X86::BI__builtin_ia32_rangeps512_mask:
3873  case X86::BI__builtin_ia32_getmantsd_round_mask:
3874  case X86::BI__builtin_ia32_getmantss_round_mask:
3875  case X86::BI__builtin_ia32_vec_set_v16qi:
3876  case X86::BI__builtin_ia32_vec_set_v16hi:
3877  i = 2; l = 0; u = 15;
3878  break;
3879  case X86::BI__builtin_ia32_vec_ext_v32qi:
3880  i = 1; l = 0; u = 31;
3881  break;
3882  case X86::BI__builtin_ia32_cmpps:
3883  case X86::BI__builtin_ia32_cmpss:
3884  case X86::BI__builtin_ia32_cmppd:
3885  case X86::BI__builtin_ia32_cmpsd:
3886  case X86::BI__builtin_ia32_cmpps256:
3887  case X86::BI__builtin_ia32_cmppd256:
3888  case X86::BI__builtin_ia32_cmpps128_mask:
3889  case X86::BI__builtin_ia32_cmppd128_mask:
3890  case X86::BI__builtin_ia32_cmpps256_mask:
3891  case X86::BI__builtin_ia32_cmppd256_mask:
3892  case X86::BI__builtin_ia32_cmpps512_mask:
3893  case X86::BI__builtin_ia32_cmppd512_mask:
3894  case X86::BI__builtin_ia32_cmpsd_mask:
3895  case X86::BI__builtin_ia32_cmpss_mask:
3896  case X86::BI__builtin_ia32_vec_set_v32qi:
3897  i = 2; l = 0; u = 31;
3898  break;
3899  case X86::BI__builtin_ia32_permdf256:
3900  case X86::BI__builtin_ia32_permdi256:
3901  case X86::BI__builtin_ia32_permdf512:
3902  case X86::BI__builtin_ia32_permdi512:
3903  case X86::BI__builtin_ia32_vpermilps:
3904  case X86::BI__builtin_ia32_vpermilps256:
3905  case X86::BI__builtin_ia32_vpermilpd512:
3906  case X86::BI__builtin_ia32_vpermilps512:
3907  case X86::BI__builtin_ia32_pshufd:
3908  case X86::BI__builtin_ia32_pshufd256:
3909  case X86::BI__builtin_ia32_pshufd512:
3910  case X86::BI__builtin_ia32_pshufhw:
3911  case X86::BI__builtin_ia32_pshufhw256:
3912  case X86::BI__builtin_ia32_pshufhw512:
3913  case X86::BI__builtin_ia32_pshuflw:
3914  case X86::BI__builtin_ia32_pshuflw256:
3915  case X86::BI__builtin_ia32_pshuflw512:
3916  case X86::BI__builtin_ia32_vcvtps2ph:
3917  case X86::BI__builtin_ia32_vcvtps2ph_mask:
3918  case X86::BI__builtin_ia32_vcvtps2ph256:
3919  case X86::BI__builtin_ia32_vcvtps2ph256_mask:
3920  case X86::BI__builtin_ia32_vcvtps2ph512_mask:
3921  case X86::BI__builtin_ia32_rndscaleps_128_mask:
3922  case X86::BI__builtin_ia32_rndscalepd_128_mask:
3923  case X86::BI__builtin_ia32_rndscaleps_256_mask:
3924  case X86::BI__builtin_ia32_rndscalepd_256_mask:
3925  case X86::BI__builtin_ia32_rndscaleps_mask:
3926  case X86::BI__builtin_ia32_rndscalepd_mask:
3927  case X86::BI__builtin_ia32_reducepd128_mask:
3928  case X86::BI__builtin_ia32_reducepd256_mask:
3929  case X86::BI__builtin_ia32_reducepd512_mask:
3930  case X86::BI__builtin_ia32_reduceps128_mask:
3931  case X86::BI__builtin_ia32_reduceps256_mask:
3932  case X86::BI__builtin_ia32_reduceps512_mask:
3933  case X86::BI__builtin_ia32_prold512:
3934  case X86::BI__builtin_ia32_prolq512:
3935  case X86::BI__builtin_ia32_prold128:
3936  case X86::BI__builtin_ia32_prold256:
3937  case X86::BI__builtin_ia32_prolq128:
3938  case X86::BI__builtin_ia32_prolq256:
3939  case X86::BI__builtin_ia32_prord512:
3940  case X86::BI__builtin_ia32_prorq512:
3941  case X86::BI__builtin_ia32_prord128:
3942  case X86::BI__builtin_ia32_prord256:
3943  case X86::BI__builtin_ia32_prorq128:
3944  case X86::BI__builtin_ia32_prorq256:
3945  case X86::BI__builtin_ia32_fpclasspd128_mask:
3946  case X86::BI__builtin_ia32_fpclasspd256_mask:
3947  case X86::BI__builtin_ia32_fpclassps128_mask:
3948  case X86::BI__builtin_ia32_fpclassps256_mask:
3949  case X86::BI__builtin_ia32_fpclassps512_mask:
3950  case X86::BI__builtin_ia32_fpclasspd512_mask:
3951  case X86::BI__builtin_ia32_fpclasssd_mask:
3952  case X86::BI__builtin_ia32_fpclassss_mask:
3953  case X86::BI__builtin_ia32_pslldqi128_byteshift:
3954  case X86::BI__builtin_ia32_pslldqi256_byteshift:
3955  case X86::BI__builtin_ia32_pslldqi512_byteshift:
3956  case X86::BI__builtin_ia32_psrldqi128_byteshift:
3957  case X86::BI__builtin_ia32_psrldqi256_byteshift:
3958  case X86::BI__builtin_ia32_psrldqi512_byteshift:
3959  case X86::BI__builtin_ia32_kshiftliqi:
3960  case X86::BI__builtin_ia32_kshiftlihi:
3961  case X86::BI__builtin_ia32_kshiftlisi:
3962  case X86::BI__builtin_ia32_kshiftlidi:
3963  case X86::BI__builtin_ia32_kshiftriqi:
3964  case X86::BI__builtin_ia32_kshiftrihi:
3965  case X86::BI__builtin_ia32_kshiftrisi:
3966  case X86::BI__builtin_ia32_kshiftridi:
3967  i = 1; l = 0; u = 255;
3968  break;
3969  case X86::BI__builtin_ia32_vperm2f128_pd256:
3970  case X86::BI__builtin_ia32_vperm2f128_ps256:
3971  case X86::BI__builtin_ia32_vperm2f128_si256:
3972  case X86::BI__builtin_ia32_permti256:
3973  case X86::BI__builtin_ia32_pblendw128:
3974  case X86::BI__builtin_ia32_pblendw256:
3975  case X86::BI__builtin_ia32_blendps256:
3976  case X86::BI__builtin_ia32_pblendd256:
3977  case X86::BI__builtin_ia32_palignr128:
3978  case X86::BI__builtin_ia32_palignr256:
3979  case X86::BI__builtin_ia32_palignr512:
3980  case X86::BI__builtin_ia32_alignq512:
3981  case X86::BI__builtin_ia32_alignd512:
3982  case X86::BI__builtin_ia32_alignd128:
3983  case X86::BI__builtin_ia32_alignd256:
3984  case X86::BI__builtin_ia32_alignq128:
3985  case X86::BI__builtin_ia32_alignq256:
3986  case X86::BI__builtin_ia32_vcomisd:
3987  case X86::BI__builtin_ia32_vcomiss:
3988  case X86::BI__builtin_ia32_shuf_f32x4:
3989  case X86::BI__builtin_ia32_shuf_f64x2:
3990  case X86::BI__builtin_ia32_shuf_i32x4:
3991  case X86::BI__builtin_ia32_shuf_i64x2:
3992  case X86::BI__builtin_ia32_shufpd512:
3993  case X86::BI__builtin_ia32_shufps:
3994  case X86::BI__builtin_ia32_shufps256:
3995  case X86::BI__builtin_ia32_shufps512:
3996  case X86::BI__builtin_ia32_dbpsadbw128:
3997  case X86::BI__builtin_ia32_dbpsadbw256:
3998  case X86::BI__builtin_ia32_dbpsadbw512:
3999  case X86::BI__builtin_ia32_vpshldd128:
4000  case X86::BI__builtin_ia32_vpshldd256:
4001  case X86::BI__builtin_ia32_vpshldd512:
4002  case X86::BI__builtin_ia32_vpshldq128:
4003  case X86::BI__builtin_ia32_vpshldq256:
4004  case X86::BI__builtin_ia32_vpshldq512:
4005  case X86::BI__builtin_ia32_vpshldw128:
4006  case X86::BI__builtin_ia32_vpshldw256:
4007  case X86::BI__builtin_ia32_vpshldw512:
4008  case X86::BI__builtin_ia32_vpshrdd128:
4009  case X86::BI__builtin_ia32_vpshrdd256:
4010  case X86::BI__builtin_ia32_vpshrdd512:
4011  case X86::BI__builtin_ia32_vpshrdq128:
4012  case X86::BI__builtin_ia32_vpshrdq256:
4013  case X86::BI__builtin_ia32_vpshrdq512:
4014  case X86::BI__builtin_ia32_vpshrdw128:
4015  case X86::BI__builtin_ia32_vpshrdw256:
4016  case X86::BI__builtin_ia32_vpshrdw512:
4017  i = 2; l = 0; u = 255;
4018  break;
4019  case X86::BI__builtin_ia32_fixupimmpd512_mask:
4020  case X86::BI__builtin_ia32_fixupimmpd512_maskz:
4021  case X86::BI__builtin_ia32_fixupimmps512_mask:
4022  case X86::BI__builtin_ia32_fixupimmps512_maskz:
4023  case X86::BI__builtin_ia32_fixupimmsd_mask:
4024  case X86::BI__builtin_ia32_fixupimmsd_maskz:
4025  case X86::BI__builtin_ia32_fixupimmss_mask:
4026  case X86::BI__builtin_ia32_fixupimmss_maskz:
4027  case X86::BI__builtin_ia32_fixupimmpd128_mask:
4028  case X86::BI__builtin_ia32_fixupimmpd128_maskz:
4029  case X86::BI__builtin_ia32_fixupimmpd256_mask:
4030  case X86::BI__builtin_ia32_fixupimmpd256_maskz:
4031  case X86::BI__builtin_ia32_fixupimmps128_mask:
4032  case X86::BI__builtin_ia32_fixupimmps128_maskz:
4033  case X86::BI__builtin_ia32_fixupimmps256_mask:
4034  case X86::BI__builtin_ia32_fixupimmps256_maskz:
4035  case X86::BI__builtin_ia32_pternlogd512_mask:
4036  case X86::BI__builtin_ia32_pternlogd512_maskz:
4037  case X86::BI__builtin_ia32_pternlogq512_mask:
4038  case X86::BI__builtin_ia32_pternlogq512_maskz:
4039  case X86::BI__builtin_ia32_pternlogd128_mask:
4040  case X86::BI__builtin_ia32_pternlogd128_maskz:
4041  case X86::BI__builtin_ia32_pternlogd256_mask:
4042  case X86::BI__builtin_ia32_pternlogd256_maskz:
4043  case X86::BI__builtin_ia32_pternlogq128_mask:
4044  case X86::BI__builtin_ia32_pternlogq128_maskz:
4045  case X86::BI__builtin_ia32_pternlogq256_mask:
4046  case X86::BI__builtin_ia32_pternlogq256_maskz:
4047  i = 3; l = 0; u = 255;
4048  break;
4049  case X86::BI__builtin_ia32_gatherpfdpd:
4050  case X86::BI__builtin_ia32_gatherpfdps:
4051  case X86::BI__builtin_ia32_gatherpfqpd:
4052  case X86::BI__builtin_ia32_gatherpfqps:
4053  case X86::BI__builtin_ia32_scatterpfdpd:
4054  case X86::BI__builtin_ia32_scatterpfdps:
4055  case X86::BI__builtin_ia32_scatterpfqpd:
4056  case X86::BI__builtin_ia32_scatterpfqps:
4057  i = 4; l = 2; u = 3;
4058  break;
4059  case X86::BI__builtin_ia32_reducesd_mask:
4060  case X86::BI__builtin_ia32_reducess_mask:
4061  case X86::BI__builtin_ia32_rndscalesd_round_mask:
4062  case X86::BI__builtin_ia32_rndscaless_round_mask:
4063  i = 4; l = 0; u = 255;
4064  break;
4065  }
4066 
 4067  // Note that we don't force a hard error on the range check here, allowing
 4068  // template-generated or macro-generated dead code to potentially have out-of-
 4069  // range values. Such code still needs to code generate, but it doesn't
 4070  // necessarily need to make sense. We use a warning that defaults to an error.
4071  return SemaBuiltinConstantArgRange(TheCall, i, l, u, /*RangeIsError*/ false);
4072 }
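// For illustration only (hypothetical caller code, not part of this file), with the
// table above __builtin_ia32_vec_ext_v4si uses i = 1, l = 0, u = 3:
//   typedef int v4si __attribute__((vector_size(16)));
//   int ok  = __builtin_ia32_vec_ext_v4si((v4si){1, 2, 3, 4}, 1);  // in [0, 3]: accepted
//   int bad = __builtin_ia32_vec_ext_v4si((v4si){1, 2, 3, 4}, 4);  // out of range: warning that defaults to an error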
4073 
 4074 /// Given a FunctionDecl's FormatAttr, attempts to populate the FormatStringInfo
4075 /// parameter with the FormatAttr's correct format_idx and firstDataArg.
4076 /// Returns true when the format fits the function and the FormatStringInfo has
4077 /// been populated.
4078 bool Sema::getFormatStringInfo(const FormatAttr *Format, bool IsCXXMember,
4079  FormatStringInfo *FSI) {
4080  FSI->HasVAListArg = Format->getFirstArg() == 0;
4081  FSI->FormatIdx = Format->getFormatIdx() - 1;
4082  FSI->FirstDataArg = FSI->HasVAListArg ? 0 : Format->getFirstArg() - 1;
4083 
4084  // The way the format attribute works in GCC, the implicit this argument
4085  // of member functions is counted. However, it doesn't appear in our own
4086  // lists, so decrement format_idx in that case.
4087  if (IsCXXMember) {
 4088  if (FSI->FormatIdx == 0)
4089  return false;
4090  --FSI->FormatIdx;
4091  if (FSI->FirstDataArg != 0)
4092  --FSI->FirstDataArg;
4093  }
4094  return true;
4095 }
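// For illustration only (hypothetical declaration, not part of this file): with
// GCC-style indexing the implicit 'this' of a member function counts as argument 1,
// so a declaration such as
//   struct Logger {
//     void log(const char *fmt, ...) __attribute__((format(printf, 2, 3)));
//   };
// passes format_idx == 2 for 'fmt'; the adjustment above shifts FormatIdx and
// FirstDataArg so they refer to the explicit parameters only.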
4096 
 4097 /// Checks if the given expression evaluates to null.
4098 ///
4099 /// Returns true if the value evaluates to null.
4100 static bool CheckNonNullExpr(Sema &S, const Expr *Expr) {
4101  // If the expression has non-null type, it doesn't evaluate to null.
4102  if (auto nullability
4103  = Expr->IgnoreImplicit()->getType()->getNullability(S.Context)) {
4104  if (*nullability == NullabilityKind::NonNull)
4105  return false;
4106  }
4107 
4108  // As a special case, transparent unions initialized with zero are
4109  // considered null for the purposes of the nonnull attribute.
4110  if (const RecordType *UT = Expr->getType()->getAsUnionType()) {
4111  if (UT->getDecl()->hasAttr<TransparentUnionAttr>())
4112  if (const CompoundLiteralExpr *CLE =
4113  dyn_cast<CompoundLiteralExpr>(Expr))
4114  if (const InitListExpr *ILE =
4115  dyn_cast<InitListExpr>(CLE->getInitializer()))
4116  Expr = ILE->getInit(0);
4117  }
4118 
4119  bool Result;
4120  return (!Expr->isValueDependent() &&
4121  Expr->EvaluateAsBooleanCondition(Result, S.Context) &&
4122  !Result);
4123 }
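// For illustration only (hypothetical caller, not part of this file), a sketch of
// the transparent-union special case handled above:
//   typedef union { int *ip; float *fp; } AnyPtr __attribute__((transparent_union));
//   void use(AnyPtr p) __attribute__((nonnull));
//   use((AnyPtr){0});   // the zero-initialized first member makes the argument count as null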
4124 
 4125 static void CheckNonNullArgument(Sema &S,
 4126  const Expr *ArgExpr,
4127  SourceLocation CallSiteLoc) {
4128  if (CheckNonNullExpr(S, ArgExpr))
4129  S.DiagRuntimeBehavior(CallSiteLoc, ArgExpr,
4130  S.PDiag(diag::warn_null_arg)
4131  << ArgExpr->getSourceRange());
4132 }
4133 
4134 bool Sema::GetFormatNSStringIdx(const FormatAttr *Format, unsigned &Idx) {
4135  FormatStringInfo FSI;
4136  if ((GetFormatStringType(Format) == FST_NSString) &&
4137  getFormatStringInfo(Format, false, &FSI)) {
4138  Idx = FSI.FormatIdx;
4139  return true;
4140  }
4141  return false;
4142 }
4143 
 4144 /// Diagnose the use of a %s directive in an NSString which is being passed
 4145 /// as a format string to a formatting method.
4146 static void
 4147 DiagnoseCStringFormatDirectiveInCFAPI(Sema &S,
 4148  const NamedDecl *FDecl,
4149  Expr **Args,
4150  unsigned NumArgs) {
4151  unsigned Idx = 0;
4152  bool Format = false;
 4153  ObjCStringFormatFamily SFFamily = FDecl->getObjCFStringFormattingFamily();
 4154  if (SFFamily == ObjCStringFormatFamily::SFF_CFString) {
4155  Idx = 2;
4156  Format = true;
4157  }
4158  else
4159  for (const auto *I : FDecl->specific_attrs<FormatAttr>()) {
4160  if (S.GetFormatNSStringIdx(I, Idx)) {
4161  Format = true;
4162  break;
4163  }
4164  }
4165  if (!Format || NumArgs <= Idx)
4166  return;
4167  const Expr *FormatExpr = Args[Idx];
4168  if (const CStyleCastExpr *CSCE = dyn_cast<CStyleCastExpr>(FormatExpr))
4169  FormatExpr = CSCE->getSubExpr();
4170  const StringLiteral *FormatString;
4171  if (const ObjCStringLiteral *OSL =
4172  dyn_cast<ObjCStringLiteral>(FormatExpr->IgnoreParenImpCasts()))
4173  FormatString = OSL->getString();
4174  else
4175  FormatString = dyn_cast<StringLiteral>(FormatExpr->IgnoreParenImpCasts());
4176  if (!FormatString)
4177  return;
4178  if (S.FormatStringHasSArg(FormatString)) {
4179  S.Diag(FormatExpr->getExprLoc(), diag::warn_objc_cdirective_format_string)
4180  << "%s" << 1 << 1;
4181  S.Diag(FDecl->getLocation(), diag::note_entity_declared_at)
4182  << FDecl->getDeclName();
4183  }
4184 }
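// For illustration only (hypothetical Objective-C caller, not part of this file):
//   NSLog(@"%s", cString);   // flagged: '%s' directive in an NSString format string
// since the encoding of a '%s' argument is ambiguous for NSString/CFString formats.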
4185 
4186 /// Determine whether the given type has a non-null nullability annotation.
 4187 static bool isNonNullType(ASTContext &ctx, QualType type) {
 4188  if (auto nullability = type->getNullability(ctx))
4189  return *nullability == NullabilityKind::NonNull;
4190 
4191  return false;
4192 }
4193 
 4194 static void CheckNonNullArguments(Sema &S,
 4195  const NamedDecl *FDecl,
4196  const FunctionProtoType *Proto,
4197  ArrayRef<const Expr *> Args,
4198  SourceLocation CallSiteLoc) {
4199  assert((FDecl || Proto) && "Need a function declaration or prototype");
4200 
 4201  // Already checked by the constant evaluator.
4202  if (S.isConstantEvaluated())
4203  return;
4204  // Check the attributes attached to the method/function itself.
4205  llvm::SmallBitVector NonNullArgs;
4206  if (FDecl) {
4207  // Handle the nonnull attribute on the function/method declaration itself.
4208  for (const auto *NonNull : FDecl->specific_attrs<NonNullAttr>()) {
4209  if (!NonNull->args_size()) {
4210  // Easy case: all pointer arguments are nonnull.
4211  for (const auto *Arg : Args)
4212  if (S.isValidPointerAttrType(Arg->getType()))
4213  CheckNonNullArgument(S, Arg, CallSiteLoc);
4214  return;
4215  }
4216 
4217  for (const ParamIdx &Idx : NonNull->args()) {
4218  unsigned IdxAST = Idx.getASTIndex();
4219  if (IdxAST >= Args.size())
4220  continue;
4221  if (NonNullArgs.empty())
4222  NonNullArgs.resize(Args.size());
4223  NonNullArgs.set(IdxAST);
4224  }
4225  }
4226  }
4227 
4228  if (FDecl && (isa<FunctionDecl>(FDecl) || isa<ObjCMethodDecl>(FDecl))) {
4229  // Handle the nonnull attribute on the parameters of the
4230  // function/method.
4231  ArrayRef<ParmVarDecl*> parms;
4232  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(FDecl))
4233  parms = FD->parameters();
4234  else
4235  parms = cast<ObjCMethodDecl>(FDecl)->parameters();
4236 
4237  unsigned ParamIndex = 0;
4238  for (ArrayRef<ParmVarDecl*>::iterator I = parms.begin(), E = parms.end();
4239  I != E; ++I, ++ParamIndex) {
4240  const ParmVarDecl *PVD = *I;
4241  if (PVD->hasAttr<NonNullAttr>() ||
4242  isNonNullType(S.Context, PVD->getType())) {
4243  if (NonNullArgs.empty())
4244  NonNullArgs.resize(Args.size());
4245 
4246  NonNullArgs.set(ParamIndex);
4247  }
4248  }
4249  } else {
4250  // If we have a non-function, non-method declaration but no
4251  // function prototype, try to dig out the function prototype.
4252  if (!Proto) {
4253  if (const ValueDecl *VD = dyn_cast<ValueDecl>(FDecl)) {
4254  QualType type = VD->getType().getNonReferenceType();
4255  if (auto pointerType = type->getAs<PointerType>())
4256  type = pointerType->getPointeeType();
4257  else if (auto blockType = type->getAs<BlockPointerType>())
4258  type = blockType->getPointeeType();
4259  // FIXME: data member pointers?
4260 
4261  // Dig out the function prototype, if there is one.
4262  Proto = type->getAs<FunctionProtoType>();
4263  }
4264  }
4265 
4266  // Fill in non-null argument information from the nullability
4267  // information on the parameter types (if we have them).
4268  if (Proto) {
4269  unsigned Index = 0;
4270  for (auto paramType : Proto->getParamTypes()) {
4271  if (isNonNullType(S.Context, paramType)) {
4272  if (NonNullArgs.empty())
4273  NonNullArgs.resize(Args.size());
4274 
4275  NonNullArgs.set(Index);
4276  }
4277 
4278  ++Index;
4279  }
4280  }
4281  }
4282 
4283  // Check for non-null arguments.
4284  for (unsigned ArgIndex = 0, ArgIndexEnd = NonNullArgs.size();
4285  ArgIndex != ArgIndexEnd; ++ArgIndex) {
4286  if (NonNullArgs[ArgIndex])
4287  CheckNonNullArgument(S, Args[ArgIndex], CallSiteLoc);
4288  }
4289 }
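// For illustration only (hypothetical caller, not part of this file):
//   void take(int *p) __attribute__((nonnull(1)));
//   take(nullptr);   // warn_null_arg: null passed to a callee that requires a non-null argument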
4290 
4291 /// Handles the checks for format strings, non-POD arguments to vararg
4292 /// functions, NULL arguments passed to non-NULL parameters, and diagnose_if
4293 /// attributes.
4294 void Sema::checkCall(NamedDecl *FDecl, const FunctionProtoType *Proto,
4295  const Expr *ThisArg, ArrayRef<const Expr *> Args,
4296  bool IsMemberFunction, SourceLocation Loc,
4297  SourceRange Range, VariadicCallType CallType) {
4298  // FIXME: We should check as much as we can in the template definition.
4299  if (CurContext->isDependentContext())
4300  return;
4301 
4302  // Printf and scanf checking.
4303  llvm::SmallBitVector CheckedVarArgs;
4304  if (FDecl) {
4305  for (const auto *I : FDecl->specific_attrs<FormatAttr>()) {
4306  // Only create vector if there are format attributes.
4307  CheckedVarArgs.resize(Args.size());
4308 
4309  CheckFormatArguments(I, Args, IsMemberFunction, CallType, Loc, Range,
4310  CheckedVarArgs);
4311  }
4312  }
4313 
 4314  // Refuse non-POD arguments that weren't caught by the format string
4315  // checks above.
4316  auto *FD = dyn_cast_or_null<FunctionDecl>(FDecl);
4317  if (CallType != VariadicDoesNotApply &&
4318  (!FD || FD->getBuiltinID() != Builtin::BI__noop)) {
4319  unsigned NumParams = Proto ? Proto->getNumParams()
4320  : FDecl && isa<FunctionDecl>(FDecl)
4321  ? cast<FunctionDecl>(FDecl)->getNumParams()
4322  : FDecl && isa<ObjCMethodDecl>(FDecl)
4323  ? cast<ObjCMethodDecl>(FDecl)->param_size()
4324  : 0;
4325 
4326  for (unsigned ArgIdx = NumParams; ArgIdx < Args.size(); ++ArgIdx) {
4327  // Args[ArgIdx] can be null in malformed code.
4328  if (const Expr *Arg = Args[ArgIdx]) {
4329  if (CheckedVarArgs.empty() || !CheckedVarArgs[ArgIdx])
4330  checkVariadicArgument(Arg, CallType);
4331  }
4332  }
4333  }
4334 
4335  if (FDecl || Proto) {
4336  CheckNonNullArguments(*this, FDecl, Proto, Args, Loc);
4337 
4338  // Type safety checking.
4339  if (FDecl) {
4340  for (const auto *I : FDecl->specific_attrs<ArgumentWithTypeTagAttr>())
4341  CheckArgumentWithTypeTag(I, Args, Loc);
4342  }
4343  }
4344 
4345  if (FD)
4346  diagnoseArgDependentDiagnoseIfAttrs(FD, ThisArg, Args, Loc);
4347 }
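// For illustration only (hypothetical C++ caller, not part of this file), the
// variadic-argument check above is what diagnoses code like:
//   void logv(int tag, ...);
//   std::string s;
//   logv(1, s);   // an object of non-trivial type cannot be passed through '...'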
4348 
4349 /// CheckConstructorCall - Check a constructor call for correctness and safety
4350 /// properties not enforced by the C type system.
4351 void Sema::CheckConstructorCall(FunctionDecl *FDecl,
4352  ArrayRef<const Expr *> Args,
4353  const FunctionProtoType *Proto,
4354  SourceLocation Loc) {
4355  VariadicCallType CallType =
4356  Proto->isVariadic() ? VariadicConstructor : VariadicDoesNotApply;
4357  checkCall(FDecl, Proto, /*ThisArg=*/nullptr, Args, /*IsMemberFunction=*/true,
4358  Loc, SourceRange(), CallType);
4359 }
4360 
4361 /// CheckFunctionCall - Check a direct function call for various correctness
4362 /// and safety properties not strictly enforced by the C type system.
4363 bool Sema::CheckFunctionCall(FunctionDecl *FDecl, CallExpr *TheCall,
4364  const FunctionProtoType *Proto) {
4365  bool IsMemberOperatorCall = isa<CXXOperatorCallExpr>(TheCall) &&
4366  isa<CXXMethodDecl>(FDecl);
4367  bool IsMemberFunction = isa<CXXMemberCallExpr>(TheCall) ||
4368  IsMemberOperatorCall;
4369  VariadicCallType CallType = getVariadicCallType(FDecl, Proto,
4370  TheCall->getCallee());
4371  Expr** Args = TheCall->getArgs();
4372  unsigned NumArgs = TheCall->getNumArgs();
4373 
4374  Expr *ImplicitThis = nullptr;
4375  if (IsMemberOperatorCall) {
4376  // If this is a call to a member operator, hide the first argument
4377  // from checkCall.
4378  // FIXME: Our choice of AST representation here is less than ideal.
4379  ImplicitThis = Args[0];
4380  ++Args;
4381  --NumArgs;
4382  } else if (IsMemberFunction)
4383  ImplicitThis =
4384  cast<CXXMemberCallExpr>(TheCall)->getImplicitObjectArgument();
4385 
4386  checkCall(FDecl, Proto, ImplicitThis, llvm::makeArrayRef(Args, NumArgs),
4387  IsMemberFunction, TheCall->getRParenLoc(),
4388  TheCall->getCallee()->getSourceRange(), CallType);
4389 
4390  IdentifierInfo *FnInfo = FDecl->getIdentifier();
4391  // None of the checks below are needed for functions that don't have
4392  // simple names (e.g., C++ conversion functions).
4393  if (!FnInfo)
4394  return false;
4395 
4396  CheckAbsoluteValueFunction(TheCall, FDecl);
4397  CheckMaxUnsignedZero(TheCall, FDecl);
4398 
4399  if (getLangOpts().ObjC)
4400  DiagnoseCStringFormatDirectiveInCFAPI(*this, FDecl, Args, NumArgs);
4401 
4402  unsigned CMId = FDecl->getMemoryFunctionKind();
4403  if (CMId == 0)
4404  return false;
4405 
4406  // Handle memory setting and copying functions.
4407  if (CMId == Builtin::BIstrlcpy || CMId == Builtin::BIstrlcat)
4408  CheckStrlcpycatArguments(TheCall, FnInfo);
4409  else if (CMId == Builtin::BIstrncat)
4410  CheckStrncatArguments(TheCall, FnInfo);
4411  else
4412  CheckMemaccessArguments(TheCall, CMId, FnInfo);
4413 
4414  return false;
4415 }
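// For illustration only (hypothetical callers, not part of this file), the memory
// function checks above flag suspicious size arguments such as:
//   char dst[64], src[16];
//   strlcpy(dst, src, sizeof(src));   // size appears to be the size of the source, not the destination
//   char *buf = dst;
//   memset(buf, 0, sizeof(buf));      // size is the size of the pointer, not of the buffer it points to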
4416 
4417 bool Sema::CheckObjCMethodCall(ObjCMethodDecl *Method, SourceLocation lbrac,
4418  ArrayRef<const Expr *> Args) {
4419  VariadicCallType CallType =
4420  Method->isVariadic() ? VariadicMethod : VariadicDoesNotApply;
4421 
4422  checkCall(Method, nullptr, /*ThisArg=*/nullptr, Args,
4423  /*IsMemberFunction=*/false, lbrac, Method->getSourceRange(),
4424  CallType);
4425 
4426  return false;
4427 }
4428 
4429 bool Sema::CheckPointerCall(NamedDecl *NDecl, CallExpr *TheCall,
4430  const FunctionProtoType *Proto) {
4431  QualType Ty;
4432  if (const auto *V = dyn_cast<VarDecl>(NDecl))
4433  Ty = V->getType().getNonReferenceType();
4434  else if (const auto *F = dyn_cast<FieldDecl>(NDecl))
4435  Ty = F->getType().getNonReferenceType();
4436  else
4437  return false;
4438 
4439  if (!Ty->isBlockPointerType() && !Ty->isFunctionPointerType() &&
4440  !Ty->isFunctionProtoType())
4441  return false;
4442 
4443  VariadicCallType CallType;
4444  if (!Proto || !Proto->isVariadic()) {
4445  CallType = VariadicDoesNotApply;
4446  } else if (Ty->isBlockPointerType()) {
4447  CallType = VariadicBlock;
4448  } else { // Ty->isFunctionPointerType()
4449  CallType = VariadicFunction;
4450  }
4451 
4452  checkCall(NDecl, Proto, /*ThisArg=*/nullptr,
4453  llvm::makeArrayRef(TheCall->getArgs(), TheCall->getNumArgs()),
4454  /*IsMemberFunction=*/false, TheCall->getRParenLoc(),
4455  TheCall->getCallee()->getSourceRange(), CallType);
4456 
4457  return false;
4458 }
4459 
4460 /// Checks function calls when a FunctionDecl or a NamedDecl is not available,
4461 /// such as function pointers returned from functions.
4462 bool Sema::CheckOtherCall(CallExpr *TheCall, const FunctionProtoType *Proto) {
4463  VariadicCallType CallType = getVariadicCallType(/*FDecl=*/nullptr, Proto,
4464  TheCall->getCallee());
4465  checkCall(/*FDecl=*/nullptr, Proto, /*ThisArg=*/nullptr,
4466  llvm::makeArrayRef(TheCall->getArgs(), TheCall->getNumArgs()),
4467  /*IsMemberFunction=*/false, TheCall->getRParenLoc(),
4468  TheCall->getCallee()->getSourceRange(), CallType);
4469 
4470  return false;
4471 }
4472 
4473 static bool isValidOrderingForOp(int64_t Ordering, AtomicExpr::AtomicOp Op) {
4474  if (!llvm::isValidAtomicOrderingCABI(Ordering))
4475  return false;
4476 
4477  auto OrderingCABI = (llvm::AtomicOrderingCABI)Ordering;
4478  switch (Op) {
4479  case AtomicExpr::AO__c11_atomic_init:
4480  case AtomicExpr::AO__opencl_atomic_init:
4481  llvm_unreachable("There is no ordering argument for an init");
4482 
4483  case AtomicExpr::AO__c11_atomic_load:
4484  case AtomicExpr::AO__opencl_atomic_load:
4485  case AtomicExpr::AO__atomic_load_n:
4486  case AtomicExpr::AO__atomic_load:
4487  return OrderingCABI != llvm::AtomicOrderingCABI::release &&
4488  OrderingCABI != llvm::AtomicOrderingCABI::acq_rel;
4489 
4490  case AtomicExpr::AO__c11_atomic_store:
4491  case AtomicExpr::AO__opencl_atomic_store:
4492  case AtomicExpr::AO__atomic_store:
4493  case AtomicExpr::AO__atomic_store_n:
4494  return OrderingCABI != llvm::AtomicOrderingCABI::consume &&
4495  OrderingCABI != llvm::AtomicOrderingCABI::acquire &&
4496  OrderingCABI != llvm::AtomicOrderingCABI::acq_rel;
4497 
4498  default:
4499  return true;
4500  }
4501 }
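// For illustration only (hypothetical caller, not part of this file):
//   _Atomic(int) ai;
//   __c11_atomic_load(&ai, __ATOMIC_RELEASE);      // invalid: release is not a load ordering
//   __c11_atomic_store(&ai, 1, __ATOMIC_ACQUIRE);  // invalid: acquire is not a store ordering
// both are reported via warn_atomic_op_has_invalid_memory_order in BuildAtomicExpr.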
4502 
4503 ExprResult Sema::SemaAtomicOpsOverloaded(ExprResult TheCallResult,
4504  AtomicExpr::AtomicOp Op) {
4505  CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
 4506  DeclRefExpr *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
4507  MultiExprArg Args{TheCall->getArgs(), TheCall->getNumArgs()};
4508  return BuildAtomicExpr({TheCall->getBeginLoc(), TheCall->getEndLoc()},
4509  DRE->getSourceRange(), TheCall->getRParenLoc(), Args,
4510  Op);
4511 }
4512 
 4513 ExprResult Sema::BuildAtomicExpr(SourceRange CallRange, SourceRange ExprRange,
 4514  SourceLocation RParenLoc, MultiExprArg Args,
 4515  AtomicExpr::AtomicOp Op,
 4516  AtomicArgumentOrder ArgOrder) {
4517  // All the non-OpenCL operations take one of the following forms.
4518  // The OpenCL operations take the __c11 forms with one extra argument for
4519  // synchronization scope.
4520  enum {
4521  // C __c11_atomic_init(A *, C)
4522  Init,
4523 
4524  // C __c11_atomic_load(A *, int)
4525  Load,
4526 
4527  // void __atomic_load(A *, CP, int)
4528  LoadCopy,
4529 
4530  // void __atomic_store(A *, CP, int)
4531  Copy,
4532 
4533  // C __c11_atomic_add(A *, M, int)
4534  Arithmetic,
4535 
4536  // C __atomic_exchange_n(A *, CP, int)
4537  Xchg,
4538 
4539  // void __atomic_exchange(A *, C *, CP, int)
4540  GNUXchg,
4541 
4542  // bool __c11_atomic_compare_exchange_strong(A *, C *, CP, int, int)
4543  C11CmpXchg,
4544 
4545  // bool __atomic_compare_exchange(A *, C *, CP, bool, int, int)
4546  GNUCmpXchg
4547  } Form = Init;
4548 
4549  const unsigned NumForm = GNUCmpXchg + 1;
4550  const unsigned NumArgs[] = { 2, 2, 3, 3, 3, 3, 4, 5, 6 };
4551  const unsigned NumVals[] = { 1, 0, 1, 1, 1, 1, 2, 2, 3 };
4552  // where:
4553  // C is an appropriate type,
4554  // A is volatile _Atomic(C) for __c11 builtins and is C for GNU builtins,
4555  // CP is C for __c11 builtins and GNU _n builtins and is C * otherwise,
4556  // M is C if C is an integer, and ptrdiff_t if C is a pointer, and
4557  // the int parameters are for orderings.
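 // For illustration only (hypothetical callers, not part of this file), how these
 // forms map onto the builtins:
 //   _Atomic(int) ai; int gi, val = 1, expected = 0;
 //   __c11_atomic_store(&ai, 42, __ATOMIC_SEQ_CST);       // Copy form, CP is C (passed by value)
 //   __atomic_store(&gi, &val, __ATOMIC_SEQ_CST);         // Copy form, CP is C * (passed by address)
 //   __atomic_compare_exchange(&gi, &expected, &val, 0,
 //                             __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);  // GNUCmpXchg form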
4558 
4559  static_assert(sizeof(NumArgs)/sizeof(NumArgs[0]) == NumForm
4560  && sizeof(NumVals)/sizeof(NumVals[0]) == NumForm,
4561  "need to update code for modified forms");
4562  static_assert(AtomicExpr::AO__c11_atomic_init == 0 &&
4563  AtomicExpr::AO__c11_atomic_fetch_xor + 1 ==
4564  AtomicExpr::AO__atomic_load,
4565  "need to update code for modified C11 atomics");
4566  bool IsOpenCL = Op >= AtomicExpr::AO__opencl_atomic_init &&
4567  Op <= AtomicExpr::AO__opencl_atomic_fetch_max;
4568  bool IsC11 = (Op >= AtomicExpr::AO__c11_atomic_init &&
4569  Op <= AtomicExpr::AO__c11_atomic_fetch_xor) ||
4570  IsOpenCL;
4571  bool IsN = Op == AtomicExpr::AO__atomic_load_n ||
4572  Op == AtomicExpr::AO__atomic_store_n ||
4573  Op == AtomicExpr::AO__atomic_exchange_n ||
4574  Op == AtomicExpr::AO__atomic_compare_exchange_n;
4575  bool IsAddSub = false;
4576  bool IsMinMax = false;
4577 
4578  switch (Op) {
4579  case AtomicExpr::AO__c11_atomic_init:
4580  case AtomicExpr::AO__opencl_atomic_init:
4581  Form = Init;
4582  break;
4583 
4584  case AtomicExpr::AO__c11_atomic_load:
4585  case AtomicExpr::AO__opencl_atomic_load:
4586  case AtomicExpr::AO__atomic_load_n:
4587  Form = Load;
4588  break;
4589 
4590  case AtomicExpr::AO__atomic_load:
4591  Form = LoadCopy;
4592  break;
4593 
4594  case AtomicExpr::AO__c11_atomic_store:
4595  case AtomicExpr::AO__opencl_atomic_store:
4596  case AtomicExpr::AO__atomic_store:
4597  case AtomicExpr::AO__atomic_store_n:
4598  Form = Copy;
4599  break;
4600 
4601  case AtomicExpr::AO__c11_atomic_fetch_add:
4602  case AtomicExpr::AO__c11_atomic_fetch_sub:
4603  case AtomicExpr::AO__opencl_atomic_fetch_add:
4604  case AtomicExpr::AO__opencl_atomic_fetch_sub:
4605  case AtomicExpr::AO__opencl_atomic_fetch_min:
4606  case AtomicExpr::AO__opencl_atomic_fetch_max:
4607  case AtomicExpr::AO__atomic_fetch_add:
4608  case AtomicExpr::AO__atomic_fetch_sub:
4609  case AtomicExpr::AO__atomic_add_fetch:
4610  case AtomicExpr::AO__atomic_sub_fetch:
4611  IsAddSub = true;
4612  LLVM_FALLTHROUGH;
4613  case AtomicExpr::AO__c11_atomic_fetch_and:
4614  case AtomicExpr::AO__c11_atomic_fetch_or:
4615  case AtomicExpr::AO__c11_atomic_fetch_xor:
4616  case AtomicExpr::AO__opencl_atomic_fetch_and:
4617  case AtomicExpr::AO__opencl_atomic_fetch_or:
4618  case AtomicExpr::AO__opencl_atomic_fetch_xor:
4619  case AtomicExpr::AO__atomic_fetch_and:
4620  case AtomicExpr::AO__atomic_fetch_or:
4621  case AtomicExpr::AO__atomic_fetch_xor:
4622  case AtomicExpr::AO__atomic_fetch_nand:
4623  case AtomicExpr::AO__atomic_and_fetch:
4624  case AtomicExpr::AO__atomic_or_fetch:
4625  case AtomicExpr::AO__atomic_xor_fetch:
4626  case AtomicExpr::AO__atomic_nand_fetch:
4627  Form = Arithmetic;
4628  break;
4629 
4630  case AtomicExpr::AO__atomic_fetch_min:
4631  case AtomicExpr::AO__atomic_fetch_max:
4632  IsMinMax = true;
4633  Form = Arithmetic;
4634  break;
4635 
4636  case AtomicExpr::AO__c11_atomic_exchange:
4637  case AtomicExpr::AO__opencl_atomic_exchange:
4638  case AtomicExpr::AO__atomic_exchange_n:
4639  Form = Xchg;
4640  break;
4641 
4642  case AtomicExpr::AO__atomic_exchange:
4643  Form = GNUXchg;
4644  break;
4645 
4646  case AtomicExpr::AO__c11_atomic_compare_exchange_strong:
4647  case AtomicExpr::AO__c11_atomic_compare_exchange_weak:
4648  case AtomicExpr::AO__opencl_atomic_compare_exchange_strong:
4649  case AtomicExpr::AO__opencl_atomic_compare_exchange_weak:
4650  Form = C11CmpXchg;
4651  break;
4652 
4653  case AtomicExpr::AO__atomic_compare_exchange:
4654  case AtomicExpr::AO__atomic_compare_exchange_n:
4655  Form = GNUCmpXchg;
4656  break;
4657  }
4658 
4659  unsigned AdjustedNumArgs = NumArgs[Form];
4660  if (IsOpenCL && Op != AtomicExpr::AO__opencl_atomic_init)
4661  ++AdjustedNumArgs;
4662  // Check we have the right number of arguments.
4663  if (Args.size() < AdjustedNumArgs) {
4664  Diag(CallRange.getEnd(), diag::err_typecheck_call_too_few_args)
4665  << 0 << AdjustedNumArgs << static_cast<unsigned>(Args.size())
4666  << ExprRange;
4667  return ExprError();
4668  } else if (Args.size() > AdjustedNumArgs) {
4669  Diag(Args[AdjustedNumArgs]->getBeginLoc(),
4670  diag::err_typecheck_call_too_many_args)
4671  << 0 << AdjustedNumArgs << static_cast<unsigned>(Args.size())
4672  << ExprRange;
4673  return ExprError();
4674  }
4675 
4676  // Inspect the first argument of the atomic operation.
4677  Expr *Ptr = Args[0];
4678  ExprResult ConvertedPtr = DefaultFunctionArrayLvalueConversion(Ptr);
4679  if (ConvertedPtr.isInvalid())
4680  return ExprError();
4681 
4682  Ptr = ConvertedPtr.get();
4683  const PointerType *pointerType = Ptr->getType()->getAs<PointerType>();
4684  if (!pointerType) {
4685  Diag(ExprRange.getBegin(), diag::err_atomic_builtin_must_be_pointer)
4686  << Ptr->getType() << Ptr->getSourceRange();
4687  return ExprError();
4688  }
4689 
4690  // For a __c11 builtin, this should be a pointer to an _Atomic type.
4691  QualType AtomTy = pointerType->getPointeeType(); // 'A'
4692  QualType ValType = AtomTy; // 'C'
4693  if (IsC11) {
4694  if (!AtomTy->isAtomicType()) {
4695  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_atomic)
4696  << Ptr->getType() << Ptr->getSourceRange();
4697  return ExprError();
4698  }
 4699  if ((Form != Load && Form != LoadCopy && AtomTy.isConstQualified()) ||
 4700  AtomTy.getAddressSpace() == LangAS::opencl_constant) {
4701  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_non_const_atomic)
4702  << (AtomTy.isConstQualified() ? 0 : 1) << Ptr->getType()
4703  << Ptr->getSourceRange();
4704  return ExprError();
4705  }
4706  ValType = AtomTy->castAs<AtomicType>()->getValueType();
4707  } else if (Form != Load && Form != LoadCopy) {
4708  if (ValType.isConstQualified()) {
4709  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_non_const_pointer)
4710  << Ptr->getType() << Ptr->getSourceRange();
4711  return ExprError();
4712  }
4713  }
4714 
4715  // For an arithmetic operation, the implied arithmetic must be well-formed.
4716  if (Form == Arithmetic) {
4717  // gcc does not enforce these rules for GNU atomics, but we do so for sanity.
4718  if (IsAddSub && !ValType->isIntegerType()
4719  && !ValType->isPointerType()) {
4720  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_atomic_int_or_ptr)
4721  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4722  return ExprError();
4723  }
4724  if (IsMinMax) {
4725  const BuiltinType *BT = ValType->getAs<BuiltinType>();
4726  if (!BT || (BT->getKind() != BuiltinType::Int &&
4727  BT->getKind() != BuiltinType::UInt)) {
4728  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_int32_or_ptr);
4729  return ExprError();
4730  }
4731  }
4732  if (!IsAddSub && !IsMinMax && !ValType->isIntegerType()) {
4733  Diag(ExprRange.getBegin(), diag::err_atomic_op_bitwise_needs_atomic_int)
4734  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4735  return ExprError();
4736  }
4737  if (IsC11 && ValType->isPointerType() &&
4738  RequireCompleteType(Ptr->getBeginLoc(), ValType->getPointeeType(),
4739  diag::err_incomplete_type)) {
4740  return ExprError();
4741  }
4742  } else if (IsN && !ValType->isIntegerType() && !ValType->isPointerType()) {
4743  // For __atomic_*_n operations, the value type must be a scalar integral or
4744  // pointer type which is 1, 2, 4, 8 or 16 bytes in length.
4745  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_atomic_int_or_ptr)
4746  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4747  return ExprError();
4748  }
4749 
4750  if (!IsC11 && !AtomTy.isTriviallyCopyableType(Context) &&
4751  !AtomTy->isScalarType()) {
4752  // For GNU atomics, require a trivially-copyable type. This is not part of
4753  // the GNU atomics specification, but we enforce it for sanity.
4754  Diag(ExprRange.getBegin(), diag::err_atomic_op_needs_trivial_copy)
4755  << Ptr->getType() << Ptr->getSourceRange();
4756  return ExprError();
4757  }
4758 
4759  switch (ValType.getObjCLifetime()) {
 4760  case Qualifiers::OCL_None:
 4761  case Qualifiers::OCL_ExplicitNone:
4762  // okay
4763  break;
4764 
 4765  case Qualifiers::OCL_Weak:
 4766  case Qualifiers::OCL_Strong:
 4767  case Qualifiers::OCL_Autoreleasing:
4768  // FIXME: Can this happen? By this point, ValType should be known
4769  // to be trivially copyable.
4770  Diag(ExprRange.getBegin(), diag::err_arc_atomic_ownership)
4771  << ValType << Ptr->getSourceRange();
4772  return ExprError();
4773  }
4774 
4775  // All atomic operations have an overload which takes a pointer to a volatile
4776  // 'A'. We shouldn't let the volatile-ness of the pointee-type inject itself
4777  // into the result or the other operands. Similarly atomic_load takes a
4778  // pointer to a const 'A'.
4779  ValType.removeLocalVolatile();
4780  ValType.removeLocalConst();
4781  QualType ResultType = ValType;
4782  if (Form == Copy || Form == LoadCopy || Form == GNUXchg ||
4783  Form == Init)
4784  ResultType = Context.VoidTy;
4785  else if (Form == C11CmpXchg || Form == GNUCmpXchg)
4786  ResultType = Context.BoolTy;
4787 
4788  // The type of a parameter passed 'by value'. In the GNU atomics, such
4789  // arguments are actually passed as pointers.
4790  QualType ByValType = ValType; // 'CP'
4791  bool IsPassedByAddress = false;
4792  if (!IsC11 && !IsN) {
4793  ByValType = Ptr->getType();
4794  IsPassedByAddress = true;
4795  }
4796 
4797  SmallVector<Expr *, 5> APIOrderedArgs;
4798  if (ArgOrder == Sema::AtomicArgumentOrder::AST) {
4799  APIOrderedArgs.push_back(Args[0]);
4800  switch (Form) {
4801  case Init:
4802  case Load:
4803  APIOrderedArgs.push_back(Args[1]); // Val1/Order
4804  break;
4805  case LoadCopy:
4806  case Copy:
4807  case Arithmetic:
4808  case Xchg:
4809  APIOrderedArgs.push_back(Args[2]); // Val1
4810  APIOrderedArgs.push_back(Args[1]); // Order
4811  break;
4812  case GNUXchg:
4813  APIOrderedArgs.push_back(Args[2]); // Val1
4814  APIOrderedArgs.push_back(Args[3]); // Val2
4815  APIOrderedArgs.push_back(Args[1]); // Order
4816  break;
4817  case C11CmpXchg:
4818  APIOrderedArgs.push_back(Args[2]); // Val1
4819  APIOrderedArgs.push_back(Args[4]); // Val2
4820  APIOrderedArgs.push_back(Args[1]); // Order
4821  APIOrderedArgs.push_back(Args[3]); // OrderFail
4822  break;
4823  case GNUCmpXchg:
4824  APIOrderedArgs.push_back(Args[2]); // Val1
4825  APIOrderedArgs.push_back(Args[4]); // Val2
4826  APIOrderedArgs.push_back(Args[5]); // Weak
4827  APIOrderedArgs.push_back(Args[1]); // Order
4828  APIOrderedArgs.push_back(Args[3]); // OrderFail
4829  break;
4830  }
4831  } else
4832  APIOrderedArgs.append(Args.begin(), Args.end());
4833 
4834  // The first argument's non-CV pointer type is used to deduce the type of
4835  // subsequent arguments, except for:
4836  // - weak flag (always converted to bool)
4837  // - memory order (always converted to int)
4838  // - scope (always converted to int)
4839  for (unsigned i = 0; i != APIOrderedArgs.size(); ++i) {
4840  QualType Ty;
4841  if (i < NumVals[Form] + 1) {
4842  switch (i) {
4843  case 0:
4844  // The first argument is always a pointer. It has a fixed type.
 4845  // It is always dereferenced; passing a null pointer is undefined.
4846  CheckNonNullArgument(*this, APIOrderedArgs[i], ExprRange.getBegin());
4847  // Nothing else to do: we already know all we want about this pointer.
4848  continue;
4849  case 1:
4850  // The second argument is the non-atomic operand. For arithmetic, this
4851  // is always passed by value, and for a compare_exchange it is always
4852  // passed by address. For the rest, GNU uses by-address and C11 uses
4853  // by-value.
4854  assert(Form != Load);
4855  if (Form == Init || (Form == Arithmetic && ValType->isIntegerType()))
4856  Ty = ValType;
4857  else if (Form == Copy || Form == Xchg) {
4858  if (IsPassedByAddress) {
 4859  // The value pointer is always dereferenced; passing a null pointer is undefined.
4860  CheckNonNullArgument(*this, APIOrderedArgs[i],
4861  ExprRange.getBegin());
4862  }
4863  Ty = ByValType;
4864  } else if (Form == Arithmetic)
4865  Ty = Context.getPointerDiffType();
4866  else {
4867  Expr *ValArg = APIOrderedArgs[i];
 4868  // The value pointer is always dereferenced; passing a null pointer is undefined.
4869  CheckNonNullArgument(*this, ValArg, ExprRange.getBegin());
4870  LangAS AS = LangAS::Default;
4871  // Keep address space of non-atomic pointer type.
4872  if (const PointerType *PtrTy =
4873  ValArg->getType()->getAs<PointerType>()) {
4874  AS = PtrTy->getPointeeType().getAddressSpace();
4875  }
4876  Ty = Context.getPointerType(
4877  Context.getAddrSpaceQualType(ValType.getUnqualifiedType(), AS));
4878  }
4879  break;
4880  case 2:
4881  // The third argument to compare_exchange / GNU exchange is the desired
4882  // value, either by-value (for the C11 and *_n variant) or as a pointer.
4883  if (IsPassedByAddress)
4884  CheckNonNullArgument(*this, APIOrderedArgs[i], ExprRange.getBegin());
4885  Ty = ByValType;
4886  break;
4887  case 3:
4888  // The fourth argument to GNU compare_exchange is a 'weak' flag.
4889  Ty = Context.BoolTy;
4890  break;
4891  }
4892  } else {
4893  // The order(s) and scope are always converted to int.
4894  Ty = Context.IntTy;
4895  }
4896 
4897  InitializedEntity Entity =
4898  InitializedEntity::InitializeParameter(Context, Ty, false);
4899  ExprResult Arg = APIOrderedArgs[i];
4900  Arg = PerformCopyInitialization(Entity, SourceLocation(), Arg);
4901  if (Arg.isInvalid())
4902  return true;
4903  APIOrderedArgs[i] = Arg.get();
4904  }
4905 
4906  // Permute the arguments into a 'consistent' order.
4907  SmallVector<Expr*, 5> SubExprs;
4908  SubExprs.push_back(Ptr);
4909  switch (Form) {
4910  case Init:
4911  // Note, AtomicExpr::getVal1() has a special case for this atomic.
4912  SubExprs.push_back(APIOrderedArgs[1]); // Val1
4913  break;
4914  case Load:
4915  SubExprs.push_back(APIOrderedArgs[1]); // Order
4916  break;
4917  case LoadCopy:
4918  case Copy:
4919  case Arithmetic:
4920  case Xchg:
4921  SubExprs.push_back(APIOrderedArgs[2]); // Order
4922  SubExprs.push_back(APIOrderedArgs[1]); // Val1
4923  break;
4924  case GNUXchg:
4925  // Note, AtomicExpr::getVal2() has a special case for this atomic.
4926  SubExprs.push_back(APIOrderedArgs[3]); // Order
4927  SubExprs.push_back(APIOrderedArgs[1]); // Val1
4928  SubExprs.push_back(APIOrderedArgs[2]); // Val2
4929  break;
4930  case C11CmpXchg:
4931  SubExprs.push_back(APIOrderedArgs[3]); // Order
4932  SubExprs.push_back(APIOrderedArgs[1]); // Val1
4933  SubExprs.push_back(APIOrderedArgs[4]); // OrderFail
4934  SubExprs.push_back(APIOrderedArgs[2]); // Val2
4935  break;
4936  case GNUCmpXchg:
4937  SubExprs.push_back(APIOrderedArgs[4]); // Order
4938  SubExprs.push_back(APIOrderedArgs[1]); // Val1
4939  SubExprs.push_back(APIOrderedArgs[5]); // OrderFail
4940  SubExprs.push_back(APIOrderedArgs[2]); // Val2
4941  SubExprs.push_back(APIOrderedArgs[3]); // Weak
4942  break;
4943  }
4944 
4945  if (SubExprs.size() >= 2 && Form != Init) {
4946  llvm::APSInt Result(32);
4947  if (SubExprs[1]->isIntegerConstantExpr(Result, Context) &&
4948  !isValidOrderingForOp(Result.getSExtValue(), Op))
4949  Diag(SubExprs[1]->getBeginLoc(),
4950  diag::warn_atomic_op_has_invalid_memory_order)
4951  << SubExprs[1]->getSourceRange();
4952  }
4953 
4954  if (auto ScopeModel = AtomicExpr::getScopeModel(Op)) {
4955  auto *Scope = Args[Args.size() - 1];
4956  llvm::APSInt Result(32);
4957  if (Scope->isIntegerConstantExpr(Result, Context) &&
4958  !ScopeModel->isValid(Result.getZExtValue())) {
4959  Diag(Scope->getBeginLoc(), diag::err_atomic_op_has_invalid_synch_scope)
4960  << Scope->getSourceRange();
4961  }
4962  SubExprs.push_back(Scope);
4963  }
4964 
4965  AtomicExpr *AE = new (Context)
4966  AtomicExpr(ExprRange.getBegin(), SubExprs, ResultType, Op, RParenLoc);
4967 
4968  if ((Op == AtomicExpr::AO__c11_atomic_load ||
4969  Op == AtomicExpr::AO__c11_atomic_store ||
4970  Op == AtomicExpr::AO__opencl_atomic_load ||
4971  Op == AtomicExpr::AO__opencl_atomic_store ) &&
4972  Context.AtomicUsesUnsupportedLibcall(AE))
4973  Diag(AE->getBeginLoc(), diag::err_atomic_load_store_uses_lib)
4974  << ((Op == AtomicExpr::AO__c11_atomic_load ||
4975  Op == AtomicExpr::AO__opencl_atomic_load)
4976  ? 0
4977  : 1);
4978 
4979  return AE;
4980 }
4981 
4982 /// checkBuiltinArgument - Given a call to a builtin function, perform
4983 /// normal type-checking on the given argument, updating the call in
4984 /// place. This is useful when a builtin function requires custom
4985 /// type-checking for some of its arguments but not necessarily all of
4986 /// them.
4987 ///
4988 /// Returns true on error.
4989 static bool checkBuiltinArgument(Sema &S, CallExpr *E, unsigned ArgIndex) {
4990  FunctionDecl *Fn = E->getDirectCallee();
4991  assert(Fn && "builtin call without direct callee!");
4992 
4993  ParmVarDecl *Param = Fn->getParamDecl(ArgIndex);
 4994  InitializedEntity Entity =
 4995  InitializedEntity::InitializeParameter(S.Context, Param);
4996 
4997  ExprResult Arg = E->getArg(0);
4998  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
4999  if (Arg.isInvalid())
5000  return true;
5001 
5002  E->setArg(ArgIndex, Arg.get());
5003  return false;
5004 }
5005 
5006 /// We have a call to a function like __sync_fetch_and_add, which is an
5007 /// overloaded function based on the pointer type of its first argument.
5008 /// The main BuildCallExpr routines have already promoted the types of
5009 /// arguments because all of these calls are prototyped as void(...).
5010 ///
5011 /// This function goes through and does final semantic checking for these
5012 /// builtins, as well as generating any warnings.
5013 ExprResult
5014 Sema::SemaBuiltinAtomicOverloaded(ExprResult TheCallResult) {
5015  CallExpr *TheCall = static_cast<CallExpr *>(TheCallResult.get());
5016  Expr *Callee = TheCall->getCallee();
5017  DeclRefExpr *DRE = cast<DeclRefExpr>(Callee->IgnoreParenCasts());
5018  FunctionDecl *FDecl = cast<FunctionDecl>(DRE->getDecl());
5019 
5020  // Ensure that we have at least one argument to do type inference from.
5021  if (TheCall->getNumArgs() < 1) {
5022  Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
5023  << 0 << 1 << TheCall->getNumArgs() << Callee->getSourceRange();
5024  return ExprError();
5025  }
5026 
5027  // Inspect the first argument of the atomic builtin. This should always be
5028  // a pointer type, whose element is an integral scalar or pointer type.
5029  // Because it is a pointer type, we don't have to worry about any implicit
5030  // casts here.
5031  // FIXME: We don't allow floating point scalars as input.
5032  Expr *FirstArg = TheCall->getArg(0);
5033  ExprResult FirstArgResult = DefaultFunctionArrayLvalueConversion(FirstArg);
5034  if (FirstArgResult.isInvalid())
5035  return ExprError();
5036  FirstArg = FirstArgResult.get();
5037  TheCall->setArg(0, FirstArg);
5038 
5039  const PointerType *pointerType = FirstArg->getType()->getAs<PointerType>();
5040  if (!pointerType) {
5041  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer)
5042  << FirstArg->getType() << FirstArg->getSourceRange();
5043  return ExprError();
5044  }
5045 
5046  QualType ValType = pointerType->getPointeeType();
5047  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
5048  !ValType->isBlockPointerType()) {
5049  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer_intptr)
5050  << FirstArg->getType() << FirstArg->getSourceRange();
5051  return ExprError();
5052  }
5053 
5054  if (ValType.isConstQualified()) {
5055  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_cannot_be_const)
5056  << FirstArg->getType() << FirstArg->getSourceRange();
5057  return ExprError();
5058  }
5059 
5060  switch (ValType.getObjCLifetime()) {
5061  case Qualifiers::OCL_None:
5062  case Qualifiers::OCL_ExplicitNone:
5063  // okay
5064  break;
5065 
5066  case Qualifiers::OCL_Weak:
5067  case Qualifiers::OCL_Strong:
5068  case Qualifiers::OCL_Autoreleasing:
5069  Diag(DRE->getBeginLoc(), diag::err_arc_atomic_ownership)
5070  << ValType << FirstArg->getSourceRange();
5071  return ExprError();
5072  }
5073 
5074  // Strip any qualifiers off ValType.
5075  ValType = ValType.getUnqualifiedType();
5076 
5077  // The majority of builtins return a value, but a few have special return
5078  // types, so allow them to override appropriately below.
5079  QualType ResultType = ValType;
5080 
5081  // We need to figure out which concrete builtin this maps onto. For example,
5082  // __sync_fetch_and_add with a 2 byte object turns into
5083  // __sync_fetch_and_add_2.
5084 #define BUILTIN_ROW(x) \
5085  { Builtin::BI##x##_1, Builtin::BI##x##_2, Builtin::BI##x##_4, \
5086  Builtin::BI##x##_8, Builtin::BI##x##_16 }
5087 
5088  static const unsigned BuiltinIndices[][5] = {
5089  BUILTIN_ROW(__sync_fetch_and_add),
5090  BUILTIN_ROW(__sync_fetch_and_sub),
5091  BUILTIN_ROW(__sync_fetch_and_or),
5092  BUILTIN_ROW(__sync_fetch_and_and),
5093  BUILTIN_ROW(__sync_fetch_and_xor),
5094  BUILTIN_ROW(__sync_fetch_and_nand),
5095 
5096  BUILTIN_ROW(__sync_add_and_fetch),
5097  BUILTIN_ROW(__sync_sub_and_fetch),
5098  BUILTIN_ROW(__sync_and_and_fetch),
5099  BUILTIN_ROW(__sync_or_and_fetch),
5100  BUILTIN_ROW(__sync_xor_and_fetch),
5101  BUILTIN_ROW(__sync_nand_and_fetch),
5102 
5103  BUILTIN_ROW(__sync_val_compare_and_swap),
5104  BUILTIN_ROW(__sync_bool_compare_and_swap),
5105  BUILTIN_ROW(__sync_lock_test_and_set),
5106  BUILTIN_ROW(__sync_lock_release),
5107  BUILTIN_ROW(__sync_swap)
5108  };
5109 #undef BUILTIN_ROW
5110 
5111  // Determine the index of the size.
5112  unsigned SizeIndex;
5113  switch (Context.getTypeSizeInChars(ValType).getQuantity()) {
5114  case 1: SizeIndex = 0; break;
5115  case 2: SizeIndex = 1; break;
5116  case 4: SizeIndex = 2; break;
5117  case 8: SizeIndex = 3; break;
5118  case 16: SizeIndex = 4; break;
5119  default:
5120  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_pointer_size)
5121  << FirstArg->getType() << FirstArg->getSourceRange();
5122  return ExprError();
5123  }
5124 
5125  // Each of these builtins has one pointer argument, followed by some number of
5126  // values (0, 1 or 2) followed by a potentially empty varargs list of stuff
5127  // that we ignore. Find out which row of BuiltinIndices to read from as well
5128  // as the number of fixed args.
5129  unsigned BuiltinID = FDecl->getBuiltinID();
5130  unsigned BuiltinIndex, NumFixed = 1;
5131  bool WarnAboutSemanticsChange = false;
5132  switch (BuiltinID) {
5133  default: llvm_unreachable("Unknown overloaded atomic builtin!");
5134  case Builtin::BI__sync_fetch_and_add:
5135  case Builtin::BI__sync_fetch_and_add_1:
5136  case Builtin::BI__sync_fetch_and_add_2:
5137  case Builtin::BI__sync_fetch_and_add_4:
5138  case Builtin::BI__sync_fetch_and_add_8:
5139  case Builtin::BI__sync_fetch_and_add_16:
5140  BuiltinIndex = 0;
5141  break;
5142 
5143  case Builtin::BI__sync_fetch_and_sub:
5144  case Builtin::BI__sync_fetch_and_sub_1:
5145  case Builtin::BI__sync_fetch_and_sub_2:
5146  case Builtin::BI__sync_fetch_and_sub_4:
5147  case Builtin::BI__sync_fetch_and_sub_8:
5148  case Builtin::BI__sync_fetch_and_sub_16:
5149  BuiltinIndex = 1;
5150  break;
5151 
5152  case Builtin::BI__sync_fetch_and_or:
5153  case Builtin::BI__sync_fetch_and_or_1:
5154  case Builtin::BI__sync_fetch_and_or_2:
5155  case Builtin::BI__sync_fetch_and_or_4:
5156  case Builtin::BI__sync_fetch_and_or_8:
5157  case Builtin::BI__sync_fetch_and_or_16:
5158  BuiltinIndex = 2;
5159  break;
5160 
5161  case Builtin::BI__sync_fetch_and_and:
5162  case Builtin::BI__sync_fetch_and_and_1:
5163  case Builtin::BI__sync_fetch_and_and_2:
5164  case Builtin::BI__sync_fetch_and_and_4:
5165  case Builtin::BI__sync_fetch_and_and_8:
5166  case Builtin::BI__sync_fetch_and_and_16:
5167  BuiltinIndex = 3;
5168  break;
5169 
5170  case Builtin::BI__sync_fetch_and_xor:
5171  case Builtin::BI__sync_fetch_and_xor_1:
5172  case Builtin::BI__sync_fetch_and_xor_2:
5173  case Builtin::BI__sync_fetch_and_xor_4:
5174  case Builtin::BI__sync_fetch_and_xor_8:
5175  case Builtin::BI__sync_fetch_and_xor_16:
5176  BuiltinIndex = 4;
5177  break;
5178 
5179  case Builtin::BI__sync_fetch_and_nand:
5180  case Builtin::BI__sync_fetch_and_nand_1:
5181  case Builtin::BI__sync_fetch_and_nand_2:
5182  case Builtin::BI__sync_fetch_and_nand_4:
5183  case Builtin::BI__sync_fetch_and_nand_8:
5184  case Builtin::BI__sync_fetch_and_nand_16:
5185  BuiltinIndex = 5;
5186  WarnAboutSemanticsChange = true;
5187  break;
5188 
5189  case Builtin::BI__sync_add_and_fetch:
5190  case Builtin::BI__sync_add_and_fetch_1:
5191  case Builtin::BI__sync_add_and_fetch_2:
5192  case Builtin::BI__sync_add_and_fetch_4:
5193  case Builtin::BI__sync_add_and_fetch_8:
5194  case Builtin::BI__sync_add_and_fetch_16:
5195  BuiltinIndex = 6;
5196  break;
5197 
5198  case Builtin::BI__sync_sub_and_fetch:
5199  case Builtin::BI__sync_sub_and_fetch_1:
5200  case Builtin::BI__sync_sub_and_fetch_2:
5201  case Builtin::BI__sync_sub_and_fetch_4:
5202  case Builtin::BI__sync_sub_and_fetch_8:
5203  case Builtin::BI__sync_sub_and_fetch_16:
5204  BuiltinIndex = 7;
5205  break;
5206 
5207  case Builtin::BI__sync_and_and_fetch:
5208  case Builtin::BI__sync_and_and_fetch_1:
5209  case Builtin::BI__sync_and_and_fetch_2:
5210  case Builtin::BI__sync_and_and_fetch_4:
5211  case Builtin::BI__sync_and_and_fetch_8:
5212  case Builtin::BI__sync_and_and_fetch_16:
5213  BuiltinIndex = 8;
5214  break;
5215 
5216  case Builtin::BI__sync_or_and_fetch:
5217  case Builtin::BI__sync_or_and_fetch_1:
5218  case Builtin::BI__sync_or_and_fetch_2:
5219  case Builtin::BI__sync_or_and_fetch_4:
5220  case Builtin::BI__sync_or_and_fetch_8:
5221  case Builtin::BI__sync_or_and_fetch_16:
5222  BuiltinIndex = 9;
5223  break;
5224 
5225  case Builtin::BI__sync_xor_and_fetch:
5226  case Builtin::BI__sync_xor_and_fetch_1:
5227  case Builtin::BI__sync_xor_and_fetch_2:
5228  case Builtin::BI__sync_xor_and_fetch_4:
5229  case Builtin::BI__sync_xor_and_fetch_8:
5230  case Builtin::BI__sync_xor_and_fetch_16:
5231  BuiltinIndex = 10;
5232  break;
5233 
5234  case Builtin::BI__sync_nand_and_fetch:
5235  case Builtin::BI__sync_nand_and_fetch_1:
5236  case Builtin::BI__sync_nand_and_fetch_2:
5237  case Builtin::BI__sync_nand_and_fetch_4:
5238  case Builtin::BI__sync_nand_and_fetch_8:
5239  case Builtin::BI__sync_nand_and_fetch_16:
5240  BuiltinIndex = 11;
5241  WarnAboutSemanticsChange = true;
5242  break;
5243 
5244  case Builtin::BI__sync_val_compare_and_swap:
5245  case Builtin::BI__sync_val_compare_and_swap_1:
5246  case Builtin::BI__sync_val_compare_and_swap_2:
5247  case Builtin::BI__sync_val_compare_and_swap_4:
5248  case Builtin::BI__sync_val_compare_and_swap_8:
5249  case Builtin::BI__sync_val_compare_and_swap_16:
5250  BuiltinIndex = 12;
5251  NumFixed = 2;
5252  break;
5253 
5254  case Builtin::BI__sync_bool_compare_and_swap:
5255  case Builtin::BI__sync_bool_compare_and_swap_1:
5256  case Builtin::BI__sync_bool_compare_and_swap_2:
5257  case Builtin::BI__sync_bool_compare_and_swap_4:
5258  case Builtin::BI__sync_bool_compare_and_swap_8:
5259  case Builtin::BI__sync_bool_compare_and_swap_16:
5260  BuiltinIndex = 13;
5261  NumFixed = 2;
5262  ResultType = Context.BoolTy;
5263  break;
5264 
5265  case Builtin::BI__sync_lock_test_and_set:
5266  case Builtin::BI__sync_lock_test_and_set_1:
5267  case Builtin::BI__sync_lock_test_and_set_2:
5268  case Builtin::BI__sync_lock_test_and_set_4:
5269  case Builtin::BI__sync_lock_test_and_set_8:
5270  case Builtin::BI__sync_lock_test_and_set_16:
5271  BuiltinIndex = 14;
5272  break;
5273 
5274  case Builtin::BI__sync_lock_release:
5275  case Builtin::BI__sync_lock_release_1:
5276  case Builtin::BI__sync_lock_release_2:
5277  case Builtin::BI__sync_lock_release_4:
5278  case Builtin::BI__sync_lock_release_8:
5279  case Builtin::BI__sync_lock_release_16:
5280  BuiltinIndex = 15;
5281  NumFixed = 0;
5282  ResultType = Context.VoidTy;
5283  break;
5284 
5285  case Builtin::BI__sync_swap:
5286  case Builtin::BI__sync_swap_1:
5287  case Builtin::BI__sync_swap_2:
5288  case Builtin::BI__sync_swap_4:
5289  case Builtin::BI__sync_swap_8:
5290  case Builtin::BI__sync_swap_16:
5291  BuiltinIndex = 16;
5292  break;
5293  }
5294 
5295  // Now that we know how many fixed arguments we expect, first check that we
5296  // have at least that many.
5297  if (TheCall->getNumArgs() < 1+NumFixed) {
5298  Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
5299  << 0 << 1 + NumFixed << TheCall->getNumArgs()
5300  << Callee->getSourceRange();
5301  return ExprError();
5302  }
5303 
5304  Diag(TheCall->getEndLoc(), diag::warn_atomic_implicit_seq_cst)
5305  << Callee->getSourceRange();
5306 
5307  if (WarnAboutSemanticsChange) {
5308  Diag(TheCall->getEndLoc(), diag::warn_sync_fetch_and_nand_semantics_change)
5309  << Callee->getSourceRange();
5310  }
5311 
5312  // Get the decl for the concrete builtin; from it we can tell which
5313  // concrete integer type we should convert to.
5314  unsigned NewBuiltinID = BuiltinIndices[BuiltinIndex][SizeIndex];
5315  const char *NewBuiltinName = Context.BuiltinInfo.getName(NewBuiltinID);
5316  FunctionDecl *NewBuiltinDecl;
5317  if (NewBuiltinID == BuiltinID)
5318  NewBuiltinDecl = FDecl;
5319  else {
5320  // Perform builtin lookup to avoid redeclaring it.
5321  DeclarationName DN(&Context.Idents.get(NewBuiltinName));
5322  LookupResult Res(*this, DN, DRE->getBeginLoc(), LookupOrdinaryName);
5323  LookupName(Res, TUScope, /*AllowBuiltinCreation=*/true);
5324  assert(Res.getFoundDecl());
5325  NewBuiltinDecl = dyn_cast<FunctionDecl>(Res.getFoundDecl());
5326  if (!NewBuiltinDecl)
5327  return ExprError();
5328  }
5329 
5330  // The first argument --- the pointer --- has a fixed type; we
5331  // deduce the types of the rest of the arguments accordingly. Walk
5332  // the remaining arguments, converting them to the deduced value type.
5333  for (unsigned i = 0; i != NumFixed; ++i) {
5334  ExprResult Arg = TheCall->getArg(i+1);
5335 
5336  // GCC does an implicit conversion to the pointer or integer ValType. This
5337  // can fail in some cases (1i -> int**); check for this error case now.
5338  // Initialize the argument.
5339  InitializedEntity Entity = InitializedEntity::InitializeParameter(Context,
5340  ValType, /*consume*/ false);
5341  Arg = PerformCopyInitialization(Entity, SourceLocation(), Arg);
5342  if (Arg.isInvalid())
5343  return ExprError();
5344 
5345  // Okay, we have something that *can* be converted to the right type. Check
5346  // to see if there is a potentially weird extension going on here. This can
5347  // happen when you do an atomic operation on something like a char* and
5348  // pass in 42. The 42 gets converted to char. This is even stranger
5349  // for things like 45.123 -> char, etc.
5350  // FIXME: Do this check.
5351  TheCall->setArg(i+1, Arg.get());
5352  }
5353 
5354  // Create a new DeclRefExpr to refer to the new decl.
5355  DeclRefExpr *NewDRE = DeclRefExpr::Create(
5356  Context, DRE->getQualifierLoc(), SourceLocation(), NewBuiltinDecl,
5357  /*enclosing*/ false, DRE->getLocation(), Context.BuiltinFnTy,
5358  DRE->getValueKind(), nullptr, nullptr, DRE->isNonOdrUse());
5359 
5360  // Set the callee in the CallExpr.
5361  // FIXME: This loses syntactic information.
5362  QualType CalleePtrTy = Context.getPointerType(NewBuiltinDecl->getType());
5363  ExprResult PromotedCall = ImpCastExprToType(NewDRE, CalleePtrTy,
5364  CK_BuiltinFnToFnPtr);
5365  TheCall->setCallee(PromotedCall.get());
5366 
5367  // Change the result type of the call to match the original value type. This
5368  // is arbitrary, but the codegen for these builtins is designed to handle it
5369  // gracefully.
5370  TheCall->setType(ResultType);
5371 
5372  return TheCallResult;
5373 }
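A user-level sketch of the rewriting performed above (illustrative only, not part of this file; identifiers are hypothetical): the generic __sync_* form is resolved to the width-specific builtin from the pointee size, and the remaining fixed arguments are converted to that pointee type.

  short Counter16;                        // 2-byte pointee on typical targets

  int bump(void) {
    // Resolved to __sync_fetch_and_add_2; the int literal 1 is converted to
    // short, the deduced value type.
    return __sync_fetch_and_add(&Counter16, 1);
  }

  // const short *ReadOnly = &Counter16;
  // __sync_fetch_and_add(ReadOnly, 1);   // rejected: pointee must not be const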
5374 
5375 /// SemaBuiltinNontemporalOverloaded - We have a call to
5376 /// __builtin_nontemporal_store or __builtin_nontemporal_load, which is an
5377 /// overloaded function based on the pointer type of its last argument.
5378 ///
5379 /// This function goes through and does final semantic checking for these
5380 /// builtins.
5381 ExprResult Sema::SemaBuiltinNontemporalOverloaded(ExprResult TheCallResult) {
5382  CallExpr *TheCall = static_cast<CallExpr *>(TheCallResult.get());
5383  DeclRefExpr *DRE =
5384  cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
5385  FunctionDecl *FDecl = cast<FunctionDecl>(DRE->getDecl());
5386  unsigned BuiltinID = FDecl->getBuiltinID();
5387  assert((BuiltinID == Builtin::BI__builtin_nontemporal_store ||
5388  BuiltinID == Builtin::BI__builtin_nontemporal_load) &&
5389  "Unexpected nontemporal load/store builtin!");
5390  bool isStore = BuiltinID == Builtin::BI__builtin_nontemporal_store;
5391  unsigned numArgs = isStore ? 2 : 1;
5392 
5393  // Ensure that we have the proper number of arguments.
5394  if (checkArgCount(*this, TheCall, numArgs))
5395  return ExprError();
5396 
5397  // Inspect the last argument of the nontemporal builtin. This should always
5398  // be a pointer type, from which we infer the type of the memory access.
5399  // Because it is a pointer type, we don't have to worry about any implicit
5400  // casts here.
5401  Expr *PointerArg = TheCall->getArg(numArgs - 1);
5402  ExprResult PointerArgResult =
5403  DefaultFunctionArrayLvalueConversion(PointerArg);
5404 
5405  if (PointerArgResult.isInvalid())
5406  return ExprError();
5407  PointerArg = PointerArgResult.get();
5408  TheCall->setArg(numArgs - 1, PointerArg);
5409 
5410  const PointerType *pointerType = PointerArg->getType()->getAs<PointerType>();
5411  if (!pointerType) {
5412  Diag(DRE->getBeginLoc(), diag::err_nontemporal_builtin_must_be_pointer)
5413  << PointerArg->getType() << PointerArg->getSourceRange();
5414  return ExprError();
5415  }
5416 
5417  QualType ValType = pointerType->getPointeeType();
5418 
5419  // Strip any qualifiers off ValType.
5420  ValType = ValType.getUnqualifiedType();
5421  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
5422  !ValType->isBlockPointerType() && !ValType->isFloatingType() &&
5423  !ValType->isVectorType()) {
5424  Diag(DRE->getBeginLoc(),
5425  diag::err_nontemporal_builtin_must_be_pointer_intfltptr_or_vector)
5426  << PointerArg->getType() << PointerArg->getSourceRange();
5427  return ExprError();
5428  }
5429 
5430  if (!isStore) {
5431  TheCall->setType(ValType);
5432  return TheCallResult;
5433  }
5434 
5435  ExprResult ValArg = TheCall->getArg(0);
5436  InitializedEntity Entity = InitializedEntity::InitializeParameter(
5437  Context, ValType, /*consume*/ false);
5438  ValArg = PerformCopyInitialization(Entity, SourceLocation(), ValArg);
5439  if (ValArg.isInvalid())
5440  return ExprError();
5441 
5442  TheCall->setArg(0, ValArg.get());
5443  TheCall->setType(Context.VoidTy);
5444  return TheCallResult;
5445 }
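A short sketch of the overloading described above (illustrative only, not from this file): the access type is deduced from the pointer argument; the load form returns it, and the store form converts its value operand to it.

  void nontemporal_copy(float *Dst, float *Src) {
    float V = __builtin_nontemporal_load(Src);   // access type deduced as float
    __builtin_nontemporal_store(V, Dst);         // value first, pointer last
  }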
5446 
5447 /// CheckObjCString - Checks that the argument to the builtin
5448 /// CFString constructor is correct.
5449 /// Note: It might also make sense to do the UTF-16 conversion here (would
5450 /// simplify the backend).
5451 bool Sema::CheckObjCString(Expr *Arg) {
5452  Arg = Arg->IgnoreParenCasts();
5453  StringLiteral *Literal = dyn_cast<StringLiteral>(Arg);
5454 
5455  if (!Literal || !Literal->isAscii()) {
5456  Diag(Arg->getBeginLoc(), diag::err_cfstring_literal_not_string_constant)
5457  << Arg->getSourceRange();
5458  return true;
5459  }
5460 
5461  if (Literal->containsNonAsciiOrNull()) {
5462  StringRef String = Literal->getString();
5463  unsigned NumBytes = String.size();
5464  SmallVector<llvm::UTF16, 128> ToBuf(NumBytes);
5465  const llvm::UTF8 *FromPtr = (const llvm::UTF8 *)String.data();
5466  llvm::UTF16 *ToPtr = &ToBuf[0];
5467 
5468  llvm::ConversionResult Result =
5469  llvm::ConvertUTF8toUTF16(&FromPtr, FromPtr + NumBytes, &ToPtr,
5470  ToPtr + NumBytes, llvm::strictConversion);
5471  // Check for conversion failure.
5472  if (Result != llvm::conversionOK)
5473  Diag(Arg->getBeginLoc(), diag::warn_cfstring_truncated)
5474  << Arg->getSourceRange();
5475  }
5476  return false;
5477 }
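A rough illustration of the constraint checked above (not from this file; it assumes a target where the CFString constructor builtin is meaningful): the argument must be an ordinary narrow string literal, and literal bytes that do not convert cleanly to UTF-16 draw a truncation warning.

  const void *MakeStatus(void) {
    // Accepted: a plain narrow string literal.
    return __builtin___CFStringMakeConstantString("status: ready");
  }

  // extern const char *Dynamic;
  // __builtin___CFStringMakeConstantString(Dynamic);   // rejected: not a string constant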
5478 
5479 /// CheckOSLogFormatStringArg - Checks that the format string argument to the os_log()
5480 /// and os_trace() functions is correct, and converts it to const char *.
5481 ExprResult Sema::CheckOSLogFormatStringArg(Expr *Arg) {
5482  Arg = Arg->IgnoreParenCasts();
5483  auto *Literal = dyn_cast<StringLiteral>(Arg);
5484  if (!Literal) {
5485  if (auto *ObjcLiteral = dyn_cast<ObjCStringLiteral>(Arg)) {
5486  Literal = ObjcLiteral->getString();
5487  }
5488  }
5489 
5490  if (!Literal || (!Literal->isAscii() && !Literal->isUTF8())) {
5491  return ExprError(
5492  Diag(Arg->getBeginLoc(), diag::err_os_log_format_not_string_constant)
5493  << Arg->getSourceRange());
5494  }
5495 
5496  ExprResult Result(Literal);
5497  QualType ResultTy = Context.getPointerType(Context.CharTy.withConst());
5498  InitializedEntity Entity =
5499  InitializedEntity::InitializeParameter(Context, ResultTy, false);
5500  Result = PerformCopyInitialization(Entity, SourceLocation(), Result);
5501  return Result;
5502 }
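A sketch following the pattern in clang's documentation for the os_log builtins (illustrative only, not from this file): the format operand must be an ASCII or UTF-8 string literal, which the routine above converts to const char *.

  void log_value(int I) {
    char Buf[__builtin_os_log_format_buffer_size("value: %d", I)];
    __builtin_os_log_format(Buf, "value: %d", I);   // literal format: accepted
  }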
5503 
5504 /// Check that the user is calling the appropriate va_start builtin for the
5505 /// target and calling convention.
5506 static bool checkVAStartABI(Sema &S, unsigned BuiltinID, Expr *Fn) {
5507  const llvm::Triple &TT = S.Context.getTargetInfo().getTriple();
5508  bool IsX64 = TT.getArch() == llvm::Triple::x86_64;
5509  bool IsAArch64 = TT.getArch() == llvm::Triple::aarch64;
5510  bool IsWindows = TT.isOSWindows();
5511  bool IsMSVAStart = BuiltinID == Builtin::BI__builtin_ms_va_start;
5512  if (IsX64 || IsAArch64) {
5513  CallingConv CC = CC_C;
5514  if (const FunctionDecl *FD = S.getCurFunctionDecl())
5515  CC = FD->getType()->castAs<FunctionType>()->getCallConv();
5516  if (IsMSVAStart) {
5517  // Don't allow this in System V ABI functions.
5518  if (CC == CC_X86_64SysV || (!IsWindows && CC != CC_Win64))
5519  return S.Diag(Fn->getBeginLoc(),
5520  diag::err_ms_va_start_used_in_sysv_function);
5521  } else {
5522  // On x86-64/AArch64 Unix, don't allow this in Win64 ABI functions.
5523  // On x64 Windows, don't allow this in System V ABI functions.
5524  // (Yes, that means there's no corresponding way to support variadic
5525  // System V ABI functions on Windows.)
5526  if ((IsWindows && CC == CC_X86_64SysV) ||
5527  (!IsWindows && CC == CC_Win64))
5528  return S.Diag(Fn->getBeginLoc(),
5529  diag::err_va_start_used_in_wrong_abi_function)
5530  << !IsWindows;
5531  }
5532  return false;
5533  }
5534 
5535  if (IsMSVAStart)
5536  return S.Diag(Fn->getBeginLoc(), diag::err_builtin_x64_aarch64_only);
5537  return false;
5538 }
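An illustrative sketch of the ABI rule enforced above, assuming an x86-64 Linux target (not from this file; identifiers are hypothetical): a function using the Microsoft calling convention must use the __builtin_ms_va_* family, while an ordinary va_start there would be rejected.

  __attribute__((ms_abi)) int ms_variadic(int N, ...) {
    __builtin_ms_va_list Ap;
    __builtin_ms_va_start(Ap, N);   // accepted: the enclosing function is ms_abi
    // va_start(...) here would be diagnosed as using the wrong ABI.
    __builtin_ms_va_end(Ap);
    return N;
  }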
5539 
5540 static bool checkVAStartIsInVariadicFunction(Sema &S, Expr *Fn,
5541  ParmVarDecl **LastParam = nullptr) {
5542  // Determine whether the current function, block, or obj-c method is variadic
5543  // and get its parameter list.
5544  bool IsVariadic = false;
5545  ArrayRef<ParmVarDecl *> Params;
5546  DeclContext *Caller = S.CurContext;
5547  if (auto *Block = dyn_cast<BlockDecl>(Caller)) {
5548  IsVariadic = Block->isVariadic();
5549  Params = Block->parameters();
5550  } else if (auto *FD = dyn_cast<FunctionDecl>(Caller)) {
5551  IsVariadic = FD->isVariadic();
5552  Params = FD->parameters();
5553  } else if (auto *MD = dyn_cast<ObjCMethodDecl>(Caller)) {
5554  IsVariadic = MD->isVariadic();
5555  // FIXME: This isn't correct for methods (results in bogus warning).
5556  Params = MD->parameters();
5557  } else if (isa<CapturedDecl>(Caller)) {
5558  // We don't support va_start in a CapturedDecl.
5559  S.Diag(Fn->getBeginLoc(), diag::err_va_start_captured_stmt);
5560  return true;
5561  } else {
5562  // This must be some other declcontext that parses exprs.
5563  S.Diag(Fn->getBeginLoc(), diag::err_va_start_outside_function);
5564  return true;
5565  }
5566 
5567  if (!IsVariadic) {
5568  S.Diag(Fn->getBeginLoc(), diag::err_va_start_fixed_function);
5569  return true;
5570  }
5571 
5572  if (LastParam)
5573  *LastParam = Params.empty() ? nullptr : Params.back();
5574 
5575  return false;
5576 }
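A minimal sketch of the constraint above (not from this file): va_start is rejected in a function that takes no variadic arguments.

  #include <stdarg.h>

  void fixed_args(int Last) {
    va_list Ap;
    va_start(Ap, Last);   // rejected: enclosing function has fixed arguments
    va_end(Ap);
  }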
5577 
5578 /// Check the arguments to '__builtin_va_start' or '__builtin_ms_va_start'
5579 /// for validity. Emit an error and return true on failure; return false
5580 /// on success.
5581 bool Sema::SemaBuiltinVAStart(unsigned BuiltinID, CallExpr *TheCall) {
5582  Expr *Fn = TheCall->getCallee();
5583 
5584  if (checkVAStartABI(*this, BuiltinID, Fn))
5585  return true;
5586 
5587  if (TheCall->getNumArgs() > 2) {
5588  Diag(TheCall->getArg(2)->getBeginLoc(),
5589  diag::err_typecheck_call_too_many_args)
5590  << 0 /*function call*/ << 2 << TheCall->getNumArgs()
5591  << Fn->getSourceRange()
5592  << SourceRange(TheCall->getArg(2)->getBeginLoc(),
5593  (*(TheCall->arg_end() - 1))->getEndLoc());
5594  return true;
5595  }
5596 
5597  if (TheCall->getNumArgs() < 2) {
5598  return Diag(TheCall->getEndLoc(),
5599  diag::err_typecheck_call_too_few_args_at_least)
5600  << 0 /*function call*/ << 2 << TheCall->getNumArgs();
5601  }
5602 
5603  // Type-check the first argument normally.
5604  if (checkBuiltinArgument(*this, TheCall, 0))
5605  return true;
5606 
5607  // Check that the current function is variadic, and get its last parameter.
5608  ParmVarDecl *LastParam;
5609  if (checkVAStartIsInVariadicFunction(*this, Fn, &LastParam))
5610  return true;
5611 
5612  // Verify that the second argument to the builtin is the last argument of the
5613  // current function or method.
5614  bool SecondArgIsLastNamedArgument = false;
5615  const Expr *Arg = TheCall->getArg(1)->IgnoreParenCasts();
5616 
5617  // These are valid if SecondArgIsLastNamedArgument is false after the next
5618  // block.
5619  QualType Type;
5620  SourceLocation ParamLoc;
5621  bool IsCRegister = false;
5622 
5623  if (const DeclRefExpr *DR = dyn_cast<DeclRefExpr>(Arg)) {
5624  if (const ParmVarDecl *PV = dyn_cast<ParmVarDecl>(DR->getDecl())) {
5625  SecondArgIsLastNamedArgument = PV == LastParam;
5626 
5627  Type = PV->getType();
5628  ParamLoc = PV->getLocation();
5629  IsCRegister =
5630  PV->getStorageClass() == SC_Register && !getLangOpts().CPlusPlus;
5631  }
5632  }
5633 
5634  if (!SecondArgIsLastNamedArgument)
5635  Diag(TheCall->getArg(1)->getBeginLoc(),
5636  diag::warn_second_arg_of_va_start_not_last_named_param);
5637  else if (IsCRegister || Type->isReferenceType() ||
5638  Type->isSpecificBuiltinType(BuiltinType::Float) || [=] {
5639  // Promotable integers are UB, but enumerations need a bit of
5640  // extra checking to see what their promotable type actually is.
5641  if (!Type->isPromotableIntegerType())
5642  return false;
5643  if (!Type->isEnumeralType())
5644  return true;
5645  const EnumDecl *ED = Type->castAs<EnumType>()->getDecl();
5646  return !(ED &&
5647  Context.typesAreCompatible(ED->getPromotionType(), Type));
5648  }()) {
5649  unsigned Reason = 0;
5650  if (Type->isReferenceType()) Reason = 1;
5651  else if (IsCRegister) Reason = 2;
5652  Diag(Arg->getBeginLoc(), diag::warn_va_start_type_is_undefined) << Reason;
5653  Diag(ParamLoc, diag::note_parameter_type) << Type;
5654  }
5655 
5656  TheCall->setType(Context.VoidTy);
5657  return false;
5658 }
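A sketch of the second-argument checks above (not from this file; names are hypothetical): the argument should name the last named parameter, and a parameter type that undergoes default argument promotion makes the call undefined.

  #include <stdarg.h>

  void ok_usage(int Count, ...) {
    va_list Ap;
    va_start(Ap, Count);   // accepted: 'Count' is the last named parameter
    va_end(Ap);
  }

  void questionable(int First, char Last, ...) {
    va_list Ap;
    va_start(Ap, First);   // warned: not the last named parameter
    va_end(Ap);
    va_start(Ap, Last);    // warned: 'char' undergoes default argument promotion
    va_end(Ap);
  }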
5659 
5660 bool Sema::SemaBuiltinVAStartARMMicrosoft(CallExpr *Call) {
5661  // void __va_start(va_list *ap, const char *named_addr, size_t slot_size,
5662  // const char *named_addr);
5663 
5664  Expr *Func = Call->getCallee();
5665 
5666  if (Call->getNumArgs() < 3)
5667  return Diag(Call->getEndLoc(),
5668  diag::err_typecheck_call_too_few_args_at_least)
5669  << 0 /*function call*/ << 3 << Call->getNumArgs();
5670 
5671  // Type-check the first argument normally.
5672  if (checkBuiltinArgument(*this, Call, 0))
5673  return true;
5674 
5675  // Check that the current function is variadic.
5676  if (checkVAStartIsInVariadicFunction(*this, Func))
5677  return true;
5678 
5679  // __va_start on Windows does not validate the parameter qualifiers
5680 
5681  const Expr *Arg1 = Call->getArg(1)->IgnoreParens();
5682  const Type *Arg1Ty = Arg1->getType().getCanonicalType().getTypePtr();
5683 
5684  const Expr *Arg2 = Call->getArg(2)->IgnoreParens();
5685  const Type *Arg2Ty = Arg2->getType().getCanonicalType().getTypePtr();
5686 
5687  const QualType &ConstCharPtrTy =
5688  Context.getPointerType(Context.CharTy.withConst());
5689  if (!Arg1Ty->isPointerType() ||
5690  Arg1Ty->getPointeeType().withoutLocalFastQualifiers() != Context.CharTy)
5691  Diag(Arg1->getBeginLoc(), diag::err_typecheck_convert_incompatible)
5692  << Arg1->getType() << ConstCharPtrTy << 1 /* different class */
5693  << 0 /* qualifier difference */
5694  << 3 /* parameter mismatch */
5695  << 2 << Arg1->getType() << ConstCharPtrTy;
5696 
5697  const QualType SizeTy = Context.getSizeType();
5698  if (Arg2Ty->getCanonicalTypeInternal().withoutLocalFastQualifiers() != SizeTy)
5699  Diag(Arg2->getBeginLoc(), diag::err_typecheck_convert_incompatible)
5700  << Arg2->getType() << SizeTy << 1 /* different class */
5701  << 0 /* qualifier difference */
5702  << 3 /* parameter mismatch */
5703  << 3 << Arg2->getType() << SizeTy;
5704 
5705  return false;
5706 }
5707 
5708 /// SemaBuiltinUnorderedCompare - Handle functions like __builtin_isgreater and
5709 /// friends. This is declared to take (...), so we have to check everything.
5710 bool Sema::SemaBuiltinUnorderedCompare(CallExpr *TheCall) {
5711  if (TheCall->getNumArgs() < 2)
5712  return Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args)
5713  << 0 << 2 << TheCall->getNumArgs() /*function call*/;
5714  if (TheCall->getNumArgs() > 2)
5715  return Diag(TheCall->getArg(2)->getBeginLoc(),
5716  diag::err_typecheck_call_too_many_args)
5717  << 0 /*function call*/ << 2 << TheCall->getNumArgs()
5718  << SourceRange(TheCall->getArg(2)->getBeginLoc(),
5719  (*(TheCall->arg_end() - 1))->getEndLoc());
5720 
5721  ExprResult OrigArg0 = TheCall->getArg(0);
5722  ExprResult OrigArg1 = TheCall->getArg(1);
5723 
5724  // Do standard promotions between the two arguments, returning their common
5725  // type.
5726  QualType Res = UsualArithmeticConversions(OrigArg0, OrigArg1, false);
5727  if (OrigArg0.isInvalid() || OrigArg1.isInvalid())
5728  return true;
5729 
5730  // Make sure any conversions are pushed back into the call; this is
5731  // type safe since unordered compare builtins are declared as "_Bool
5732  // foo(...)".
5733  TheCall->setArg(0, OrigArg0.get());
5734  TheCall->setArg(1, OrigArg1.get());
5735 
5736  if (OrigArg0.get()->isTypeDependent() || OrigArg1.get()->isTypeDependent())
5737  return false;
5738 
5739  // If the common type isn't a real floating type, then the arguments were
5740  // invalid for this operation.
5741  if (Res.isNull() || !Res->isRealFloatingType())
5742  return Diag(OrigArg0.get()->getBeginLoc(),
5743  diag::err_typecheck_call_invalid_ordered_compare)
5744  << OrigArg0.get()->getType() << OrigArg1.get()->getType()
5745  << SourceRange(OrigArg0.get()->getBeginLoc(),
5746  OrigArg1.get()->getEndLoc());
5747 
5748  return false;
5749 }
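A short sketch of the requirement above (not from this file): after the usual arithmetic conversions, the common type of the two arguments must be a real floating type.

  int ordered_greater(double A, float B) {
    return __builtin_isgreater(A, B);     // accepted: common type is double
  }

  // int bad(int A, int B) {
  //   return __builtin_isgreater(A, B);  // rejected: arguments are not floating-point
  // }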
5750 
5751 /// SemaBuiltinFPClassification - Handle functions like
5752 /// __builtin_isnan and friends. This is declared to take (...), so we have
5753 ///