SemaChecking.cpp
1 //===- SemaChecking.cpp - Extra Semantic Checking -------------------------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file implements extra semantic analysis beyond what is enforced
11 // by the C type system.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "clang/AST/APValue.h"
16 #include "clang/AST/ASTContext.h"
17 #include "clang/AST/Attr.h"
18 #include "clang/AST/AttrIterator.h"
19 #include "clang/AST/CharUnits.h"
20 #include "clang/AST/Decl.h"
21 #include "clang/AST/DeclBase.h"
22 #include "clang/AST/DeclCXX.h"
23 #include "clang/AST/DeclObjC.h"
26 #include "clang/AST/Expr.h"
27 #include "clang/AST/ExprCXX.h"
28 #include "clang/AST/ExprObjC.h"
29 #include "clang/AST/ExprOpenMP.h"
30 #include "clang/AST/FormatString.h"
31 #include "clang/AST/NSAPI.h"
34 #include "clang/AST/Stmt.h"
35 #include "clang/AST/TemplateBase.h"
36 #include "clang/AST/Type.h"
37 #include "clang/AST/TypeLoc.h"
40 #include "clang/Basic/CharInfo.h"
41 #include "clang/Basic/Diagnostic.h"
43 #include "clang/Basic/LLVM.h"
50 #include "clang/Basic/Specifiers.h"
51 #include "clang/Basic/SyncScope.h"
54 #include "clang/Basic/TargetInfo.h"
55 #include "clang/Basic/TypeTraits.h"
56 #include "clang/Lex/Lexer.h" // TODO: Extract static functions to fix layering.
58 #include "clang/Sema/Lookup.h"
59 #include "clang/Sema/Ownership.h"
60 #include "clang/Sema/Scope.h"
61 #include "clang/Sema/ScopeInfo.h"
62 #include "clang/Sema/Sema.h"
64 #include "llvm/ADT/APFloat.h"
65 #include "llvm/ADT/APInt.h"
66 #include "llvm/ADT/APSInt.h"
67 #include "llvm/ADT/ArrayRef.h"
68 #include "llvm/ADT/DenseMap.h"
69 #include "llvm/ADT/FoldingSet.h"
70 #include "llvm/ADT/None.h"
71 #include "llvm/ADT/Optional.h"
72 #include "llvm/ADT/STLExtras.h"
73 #include "llvm/ADT/SmallBitVector.h"
74 #include "llvm/ADT/SmallPtrSet.h"
75 #include "llvm/ADT/SmallString.h"
76 #include "llvm/ADT/SmallVector.h"
77 #include "llvm/ADT/StringRef.h"
78 #include "llvm/ADT/StringSwitch.h"
79 #include "llvm/ADT/Triple.h"
80 #include "llvm/Support/AtomicOrdering.h"
81 #include "llvm/Support/Casting.h"
82 #include "llvm/Support/Compiler.h"
83 #include "llvm/Support/ConvertUTF.h"
84 #include "llvm/Support/ErrorHandling.h"
85 #include "llvm/Support/Format.h"
86 #include "llvm/Support/Locale.h"
87 #include "llvm/Support/MathExtras.h"
88 #include "llvm/Support/raw_ostream.h"
89 #include <algorithm>
90 #include <cassert>
91 #include <cstddef>
92 #include <cstdint>
93 #include <functional>
94 #include <limits>
95 #include <string>
96 #include <tuple>
97 #include <utility>
98 
99 using namespace clang;
100 using namespace sema;
101 
102 SourceLocation Sema::getLocationOfStringLiteralByte(const StringLiteral *SL,
103  unsigned ByteNo) const {
104  return SL->getLocationOfByte(ByteNo, getSourceManager(), LangOpts,
105  Context.getTargetInfo());
106 }
107 
108 /// Checks that a call expression's argument count is the desired number.
109 /// This is useful when doing custom type-checking. Returns true on error.
110 static bool checkArgCount(Sema &S, CallExpr *call, unsigned desiredArgCount) {
111  unsigned argCount = call->getNumArgs();
112  if (argCount == desiredArgCount) return false;
113 
114  if (argCount < desiredArgCount)
115  return S.Diag(call->getEndLoc(), diag::err_typecheck_call_too_few_args)
116  << 0 /*function call*/ << desiredArgCount << argCount
117  << call->getSourceRange();
118 
119  // Highlight all the excess arguments.
120  SourceRange range(call->getArg(desiredArgCount)->getBeginLoc(),
121  call->getArg(argCount - 1)->getEndLoc());
122 
123  return S.Diag(range.getBegin(), diag::err_typecheck_call_too_many_args)
124  << 0 /*function call*/ << desiredArgCount << argCount
125  << call->getArg(1)->getSourceRange();
126 }
127 
128 /// Check that the first argument to __builtin_annotation is an integer
129 /// and the second argument is a non-wide string literal.
130 static bool SemaBuiltinAnnotation(Sema &S, CallExpr *TheCall) {
131  if (checkArgCount(S, TheCall, 2))
132  return true;
133 
134  // First argument should be an integer.
135  Expr *ValArg = TheCall->getArg(0);
136  QualType Ty = ValArg->getType();
137  if (!Ty->isIntegerType()) {
138  S.Diag(ValArg->getBeginLoc(), diag::err_builtin_annotation_first_arg)
139  << ValArg->getSourceRange();
140  return true;
141  }
142 
143  // Second argument should be a constant string.
144  Expr *StrArg = TheCall->getArg(1)->IgnoreParenCasts();
145  StringLiteral *Literal = dyn_cast<StringLiteral>(StrArg);
146  if (!Literal || !Literal->isAscii()) {
147  S.Diag(StrArg->getBeginLoc(), diag::err_builtin_annotation_second_arg)
148  << StrArg->getSourceRange();
149  return true;
150  }
151 
152  TheCall->setType(Ty);
153  return false;
154 }
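
// Illustrative sketch (example code, not part of Sema) of calls this check
// accepts and rejects:
//
//   int tag(int value) {
//     return __builtin_annotation(value, "my.tag"); // OK: integer + narrow literal
//     // __builtin_annotation(value, L"wide");      // error: wide string literal
//     // __builtin_annotation(1.0, "tag");          // error: first arg not an integer
//   }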
155 
156 static bool SemaBuiltinMSVCAnnotation(Sema &S, CallExpr *TheCall) {
157  // We need at least one argument.
158  if (TheCall->getNumArgs() < 1) {
159  S.Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
160  << 0 << 1 << TheCall->getNumArgs()
161  << TheCall->getCallee()->getSourceRange();
162  return true;
163  }
164 
165  // All arguments should be wide string literals.
166  for (Expr *Arg : TheCall->arguments()) {
167  auto *Literal = dyn_cast<StringLiteral>(Arg->IgnoreParenCasts());
168  if (!Literal || !Literal->isWide()) {
169  S.Diag(Arg->getBeginLoc(), diag::err_msvc_annotation_wide_str)
170  << Arg->getSourceRange();
171  return true;
172  }
173  }
174 
175  return false;
176 }
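
// Illustrative sketch (example code, not part of Sema; assumes -fms-extensions):
// every argument to __annotation must be a wide string literal.
//
//   void traced() {
//     __annotation(L"category", L"event"); // OK: all arguments are wide literals
//     // __annotation("narrow");           // error: not a wide string literal
//   }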
177 
178 /// Check that the argument to __builtin_addressof is a glvalue, and set the
179 /// result type to the corresponding pointer type.
180 static bool SemaBuiltinAddressof(Sema &S, CallExpr *TheCall) {
181  if (checkArgCount(S, TheCall, 1))
182  return true;
183 
184  ExprResult Arg(TheCall->getArg(0));
185  QualType ResultType = S.CheckAddressOfOperand(Arg, TheCall->getBeginLoc());
186  if (ResultType.isNull())
187  return true;
188 
189  TheCall->setArg(0, Arg.get());
190  TheCall->setType(ResultType);
191  return false;
192 }
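
// Illustrative sketch (example code, not part of Sema): the operand must be a
// glvalue, and the call's type becomes the corresponding pointer type.
//
//   struct S { void operator&() = delete; };
//   S *get(S &s) {
//     return __builtin_addressof(s);      // OK: glvalue operand, result type S*
//     // return __builtin_addressof(S{}); // error: operand is a prvalue
//   }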
193 
194 static bool SemaBuiltinOverflow(Sema &S, CallExpr *TheCall) {
195  if (checkArgCount(S, TheCall, 3))
196  return true;
197 
198  // First two arguments should be integers.
199  for (unsigned I = 0; I < 2; ++I) {
200  ExprResult Arg = TheCall->getArg(I);
201  QualType Ty = Arg.get()->getType();
202  if (!Ty->isIntegerType()) {
203  S.Diag(Arg.get()->getBeginLoc(), diag::err_overflow_builtin_must_be_int)
204  << Ty << Arg.get()->getSourceRange();
205  return true;
206  }
207  InitializedEntity Entity = InitializedEntity::InitializeParameter(
208  S.getASTContext(), Ty, /*consume*/ false);
209  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
210  if (Arg.isInvalid())
211  return true;
212  TheCall->setArg(I, Arg.get());
213  }
214 
215  // Third argument should be a pointer to a non-const integer.
216  // IRGen correctly handles volatile, restrict, and address spaces, and
217  // the other qualifiers aren't possible.
218  {
219  ExprResult Arg = TheCall->getArg(2);
220  QualType Ty = Arg.get()->getType();
221  const auto *PtrTy = Ty->getAs<PointerType>();
222  if (!(PtrTy && PtrTy->getPointeeType()->isIntegerType() &&
223  !PtrTy->getPointeeType().isConstQualified())) {
224  S.Diag(Arg.get()->getBeginLoc(),
225  diag::err_overflow_builtin_must_be_ptr_int)
226  << Ty << Arg.get()->getSourceRange();
227  return true;
228  }
229  InitializedEntity Entity = InitializedEntity::InitializeParameter(
230  S.getASTContext(), Ty, /*consume*/ false);
231  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
232  if (Arg.isInvalid())
233  return true;
234  TheCall->setArg(2, Arg.get());
235  }
236  return false;
237 }
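
// Illustrative sketch (example code, not part of Sema): the first two operands
// must be integers and the third a pointer to a non-const integer.
//
//   bool add(int a, int b, int *out, const int *ro) {
//     return __builtin_add_overflow(a, b, out); // OK
//     // __builtin_add_overflow(a, b, ro);      // error: pointee is const
//     // __builtin_add_overflow(1.0, b, out);   // error: non-integer operand
//   }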
238 
239 static void SemaBuiltinMemChkCall(Sema &S, FunctionDecl *FDecl,
240  CallExpr *TheCall, unsigned SizeIdx,
241  unsigned DstSizeIdx,
242  StringRef LikelyMacroName) {
243  if (TheCall->getNumArgs() <= SizeIdx ||
244  TheCall->getNumArgs() <= DstSizeIdx)
245  return;
246 
247  const Expr *SizeArg = TheCall->getArg(SizeIdx);
248  const Expr *DstSizeArg = TheCall->getArg(DstSizeIdx);
249 
250  Expr::EvalResult SizeResult, DstSizeResult;
251 
252  // Find out if both sizes are known at compile time.
253  if (!SizeArg->EvaluateAsInt(SizeResult, S.Context) ||
254  !DstSizeArg->EvaluateAsInt(DstSizeResult, S.Context))
255  return;
256 
257  llvm::APSInt Size = SizeResult.Val.getInt();
258  llvm::APSInt DstSize = DstSizeResult.Val.getInt();
259 
260  if (Size.ule(DstSize))
261  return;
262 
263  // Confirmed overflow, so generate the diagnostic.
264  StringRef FunctionName = FDecl->getName();
265  SourceLocation SL = TheCall->getBeginLoc();
266  SourceManager &SM = S.getSourceManager();
267  // If we're in an expansion of a macro whose name corresponds to this builtin,
268  // use the simple macro name and location.
269  if (SL.isMacroID() && Lexer::getImmediateMacroName(SL, SM, S.getLangOpts()) ==
270  LikelyMacroName) {
271  FunctionName = LikelyMacroName;
272  SL = SM.getImmediateMacroCallerLoc(SL);
273  }
274 
275  S.Diag(SL, diag::warn_memcpy_chk_overflow)
276  << FunctionName << DstSize.toString(/*Radix=*/10)
277  << Size.toString(/*Radix=*/10);
278 }
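
// Illustrative sketch (example code, not part of Sema): both size operands are
// compile-time constants and the copy size exceeds the destination size
// computed by __builtin_object_size, so the warning above fires. If the call
// came from a macro named "memcpy" (e.g. a _FORTIFY_SOURCE wrapper), the
// diagnostic would use that simpler name instead.
//
//   char dst[4];
//   __builtin___memcpy_chk(dst, "toolong", 8, __builtin_object_size(dst, 0));
//   // warning: the call will always overflow the destination buffer
//   // (destination size 4, size argument 8)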
279 
280 static bool SemaBuiltinCallWithStaticChain(Sema &S, CallExpr *BuiltinCall) {
281  if (checkArgCount(S, BuiltinCall, 2))
282  return true;
283 
284  SourceLocation BuiltinLoc = BuiltinCall->getBeginLoc();
285  Expr *Builtin = BuiltinCall->getCallee()->IgnoreImpCasts();
286  Expr *Call = BuiltinCall->getArg(0);
287  Expr *Chain = BuiltinCall->getArg(1);
288 
289  if (Call->getStmtClass() != Stmt::CallExprClass) {
290  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_not_call)
291  << Call->getSourceRange();
292  return true;
293  }
294 
295  auto CE = cast<CallExpr>(Call);
296  if (CE->getCallee()->getType()->isBlockPointerType()) {
297  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_block_call)
298  << Call->getSourceRange();
299  return true;
300  }
301 
302  const Decl *TargetDecl = CE->getCalleeDecl();
303  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(TargetDecl))
304  if (FD->getBuiltinID()) {
305  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_builtin_call)
306  << Call->getSourceRange();
307  return true;
308  }
309 
310  if (isa<CXXPseudoDestructorExpr>(CE->getCallee()->IgnoreParens())) {
311  S.Diag(BuiltinLoc, diag::err_first_argument_to_cwsc_pdtor_call)
312  << Call->getSourceRange();
313  return true;
314  }
315 
316  ExprResult ChainResult = S.UsualUnaryConversions(Chain);
317  if (ChainResult.isInvalid())
318  return true;
319  if (!ChainResult.get()->getType()->isPointerType()) {
320  S.Diag(BuiltinLoc, diag::err_second_argument_to_cwsc_not_pointer)
321  << Chain->getSourceRange();
322  return true;
323  }
324 
325  QualType ReturnTy = CE->getCallReturnType(S.Context);
326  QualType ArgTys[2] = { ReturnTy, ChainResult.get()->getType() };
327  QualType BuiltinTy = S.Context.getFunctionType(
328  ReturnTy, ArgTys, FunctionProtoType::ExtProtoInfo());
329  QualType BuiltinPtrTy = S.Context.getPointerType(BuiltinTy);
330 
331  Builtin =
332  S.ImpCastExprToType(Builtin, BuiltinPtrTy, CK_BuiltinFnToFnPtr).get();
333 
334  BuiltinCall->setType(CE->getType());
335  BuiltinCall->setValueKind(CE->getValueKind());
336  BuiltinCall->setObjectKind(CE->getObjectKind());
337  BuiltinCall->setCallee(Builtin);
338  BuiltinCall->setArg(1, ChainResult.get());
339 
340  return false;
341 }
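
// Illustrative sketch (example code, not part of Sema): the first argument
// must be an ordinary (non-block, non-builtin) call and the second a pointer,
// which is passed as the call's static chain.
//
//   int target(int x);
//   int wrap(int x, void *frame) {
//     return __builtin_call_with_static_chain(target(x), frame);
//   }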
342 
343 static bool SemaBuiltinSEHScopeCheck(Sema &SemaRef, CallExpr *TheCall,
344  Scope::ScopeFlags NeededScopeFlags,
345  unsigned DiagID) {
346  // Scopes aren't available during instantiation. Fortunately, builtin
347  // functions cannot be template args so they cannot be formed through template
348  // instantiation. Therefore checking once during the parse is sufficient.
349  if (SemaRef.inTemplateInstantiation())
350  return false;
351 
352  Scope *S = SemaRef.getCurScope();
353  while (S && !S->isSEHExceptScope())
354  S = S->getParent();
355  if (!S || !(S->getFlags() & NeededScopeFlags)) {
356  auto *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
357  SemaRef.Diag(TheCall->getExprLoc(), DiagID)
358  << DRE->getDecl()->getIdentifier();
359  return true;
360  }
361 
362  return false;
363 }
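
// Illustrative sketch (example code, not part of Sema; assumes an MSVC-style
// target with SEH support): _exception_code() is only accepted lexically
// inside an __except block, which is what the scope walk above enforces.
//
//   void guarded() {
//     __try {
//       /* ... */
//     } __except (1) {
//       unsigned long code = _exception_code(); // OK: inside an __except scope
//       (void)code;
//     }
//     // _exception_code();                     // error outside __except
//   }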
364 
365 static inline bool isBlockPointer(Expr *Arg) {
366  return Arg->getType()->isBlockPointerType();
367 }
368 
369 /// OpenCL C v2.0, s6.13.17.2 - Checks that the block parameters are all local
370 /// void*, which is a requirement of device side enqueue.
371 static bool checkOpenCLBlockArgs(Sema &S, Expr *BlockArg) {
372  const BlockPointerType *BPT =
373  cast<BlockPointerType>(BlockArg->getType().getCanonicalType());
374  ArrayRef<QualType> Params =
375  BPT->getPointeeType()->getAs<FunctionProtoType>()->getParamTypes();
376  unsigned ArgCounter = 0;
377  bool IllegalParams = false;
378  // Iterate through the block parameters until either one is found that is not
379  // a local void*, or the block is valid.
380  for (ArrayRef<QualType>::iterator I = Params.begin(), E = Params.end();
381  I != E; ++I, ++ArgCounter) {
382  if (!(*I)->isPointerType() || !(*I)->getPointeeType()->isVoidType() ||
383  (*I)->getPointeeType().getQualifiers().getAddressSpace() !=
384  LangAS::opencl_local) {
385  // Get the location of the error. If a block literal has been passed
386  // (BlockExpr) then we can point straight to the offending argument,
387  // else we just point to the variable reference.
388  SourceLocation ErrorLoc;
389  if (isa<BlockExpr>(BlockArg)) {
390  BlockDecl *BD = cast<BlockExpr>(BlockArg)->getBlockDecl();
391  ErrorLoc = BD->getParamDecl(ArgCounter)->getBeginLoc();
392  } else if (isa<DeclRefExpr>(BlockArg)) {
393  ErrorLoc = cast<DeclRefExpr>(BlockArg)->getBeginLoc();
394  }
395  S.Diag(ErrorLoc,
396  diag::err_opencl_enqueue_kernel_blocks_non_local_void_args);
397  IllegalParams = true;
398  }
399  }
400 
401  return IllegalParams;
402 }
403 
404 static bool checkOpenCLSubgroupExt(Sema &S, CallExpr *Call) {
405  if (!S.getOpenCLOptions().isEnabled("cl_khr_subgroups")) {
406  S.Diag(Call->getBeginLoc(), diag::err_opencl_requires_extension)
407  << 1 << Call->getDirectCallee() << "cl_khr_subgroups";
408  return true;
409  }
410  return false;
411 }
412 
413 static bool SemaOpenCLBuiltinNDRangeAndBlock(Sema &S, CallExpr *TheCall) {
414  if (checkArgCount(S, TheCall, 2))
415  return true;
416 
417  if (checkOpenCLSubgroupExt(S, TheCall))
418  return true;
419 
420  // First argument is an ndrange_t type.
421  Expr *NDRangeArg = TheCall->getArg(0);
422  if (NDRangeArg->getType().getUnqualifiedType().getAsString() != "ndrange_t") {
423  S.Diag(NDRangeArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
424  << TheCall->getDirectCallee() << "'ndrange_t'";
425  return true;
426  }
427 
428  Expr *BlockArg = TheCall->getArg(1);
429  if (!isBlockPointer(BlockArg)) {
430  S.Diag(BlockArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
431  << TheCall->getDirectCallee() << "block";
432  return true;
433  }
434  return checkOpenCLBlockArgs(S, BlockArg);
435 }
436 
437 /// OpenCL C v2.0, s6.13.17.6 - Check the argument to the
438 /// get_kernel_work_group_size
439 /// and get_kernel_preferred_work_group_size_multiple builtin functions.
440 static bool SemaOpenCLBuiltinKernelWorkGroupSize(Sema &S, CallExpr *TheCall) {
441  if (checkArgCount(S, TheCall, 1))
442  return true;
443 
444  Expr *BlockArg = TheCall->getArg(0);
445  if (!isBlockPointer(BlockArg)) {
446  S.Diag(BlockArg->getBeginLoc(), diag::err_opencl_builtin_expected_type)
447  << TheCall->getDirectCallee() << "block";
448  return true;
449  }
450  return checkOpenCLBlockArgs(S, BlockArg);
451 }
452 
453 /// Diagnose integer type and any valid implicit conversion to it.
454 static bool checkOpenCLEnqueueIntType(Sema &S, Expr *E,
455  const QualType &IntType);
456 
457 static bool checkOpenCLEnqueueLocalSizeArgs(Sema &S, CallExpr *TheCall,
458  unsigned Start, unsigned End) {
459  bool IllegalParams = false;
460  for (unsigned I = Start; I <= End; ++I)
461  IllegalParams |= checkOpenCLEnqueueIntType(S, TheCall->getArg(I),
462  S.Context.getSizeType());
463  return IllegalParams;
464 }
465 
466 /// OpenCL v2.0, s6.13.17.1 - Check that sizes are provided for all
467 /// 'local void*' parameters of the passed block.
468 static bool checkOpenCLEnqueueVariadicArgs(Sema &S, CallExpr *TheCall,
469  Expr *BlockArg,
470  unsigned NumNonVarArgs) {
471  const BlockPointerType *BPT =
472  cast<BlockPointerType>(BlockArg->getType().getCanonicalType());
473  unsigned NumBlockParams =
474  BPT->getPointeeType()->getAs<FunctionProtoType>()->getNumParams();
475  unsigned TotalNumArgs = TheCall->getNumArgs();
476 
477  // For each argument passed to the block, a corresponding uint needs to
478  // be passed to describe the size of the local memory.
479  if (TotalNumArgs != NumBlockParams + NumNonVarArgs) {
480  S.Diag(TheCall->getBeginLoc(),
481  diag::err_opencl_enqueue_kernel_local_size_args);
482  return true;
483  }
484 
485  // Check that the sizes of the local memory are specified by integers.
486  return checkOpenCLEnqueueLocalSizeArgs(S, TheCall, NumNonVarArgs,
487  TotalNumArgs - 1);
488 }
489 
490 /// OpenCL C v2.0, s6.13.17 - Enqueue kernel function contains four different
491 /// overload formats specified in Table 6.13.17.1.
492 /// int enqueue_kernel(queue_t queue,
493 /// kernel_enqueue_flags_t flags,
494 /// const ndrange_t ndrange,
495 /// void (^block)(void))
496 /// int enqueue_kernel(queue_t queue,
497 /// kernel_enqueue_flags_t flags,
498 /// const ndrange_t ndrange,
499 /// uint num_events_in_wait_list,
500 /// clk_event_t *event_wait_list,
501 /// clk_event_t *event_ret,
502 /// void (^block)(void))
503 /// int enqueue_kernel(queue_t queue,
504 /// kernel_enqueue_flags_t flags,
505 /// const ndrange_t ndrange,
506 /// void (^block)(local void*, ...),
507 /// uint size0, ...)
508 /// int enqueue_kernel(queue_t queue,
509 /// kernel_enqueue_flags_t flags,
510 /// const ndrange_t ndrange,
511 /// uint num_events_in_wait_list,
512 /// clk_event_t *event_wait_list,
513 /// clk_event_t *event_ret,
514 /// void (^block)(local void*, ...),
515 /// uint size0, ...)
516 static bool SemaOpenCLBuiltinEnqueueKernel(Sema &S, CallExpr *TheCall) {
517  unsigned NumArgs = TheCall->getNumArgs();
518 
519  if (NumArgs < 4) {
520  S.Diag(TheCall->getBeginLoc(), diag::err_typecheck_call_too_few_args);
521  return true;
522  }
523 
524  Expr *Arg0 = TheCall->getArg(0);
525  Expr *Arg1 = TheCall->getArg(1);
526  Expr *Arg2 = TheCall->getArg(2);
527  Expr *Arg3 = TheCall->getArg(3);
528 
529  // First argument always needs to be a queue_t type.
530  if (!Arg0->getType()->isQueueT()) {
531  S.Diag(TheCall->getArg(0)->getBeginLoc(),
532  diag::err_opencl_builtin_expected_type)
533  << TheCall->getDirectCallee() << S.Context.OCLQueueTy;
534  return true;
535  }
536 
537  // Second argument always needs to be a kernel_enqueue_flags_t enum value.
538  if (!Arg1->getType()->isIntegerType()) {
539  S.Diag(TheCall->getArg(1)->getBeginLoc(),
540  diag::err_opencl_builtin_expected_type)
541  << TheCall->getDirectCallee() << "'kernel_enqueue_flags_t' (i.e. uint)";
542  return true;
543  }
544 
545  // Third argument is always an ndrange_t type.
546  if (Arg2->getType().getUnqualifiedType().getAsString() != "ndrange_t") {
547  S.Diag(TheCall->getArg(2)->getBeginLoc(),
548  diag::err_opencl_builtin_expected_type)
549  << TheCall->getDirectCallee() << "'ndrange_t'";
550  return true;
551  }
552 
553  // With four arguments, there is only one form that the function could be
554  // called in: no events and no variable arguments.
555  if (NumArgs == 4) {
556  // check that the last argument is the right block type.
557  if (!isBlockPointer(Arg3)) {
558  S.Diag(Arg3->getBeginLoc(), diag::err_opencl_builtin_expected_type)
559  << TheCall->getDirectCallee() << "block";
560  return true;
561  }
562  // we have a block type, check the prototype
563  const BlockPointerType *BPT =
564  cast<BlockPointerType>(Arg3->getType().getCanonicalType());
565  if (BPT->getPointeeType()->getAs<FunctionProtoType>()->getNumParams() > 0) {
566  S.Diag(Arg3->getBeginLoc(),
567  diag::err_opencl_enqueue_kernel_blocks_no_args);
568  return true;
569  }
570  return false;
571  }
572  // we can have block + varargs.
573  if (isBlockPointer(Arg3))
574  return (checkOpenCLBlockArgs(S, Arg3) ||
575  checkOpenCLEnqueueVariadicArgs(S, TheCall, Arg3, 4));
576  // last two cases with either exactly 7 args or 7 args and varargs.
577  if (NumArgs >= 7) {
578  // check common block argument.
579  Expr *Arg6 = TheCall->getArg(6);
580  if (!isBlockPointer(Arg6)) {
581  S.Diag(Arg6->getBeginLoc(), diag::err_opencl_builtin_expected_type)
582  << TheCall->getDirectCallee() << "block";
583  return true;
584  }
585  if (checkOpenCLBlockArgs(S, Arg6))
586  return true;
587 
588  // Fourth argument has to be an integer type.
589  if (!Arg3->getType()->isIntegerType()) {
590  S.Diag(TheCall->getArg(3)->getBeginLoc(),
591  diag::err_opencl_builtin_expected_type)
592  << TheCall->getDirectCallee() << "integer";
593  return true;
594  }
595  // check remaining common arguments.
596  Expr *Arg4 = TheCall->getArg(4);
597  Expr *Arg5 = TheCall->getArg(5);
598 
599  // Fifth argument is always passed as a pointer to clk_event_t.
600  if (!Arg4->isNullPointerConstant(S.Context,
601  Expr::NPC_ValueDependentIsNotNull) &&
602  !Arg4->getType()->getPointeeOrArrayElementType()->isClkEventT()) {
603  S.Diag(TheCall->getArg(4)->getBeginLoc(),
604  diag::err_opencl_builtin_expected_type)
605  << TheCall->getDirectCallee()
606  << S.Context.getPointerType(S.Context.OCLClkEventTy);
607  return true;
608  }
609 
610  // Sixth argument is always passed as a pointer to clk_event_t.
611  if (!Arg5->isNullPointerConstant(S.Context,
612  Expr::NPC_ValueDependentIsNotNull) &&
613  !(Arg5->getType()->isPointerType() &&
614  Arg5->getType()->getPointeeType()->isClkEventT())) {
615  S.Diag(TheCall->getArg(5)->getBeginLoc(),
616  diag::err_opencl_builtin_expected_type)
617  << TheCall->getDirectCallee()
618  << S.Context.getPointerType(S.Context.OCLClkEventTy);
619  return true;
620  }
621 
622  if (NumArgs == 7)
623  return false;
624 
625  return checkOpenCLEnqueueVariadicArgs(S, TheCall, Arg6, 7);
626  }
627 
628  // None of the specific cases was detected; emit the generic error.
629  S.Diag(TheCall->getBeginLoc(),
630  diag::err_opencl_enqueue_kernel_incorrect_args);
631  return true;
632 }
633 
634 /// Returns OpenCL access qual.
635 static OpenCLAccessAttr *getOpenCLArgAccess(const Decl *D) {
636  return D->getAttr<OpenCLAccessAttr>();
637 }
638 
639 /// Checks that the first argument is a pipe and that its access qualifier is compatible with the builtin being called; returns true on error.
640 static bool checkOpenCLPipeArg(Sema &S, CallExpr *Call) {
641  const Expr *Arg0 = Call->getArg(0);
642  // First argument type should always be pipe.
643  if (!Arg0->getType()->isPipeType()) {
644  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_first_arg)
645  << Call->getDirectCallee() << Arg0->getSourceRange();
646  return true;
647  }
648  OpenCLAccessAttr *AccessQual =
649  getOpenCLArgAccess(cast<DeclRefExpr>(Arg0)->getDecl());
650  // Validates the access qualifier is compatible with the call.
651  // OpenCL v2.0 s6.13.16 - The access qualifiers for pipe should only be
652  // read_only and write_only, and assumed to be read_only if no qualifier is
653  // specified.
654  switch (Call->getDirectCallee()->getBuiltinID()) {
655  case Builtin::BIread_pipe:
656  case Builtin::BIreserve_read_pipe:
657  case Builtin::BIcommit_read_pipe:
658  case Builtin::BIwork_group_reserve_read_pipe:
659  case Builtin::BIsub_group_reserve_read_pipe:
660  case Builtin::BIwork_group_commit_read_pipe:
661  case Builtin::BIsub_group_commit_read_pipe:
662  if (!(!AccessQual || AccessQual->isReadOnly())) {
663  S.Diag(Arg0->getBeginLoc(),
664  diag::err_opencl_builtin_pipe_invalid_access_modifier)
665  << "read_only" << Arg0->getSourceRange();
666  return true;
667  }
668  break;
669  case Builtin::BIwrite_pipe:
670  case Builtin::BIreserve_write_pipe:
671  case Builtin::BIcommit_write_pipe:
672  case Builtin::BIwork_group_reserve_write_pipe:
673  case Builtin::BIsub_group_reserve_write_pipe:
674  case Builtin::BIwork_group_commit_write_pipe:
675  case Builtin::BIsub_group_commit_write_pipe:
676  if (!(AccessQual && AccessQual->isWriteOnly())) {
677  S.Diag(Arg0->getBeginLoc(),
678  diag::err_opencl_builtin_pipe_invalid_access_modifier)
679  << "write_only" << Arg0->getSourceRange();
680  return true;
681  }
682  break;
683  default:
684  break;
685  }
686  return false;
687 }
688 
689 /// Returns true if pipe element type is different from the pointer.
690 static bool checkOpenCLPipePacketType(Sema &S, CallExpr *Call, unsigned Idx) {
691  const Expr *Arg0 = Call->getArg(0);
692  const Expr *ArgIdx = Call->getArg(Idx);
693  const PipeType *PipeTy = cast<PipeType>(Arg0->getType());
694  const QualType EltTy = PipeTy->getElementType();
695  const PointerType *ArgTy = ArgIdx->getType()->getAs<PointerType>();
696  // The Idx argument should be a pointer and the type of the pointer and
697  // the type of pipe element should also be the same.
698  if (!ArgTy ||
699  !S.Context.hasSameType(
700  EltTy, ArgTy->getPointeeType()->getCanonicalTypeInternal())) {
701  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
702  << Call->getDirectCallee() << S.Context.getPointerType(EltTy)
703  << ArgIdx->getType() << ArgIdx->getSourceRange();
704  return true;
705  }
706  return false;
707 }
708 
709 // Performs semantic analysis for the read/write_pipe call.
710 // \param S Reference to the semantic analyzer.
711 // \param Call A pointer to the builtin call.
712 // \return True if a semantic error has been found, false otherwise.
713 static bool SemaBuiltinRWPipe(Sema &S, CallExpr *Call) {
714  // OpenCL v2.0 s6.13.16.2 - The built-in read/write
715  // functions have two forms.
716  switch (Call->getNumArgs()) {
717  case 2:
718  if (checkOpenCLPipeArg(S, Call))
719  return true;
720  // The call with 2 arguments should be
721  // read/write_pipe(pipe T, T*).
722  // Check packet type T.
723  if (checkOpenCLPipePacketType(S, Call, 1))
724  return true;
725  break;
726 
727  case 4: {
728  if (checkOpenCLPipeArg(S, Call))
729  return true;
730  // The call with 4 arguments should be
731  // read/write_pipe(pipe T, reserve_id_t, uint, T*).
732  // Check reserve_id_t.
733  if (!Call->getArg(1)->getType()->isReserveIDT()) {
734  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
735  << Call->getDirectCallee() << S.Context.OCLReserveIDTy
736  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
737  return true;
738  }
739 
740  // Check the index.
741  const Expr *Arg2 = Call->getArg(2);
742  if (!Arg2->getType()->isIntegerType() &&
743  !Arg2->getType()->isUnsignedIntegerType()) {
744  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
745  << Call->getDirectCallee() << S.Context.UnsignedIntTy
746  << Arg2->getType() << Arg2->getSourceRange();
747  return true;
748  }
749 
750  // Check packet type T.
751  if (checkOpenCLPipePacketType(S, Call, 3))
752  return true;
753  } break;
754  default:
755  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_arg_num)
756  << Call->getDirectCallee() << Call->getSourceRange();
757  return true;
758  }
759 
760  return false;
761 }
762 
763 // Performs a semantic analysis on the {work_group_/sub_group_
764 // /_}reserve_{read/write}_pipe
765 // \param S Reference to the semantic analyzer.
766 // \param Call The call to the builtin function to be analyzed.
767 // \return True if a semantic error was found, false otherwise.
768 static bool SemaBuiltinReserveRWPipe(Sema &S, CallExpr *Call) {
769  if (checkArgCount(S, Call, 2))
770  return true;
771 
772  if (checkOpenCLPipeArg(S, Call))
773  return true;
774 
775  // Check the reserve size.
776  if (!Call->getArg(1)->getType()->isIntegerType() &&
777  !Call->getArg(1)->getType()->isUnsignedIntegerType()) {
778  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
779  << Call->getDirectCallee() << S.Context.UnsignedIntTy
780  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
781  return true;
782  }
783 
784  // Since the return type of the reserve_read/write_pipe built-ins is
785  // reserve_id_t, which is not defined in the builtins .def file, we use int
786  // as the return type there and override it here.
787  Call->setType(S.Context.OCLReserveIDTy);
788 
789  return false;
790 }
791 
792 // Performs a semantic analysis on {work_group_/sub_group_
793 // /_}commit_{read/write}_pipe
794 // \param S Reference to the semantic analyzer.
795 // \param Call The call to the builtin function to be analyzed.
796 // \return True if a semantic error was found, false otherwise.
797 static bool SemaBuiltinCommitRWPipe(Sema &S, CallExpr *Call) {
798  if (checkArgCount(S, Call, 2))
799  return true;
800 
801  if (checkOpenCLPipeArg(S, Call))
802  return true;
803 
804  // Check reserve_id_t.
805  if (!Call->getArg(1)->getType()->isReserveIDT()) {
806  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_invalid_arg)
807  << Call->getDirectCallee() << S.Context.OCLReserveIDTy
808  << Call->getArg(1)->getType() << Call->getArg(1)->getSourceRange();
809  return true;
810  }
811 
812  return false;
813 }
814 
815 // Performs a semantic analysis on the call to built-in Pipe
816 // Query Functions.
817 // \param S Reference to the semantic analyzer.
818 // \param Call The call to the builtin function to be analyzed.
819 // \return True if a semantic error was found, false otherwise.
820 static bool SemaBuiltinPipePackets(Sema &S, CallExpr *Call) {
821  if (checkArgCount(S, Call, 1))
822  return true;
823 
824  if (!Call->getArg(0)->getType()->isPipeType()) {
825  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_pipe_first_arg)
826  << Call->getDirectCallee() << Call->getArg(0)->getSourceRange();
827  return true;
828  }
829 
830  return false;
831 }
832 
833 // OpenCL v2.0 s6.13.9 - Address space qualifier functions.
834 // Performs semantic analysis for the to_global/local/private call.
835 // \param S Reference to the semantic analyzer.
836 // \param BuiltinID ID of the builtin function.
837 // \param Call A pointer to the builtin call.
838 // \return True if a semantic error has been found, false otherwise.
839 static bool SemaOpenCLBuiltinToAddr(Sema &S, unsigned BuiltinID,
840  CallExpr *Call) {
841  if (Call->getNumArgs() != 1) {
842  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_to_addr_arg_num)
843  << Call->getDirectCallee() << Call->getSourceRange();
844  return true;
845  }
846 
847  auto RT = Call->getArg(0)->getType();
848  if (!RT->isPointerType() || RT->getPointeeType()
849  .getAddressSpace() == LangAS::opencl_constant) {
850  S.Diag(Call->getBeginLoc(), diag::err_opencl_builtin_to_addr_invalid_arg)
851  << Call->getArg(0) << Call->getDirectCallee() << Call->getSourceRange();
852  return true;
853  }
854 
855  if (RT->getPointeeType().getAddressSpace() != LangAS::opencl_generic) {
856  S.Diag(Call->getArg(0)->getBeginLoc(),
857  diag::warn_opencl_generic_address_space_arg)
858  << Call->getDirectCallee()->getNameInfo().getAsString()
859  << Call->getArg(0)->getSourceRange();
860  }
861 
862  RT = RT->getPointeeType();
863  auto Qual = RT.getQualifiers();
864  switch (BuiltinID) {
865  case Builtin::BIto_global:
866  Qual.setAddressSpace(LangAS::opencl_global);
867  break;
868  case Builtin::BIto_local:
869  Qual.setAddressSpace(LangAS::opencl_local);
870  break;
871  case Builtin::BIto_private:
872  Qual.setAddressSpace(LangAS::opencl_private);
873  break;
874  default:
875  llvm_unreachable("Invalid builtin function");
876  }
877  Call->setType(S.Context.getPointerType(S.Context.getQualifiedType(
878  RT.getUnqualifiedType(), Qual)));
879 
880  return false;
881 }
882 
883 static ExprResult SemaBuiltinLaunder(Sema &S, CallExpr *TheCall) {
884  if (checkArgCount(S, TheCall, 1))
885  return ExprError();
886 
887  // Compute __builtin_launder's parameter type from the argument.
888  // The parameter type is:
889  // * The type of the argument if it's not an array or function type,
890  // Otherwise,
891  // * The decayed argument type.
892  QualType ParamTy = [&]() {
893  QualType ArgTy = TheCall->getArg(0)->getType();
894  if (const ArrayType *Ty = ArgTy->getAsArrayTypeUnsafe())
895  return S.Context.getPointerType(Ty->getElementType());
896  if (ArgTy->isFunctionType()) {
897  return S.Context.getPointerType(ArgTy);
898  }
899  return ArgTy;
900  }();
901 
902  TheCall->setType(ParamTy);
903 
904  auto DiagSelect = [&]() -> llvm::Optional<unsigned> {
905  if (!ParamTy->isPointerType())
906  return 0;
907  if (ParamTy->isFunctionPointerType())
908  return 1;
909  if (ParamTy->isVoidPointerType())
910  return 2;
911  return llvm::Optional<unsigned>{};
912  }();
913  if (DiagSelect.hasValue()) {
914  S.Diag(TheCall->getBeginLoc(), diag::err_builtin_launder_invalid_arg)
915  << DiagSelect.getValue() << TheCall->getSourceRange();
916  return ExprError();
917  }
918 
919  // We either have an incomplete class type, or we have a class template
920  // whose instantiation has not been forced. Example:
921  //
922  // template <class T> struct Foo { T value; };
923  // Foo<int> *p = nullptr;
924  // auto *d = __builtin_launder(p);
925  if (S.RequireCompleteType(TheCall->getBeginLoc(), ParamTy->getPointeeType(),
926  diag::err_incomplete_type))
927  return ExprError();
928 
929  assert(ParamTy->getPointeeType()->isObjectType() &&
930  "Unhandled non-object pointer case");
931 
932  InitializedEntity Entity =
933  InitializedEntity::InitializeParameter(S.Context, ParamTy, false);
934  ExprResult Arg =
935  S.PerformCopyInitialization(Entity, SourceLocation(), TheCall->getArg(0));
936  if (Arg.isInvalid())
937  return ExprError();
938  TheCall->setArg(0, Arg.get());
939 
940  return TheCall;
941 }
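
// Illustrative sketch (example code, not part of Sema): the operand must be a
// pointer to a complete, non-void object type; function pointers are rejected.
//
//   struct Widget { int v; };
//   int read(Widget *p) {
//     return __builtin_launder(p)->v;   // OK: pointer to complete object type
//     // __builtin_launder((void *)p);  // error: void pointer
//   }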
942 
943 // Emit an error and return true if the current architecture is not in the list
944 // of supported architectures.
945 static bool
946 CheckBuiltinTargetSupport(Sema &S, unsigned BuiltinID, CallExpr *TheCall,
947  ArrayRef<llvm::Triple::ArchType> SupportedArchs) {
948  llvm::Triple::ArchType CurArch =
949  S.getASTContext().getTargetInfo().getTriple().getArch();
950  if (llvm::is_contained(SupportedArchs, CurArch))
951  return false;
952  S.Diag(TheCall->getBeginLoc(), diag::err_builtin_target_unsupported)
953  << TheCall->getSourceRange();
954  return true;
955 }
956 
957 ExprResult
958 Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
959  CallExpr *TheCall) {
960  ExprResult TheCallResult(TheCall);
961 
962  // Find out if any arguments are required to be integer constant expressions.
963  unsigned ICEArguments = 0;
964  ASTContext::GetBuiltinTypeError Error;
965  Context.GetBuiltinType(BuiltinID, Error, &ICEArguments);
966  if (Error != ASTContext::GE_None)
967  ICEArguments = 0; // Don't diagnose previously diagnosed errors.
968 
969  // If any arguments are required to be ICE's, check and diagnose.
970  for (unsigned ArgNo = 0; ICEArguments != 0; ++ArgNo) {
971  // Skip arguments not required to be ICE's.
972  if ((ICEArguments & (1 << ArgNo)) == 0) continue;
973 
974  llvm::APSInt Result;
975  if (SemaBuiltinConstantArg(TheCall, ArgNo, Result))
976  return true;
977  ICEArguments &= ~(1 << ArgNo);
978  }
979 
980  switch (BuiltinID) {
981  case Builtin::BI__builtin___CFStringMakeConstantString:
982  assert(TheCall->getNumArgs() == 1 &&
983  "Wrong # arguments to builtin CFStringMakeConstantString");
984  if (CheckObjCString(TheCall->getArg(0)))
985  return ExprError();
986  break;
987  case Builtin::BI__builtin_ms_va_start:
988  case Builtin::BI__builtin_stdarg_start:
989  case Builtin::BI__builtin_va_start:
990  if (SemaBuiltinVAStart(BuiltinID, TheCall))
991  return ExprError();
992  break;
993  case Builtin::BI__va_start: {
994  switch (Context.getTargetInfo().getTriple().getArch()) {
995  case llvm::Triple::aarch64:
996  case llvm::Triple::arm:
997  case llvm::Triple::thumb:
998  if (SemaBuiltinVAStartARMMicrosoft(TheCall))
999  return ExprError();
1000  break;
1001  default:
1002  if (SemaBuiltinVAStart(BuiltinID, TheCall))
1003  return ExprError();
1004  break;
1005  }
1006  break;
1007  }
1008 
1009  // The acquire, release, and no fence variants are ARM and AArch64 only.
1010  case Builtin::BI_interlockedbittestandset_acq:
1011  case Builtin::BI_interlockedbittestandset_rel:
1012  case Builtin::BI_interlockedbittestandset_nf:
1013  case Builtin::BI_interlockedbittestandreset_acq:
1014  case Builtin::BI_interlockedbittestandreset_rel:
1015  case Builtin::BI_interlockedbittestandreset_nf:
1016  if (CheckBuiltinTargetSupport(
1017  *this, BuiltinID, TheCall,
1018  {llvm::Triple::arm, llvm::Triple::thumb, llvm::Triple::aarch64}))
1019  return ExprError();
1020  break;
1021 
1022  // The 64-bit bittest variants are x64, ARM, and AArch64 only.
1023  case Builtin::BI_bittest64:
1024  case Builtin::BI_bittestandcomplement64:
1025  case Builtin::BI_bittestandreset64:
1026  case Builtin::BI_bittestandset64:
1027  case Builtin::BI_interlockedbittestandreset64:
1028  case Builtin::BI_interlockedbittestandset64:
1029  if (CheckBuiltinTargetSupport(*this, BuiltinID, TheCall,
1030  {llvm::Triple::x86_64, llvm::Triple::arm,
1031  llvm::Triple::thumb, llvm::Triple::aarch64}))
1032  return ExprError();
1033  break;
1034 
1035  case Builtin::BI__builtin_isgreater:
1036  case Builtin::BI__builtin_isgreaterequal:
1037  case Builtin::BI__builtin_isless:
1038  case Builtin::BI__builtin_islessequal:
1039  case Builtin::BI__builtin_islessgreater:
1040  case Builtin::BI__builtin_isunordered:
1041  if (SemaBuiltinUnorderedCompare(TheCall))
1042  return ExprError();
1043  break;
1044  case Builtin::BI__builtin_fpclassify:
1045  if (SemaBuiltinFPClassification(TheCall, 6))
1046  return ExprError();
1047  break;
1048  case Builtin::BI__builtin_isfinite:
1049  case Builtin::BI__builtin_isinf:
1050  case Builtin::BI__builtin_isinf_sign:
1051  case Builtin::BI__builtin_isnan:
1052  case Builtin::BI__builtin_isnormal:
1053  case Builtin::BI__builtin_signbit:
1054  case Builtin::BI__builtin_signbitf:
1055  case Builtin::BI__builtin_signbitl:
1056  if (SemaBuiltinFPClassification(TheCall, 1))
1057  return ExprError();
1058  break;
1059  case Builtin::BI__builtin_shufflevector:
1060  return SemaBuiltinShuffleVector(TheCall);
1061  // TheCall will be freed by the smart pointer here, but that's fine, since
1062  // SemaBuiltinShuffleVector guts it, but then doesn't release it.
1063  case Builtin::BI__builtin_prefetch:
1064  if (SemaBuiltinPrefetch(TheCall))
1065  return ExprError();
1066  break;
1067  case Builtin::BI__builtin_alloca_with_align:
1068  if (SemaBuiltinAllocaWithAlign(TheCall))
1069  return ExprError();
1070  break;
1071  case Builtin::BI__assume:
1072  case Builtin::BI__builtin_assume:
1073  if (SemaBuiltinAssume(TheCall))
1074  return ExprError();
1075  break;
1076  case Builtin::BI__builtin_assume_aligned:
1077  if (SemaBuiltinAssumeAligned(TheCall))
1078  return ExprError();
1079  break;
1080  case Builtin::BI__builtin_object_size:
1081  if (SemaBuiltinConstantArgRange(TheCall, 1, 0, 3))
1082  return ExprError();
1083  break;
1084  case Builtin::BI__builtin_longjmp:
1085  if (SemaBuiltinLongjmp(TheCall))
1086  return ExprError();
1087  break;
1088  case Builtin::BI__builtin_setjmp:
1089  if (SemaBuiltinSetjmp(TheCall))
1090  return ExprError();
1091  break;
1092  case Builtin::BI_setjmp:
1093  case Builtin::BI_setjmpex:
1094  if (checkArgCount(*this, TheCall, 1))
1095  return true;
1096  break;
1097  case Builtin::BI__builtin_classify_type:
1098  if (checkArgCount(*this, TheCall, 1)) return true;
1099  TheCall->setType(Context.IntTy);
1100  break;
1101  case Builtin::BI__builtin_constant_p:
1102  if (checkArgCount(*this, TheCall, 1)) return true;
1103  TheCall->setType(Context.IntTy);
1104  break;
1105  case Builtin::BI__builtin_launder:
1106  return SemaBuiltinLaunder(*this, TheCall);
1107  case Builtin::BI__sync_fetch_and_add:
1108  case Builtin::BI__sync_fetch_and_add_1:
1109  case Builtin::BI__sync_fetch_and_add_2:
1110  case Builtin::BI__sync_fetch_and_add_4:
1111  case Builtin::BI__sync_fetch_and_add_8:
1112  case Builtin::BI__sync_fetch_and_add_16:
1113  case Builtin::BI__sync_fetch_and_sub:
1114  case Builtin::BI__sync_fetch_and_sub_1:
1115  case Builtin::BI__sync_fetch_and_sub_2:
1116  case Builtin::BI__sync_fetch_and_sub_4:
1117  case Builtin::BI__sync_fetch_and_sub_8:
1118  case Builtin::BI__sync_fetch_and_sub_16:
1119  case Builtin::BI__sync_fetch_and_or:
1120  case Builtin::BI__sync_fetch_and_or_1:
1121  case Builtin::BI__sync_fetch_and_or_2:
1122  case Builtin::BI__sync_fetch_and_or_4:
1123  case Builtin::BI__sync_fetch_and_or_8:
1124  case Builtin::BI__sync_fetch_and_or_16:
1125  case Builtin::BI__sync_fetch_and_and:
1126  case Builtin::BI__sync_fetch_and_and_1:
1127  case Builtin::BI__sync_fetch_and_and_2:
1128  case Builtin::BI__sync_fetch_and_and_4:
1129  case Builtin::BI__sync_fetch_and_and_8:
1130  case Builtin::BI__sync_fetch_and_and_16:
1131  case Builtin::BI__sync_fetch_and_xor:
1132  case Builtin::BI__sync_fetch_and_xor_1:
1133  case Builtin::BI__sync_fetch_and_xor_2:
1134  case Builtin::BI__sync_fetch_and_xor_4:
1135  case Builtin::BI__sync_fetch_and_xor_8:
1136  case Builtin::BI__sync_fetch_and_xor_16:
1137  case Builtin::BI__sync_fetch_and_nand:
1138  case Builtin::BI__sync_fetch_and_nand_1:
1139  case Builtin::BI__sync_fetch_and_nand_2:
1140  case Builtin::BI__sync_fetch_and_nand_4:
1141  case Builtin::BI__sync_fetch_and_nand_8:
1142  case Builtin::BI__sync_fetch_and_nand_16:
1143  case Builtin::BI__sync_add_and_fetch:
1144  case Builtin::BI__sync_add_and_fetch_1:
1145  case Builtin::BI__sync_add_and_fetch_2:
1146  case Builtin::BI__sync_add_and_fetch_4:
1147  case Builtin::BI__sync_add_and_fetch_8:
1148  case Builtin::BI__sync_add_and_fetch_16:
1149  case Builtin::BI__sync_sub_and_fetch:
1150  case Builtin::BI__sync_sub_and_fetch_1:
1151  case Builtin::BI__sync_sub_and_fetch_2:
1152  case Builtin::BI__sync_sub_and_fetch_4:
1153  case Builtin::BI__sync_sub_and_fetch_8:
1154  case Builtin::BI__sync_sub_and_fetch_16:
1155  case Builtin::BI__sync_and_and_fetch:
1156  case Builtin::BI__sync_and_and_fetch_1:
1157  case Builtin::BI__sync_and_and_fetch_2:
1158  case Builtin::BI__sync_and_and_fetch_4:
1159  case Builtin::BI__sync_and_and_fetch_8:
1160  case Builtin::BI__sync_and_and_fetch_16:
1161  case Builtin::BI__sync_or_and_fetch:
1162  case Builtin::BI__sync_or_and_fetch_1:
1163  case Builtin::BI__sync_or_and_fetch_2:
1164  case Builtin::BI__sync_or_and_fetch_4:
1165  case Builtin::BI__sync_or_and_fetch_8:
1166  case Builtin::BI__sync_or_and_fetch_16:
1167  case Builtin::BI__sync_xor_and_fetch:
1168  case Builtin::BI__sync_xor_and_fetch_1:
1169  case Builtin::BI__sync_xor_and_fetch_2:
1170  case Builtin::BI__sync_xor_and_fetch_4:
1171  case Builtin::BI__sync_xor_and_fetch_8:
1172  case Builtin::BI__sync_xor_and_fetch_16:
1173  case Builtin::BI__sync_nand_and_fetch:
1174  case Builtin::BI__sync_nand_and_fetch_1:
1175  case Builtin::BI__sync_nand_and_fetch_2:
1176  case Builtin::BI__sync_nand_and_fetch_4:
1177  case Builtin::BI__sync_nand_and_fetch_8:
1178  case Builtin::BI__sync_nand_and_fetch_16:
1179  case Builtin::BI__sync_val_compare_and_swap:
1180  case Builtin::BI__sync_val_compare_and_swap_1:
1181  case Builtin::BI__sync_val_compare_and_swap_2:
1182  case Builtin::BI__sync_val_compare_and_swap_4:
1183  case Builtin::BI__sync_val_compare_and_swap_8:
1184  case Builtin::BI__sync_val_compare_and_swap_16:
1185  case Builtin::BI__sync_bool_compare_and_swap:
1186  case Builtin::BI__sync_bool_compare_and_swap_1:
1187  case Builtin::BI__sync_bool_compare_and_swap_2:
1188  case Builtin::BI__sync_bool_compare_and_swap_4:
1189  case Builtin::BI__sync_bool_compare_and_swap_8:
1190  case Builtin::BI__sync_bool_compare_and_swap_16:
1191  case Builtin::BI__sync_lock_test_and_set:
1192  case Builtin::BI__sync_lock_test_and_set_1:
1193  case Builtin::BI__sync_lock_test_and_set_2:
1194  case Builtin::BI__sync_lock_test_and_set_4:
1195  case Builtin::BI__sync_lock_test_and_set_8:
1196  case Builtin::BI__sync_lock_test_and_set_16:
1197  case Builtin::BI__sync_lock_release:
1198  case Builtin::BI__sync_lock_release_1:
1199  case Builtin::BI__sync_lock_release_2:
1200  case Builtin::BI__sync_lock_release_4:
1201  case Builtin::BI__sync_lock_release_8:
1202  case Builtin::BI__sync_lock_release_16:
1203  case Builtin::BI__sync_swap:
1204  case Builtin::BI__sync_swap_1:
1205  case Builtin::BI__sync_swap_2:
1206  case Builtin::BI__sync_swap_4:
1207  case Builtin::BI__sync_swap_8:
1208  case Builtin::BI__sync_swap_16:
1209  return SemaBuiltinAtomicOverloaded(TheCallResult);
1210  case Builtin::BI__sync_synchronize:
1211  Diag(TheCall->getBeginLoc(), diag::warn_atomic_implicit_seq_cst)
1212  << TheCall->getCallee()->getSourceRange();
1213  break;
1214  case Builtin::BI__builtin_nontemporal_load:
1215  case Builtin::BI__builtin_nontemporal_store:
1216  return SemaBuiltinNontemporalOverloaded(TheCallResult);
1217 #define BUILTIN(ID, TYPE, ATTRS)
1218 #define ATOMIC_BUILTIN(ID, TYPE, ATTRS) \
1219  case Builtin::BI##ID: \
1220  return SemaAtomicOpsOverloaded(TheCallResult, AtomicExpr::AO##ID);
1221 #include "clang/Basic/Builtins.def"
1222  case Builtin::BI__annotation:
1223  if (SemaBuiltinMSVCAnnotation(*this, TheCall))
1224  return ExprError();
1225  break;
1226  case Builtin::BI__builtin_annotation:
1227  if (SemaBuiltinAnnotation(*this, TheCall))
1228  return ExprError();
1229  break;
1230  case Builtin::BI__builtin_addressof:
1231  if (SemaBuiltinAddressof(*this, TheCall))
1232  return ExprError();
1233  break;
1234  case Builtin::BI__builtin_add_overflow:
1235  case Builtin::BI__builtin_sub_overflow:
1236  case Builtin::BI__builtin_mul_overflow:
1237  if (SemaBuiltinOverflow(*this, TheCall))
1238  return ExprError();
1239  break;
1240  case Builtin::BI__builtin_operator_new:
1241  case Builtin::BI__builtin_operator_delete: {
1242  bool IsDelete = BuiltinID == Builtin::BI__builtin_operator_delete;
1243  ExprResult Res =
1244  SemaBuiltinOperatorNewDeleteOverloaded(TheCallResult, IsDelete);
1245  if (Res.isInvalid())
1246  CorrectDelayedTyposInExpr(TheCallResult.get());
1247  return Res;
1248  }
1249  case Builtin::BI__builtin_dump_struct: {
1250  // We first want to ensure we are called with 2 arguments
1251  if (checkArgCount(*this, TheCall, 2))
1252  return ExprError();
1253  // Ensure that the first argument is of type 'struct XX *'
1254  const Expr *PtrArg = TheCall->getArg(0)->IgnoreParenImpCasts();
1255  const QualType PtrArgType = PtrArg->getType();
1256  if (!PtrArgType->isPointerType() ||
1257  !PtrArgType->getPointeeType()->isRecordType()) {
1258  Diag(PtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1259  << PtrArgType << "structure pointer" << 1 << 0 << 3 << 1 << PtrArgType
1260  << "structure pointer";
1261  return ExprError();
1262  }
1263 
1264  // Ensure that the second argument is of type 'FunctionType'
1265  const Expr *FnPtrArg = TheCall->getArg(1)->IgnoreImpCasts();
1266  const QualType FnPtrArgType = FnPtrArg->getType();
1267  if (!FnPtrArgType->isPointerType()) {
1268  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1269  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3 << 2
1270  << FnPtrArgType << "'int (*)(const char *, ...)'";
1271  return ExprError();
1272  }
1273 
1274  const auto *FuncType =
1275  FnPtrArgType->getPointeeType()->getAs<FunctionType>();
1276 
1277  if (!FuncType) {
1278  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1279  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3 << 2
1280  << FnPtrArgType << "'int (*)(const char *, ...)'";
1281  return ExprError();
1282  }
1283 
1284  if (const auto *FT = dyn_cast<FunctionProtoType>(FuncType)) {
1285  if (!FT->getNumParams()) {
1286  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1287  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3
1288  << 2 << FnPtrArgType << "'int (*)(const char *, ...)'";
1289  return ExprError();
1290  }
1291  QualType PT = FT->getParamType(0);
1292  if (!FT->isVariadic() || FT->getReturnType() != Context.IntTy ||
1293  !PT->isPointerType() || !PT->getPointeeType()->isCharType() ||
1294  !PT->getPointeeType().isConstQualified()) {
1295  Diag(FnPtrArg->getBeginLoc(), diag::err_typecheck_convert_incompatible)
1296  << FnPtrArgType << "'int (*)(const char *, ...)'" << 1 << 0 << 3
1297  << 2 << FnPtrArgType << "'int (*)(const char *, ...)'";
1298  return ExprError();
1299  }
1300  }
1301 
1302  TheCall->setType(Context.IntTy);
1303  break;
1304  }
1305 
1306  // check secure string manipulation functions where overflows
1307  // are detectable at compile time
1308  case Builtin::BI__builtin___memcpy_chk:
1309  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "memcpy");
1310  break;
1311  case Builtin::BI__builtin___memmove_chk:
1312  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "memmove");
1313  break;
1314  case Builtin::BI__builtin___memset_chk:
1315  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "memset");
1316  break;
1317  case Builtin::BI__builtin___strlcat_chk:
1318  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "strlcat");
1319  break;
1320  case Builtin::BI__builtin___strlcpy_chk:
1321  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "strlcpy");
1322  break;
1323  case Builtin::BI__builtin___strncat_chk:
1324  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "strncat");
1325  break;
1326  case Builtin::BI__builtin___strncpy_chk:
1327  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "strncpy");
1328  break;
1329  case Builtin::BI__builtin___stpncpy_chk:
1330  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 2, 3, "stpncpy");
1331  break;
1332  case Builtin::BI__builtin___memccpy_chk:
1333  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 3, 4, "memccpy");
1334  break;
1335  case Builtin::BI__builtin___snprintf_chk:
1336  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 1, 3, "snprintf");
1337  break;
1338  case Builtin::BI__builtin___vsnprintf_chk:
1339  SemaBuiltinMemChkCall(*this, FDecl, TheCall, 1, 3, "vsnprintf");
1340  break;
1341  case Builtin::BI__builtin_call_with_static_chain:
1342  if (SemaBuiltinCallWithStaticChain(*this, TheCall))
1343  return ExprError();
1344  break;
1345  case Builtin::BI__exception_code:
1346  case Builtin::BI_exception_code:
1347  if (SemaBuiltinSEHScopeCheck(*this, TheCall, Scope::SEHExceptScope,
1348  diag::err_seh___except_block))
1349  return ExprError();
1350  break;
1351  case Builtin::BI__exception_info:
1352  case Builtin::BI_exception_info:
1353  if (SemaBuiltinSEHScopeCheck(*this, TheCall, Scope::SEHFilterScope,
1354  diag::err_seh___except_filter))
1355  return ExprError();
1356  break;
1357  case Builtin::BI__GetExceptionInfo:
1358  if (checkArgCount(*this, TheCall, 1))
1359  return ExprError();
1360 
1361  if (CheckCXXThrowOperand(
1362  TheCall->getBeginLoc(),
1363  Context.getExceptionObjectType(FDecl->getParamDecl(0)->getType()),
1364  TheCall))
1365  return ExprError();
1366 
1367  TheCall->setType(Context.VoidPtrTy);
1368  break;
1369  // OpenCL v2.0, s6.13.16 - Pipe functions
1370  case Builtin::BIread_pipe:
1371  case Builtin::BIwrite_pipe:
1372  // Since those two functions are declared with var args, we need a semantic
1373  // check for the argument.
1374  if (SemaBuiltinRWPipe(*this, TheCall))
1375  return ExprError();
1376  break;
1377  case Builtin::BIreserve_read_pipe:
1378  case Builtin::BIreserve_write_pipe:
1379  case Builtin::BIwork_group_reserve_read_pipe:
1380  case Builtin::BIwork_group_reserve_write_pipe:
1381  if (SemaBuiltinReserveRWPipe(*this, TheCall))
1382  return ExprError();
1383  break;
1384  case Builtin::BIsub_group_reserve_read_pipe:
1385  case Builtin::BIsub_group_reserve_write_pipe:
1386  if (checkOpenCLSubgroupExt(*this, TheCall) ||
1387  SemaBuiltinReserveRWPipe(*this, TheCall))
1388  return ExprError();
1389  break;
1390  case Builtin::BIcommit_read_pipe:
1391  case Builtin::BIcommit_write_pipe:
1392  case Builtin::BIwork_group_commit_read_pipe:
1393  case Builtin::BIwork_group_commit_write_pipe:
1394  if (SemaBuiltinCommitRWPipe(*this, TheCall))
1395  return ExprError();
1396  break;
1397  case Builtin::BIsub_group_commit_read_pipe:
1398  case Builtin::BIsub_group_commit_write_pipe:
1399  if (checkOpenCLSubgroupExt(*this, TheCall) ||
1400  SemaBuiltinCommitRWPipe(*this, TheCall))
1401  return ExprError();
1402  break;
1403  case Builtin::BIget_pipe_num_packets:
1404  case Builtin::BIget_pipe_max_packets:
1405  if (SemaBuiltinPipePackets(*this, TheCall))
1406  return ExprError();
1407  break;
1408  case Builtin::BIto_global:
1409  case Builtin::BIto_local:
1410  case Builtin::BIto_private:
1411  if (SemaOpenCLBuiltinToAddr(*this, BuiltinID, TheCall))
1412  return ExprError();
1413  break;
1414  // OpenCL v2.0, s6.13.17 - Enqueue kernel functions.
1415  case Builtin::BIenqueue_kernel:
1416  if (SemaOpenCLBuiltinEnqueueKernel(*this, TheCall))
1417  return ExprError();
1418  break;
1419  case Builtin::BIget_kernel_work_group_size:
1420  case Builtin::BIget_kernel_preferred_work_group_size_multiple:
1421  if (SemaOpenCLBuiltinKernelWorkGroupSize(*this, TheCall))
1422  return ExprError();
1423  break;
1424  case Builtin::BIget_kernel_max_sub_group_size_for_ndrange:
1425  case Builtin::BIget_kernel_sub_group_count_for_ndrange:
1426  if (SemaOpenCLBuiltinNDRangeAndBlock(*this, TheCall))
1427  return ExprError();
1428  break;
1429  case Builtin::BI__builtin_os_log_format:
1430  case Builtin::BI__builtin_os_log_format_buffer_size:
1431  if (SemaBuiltinOSLogFormat(TheCall))
1432  return ExprError();
1433  break;
1434  }
1435 
1436  // Since the target specific builtins for each arch overlap, only check those
1437  // of the arch we are compiling for.
1438  if (Context.BuiltinInfo.isTSBuiltin(BuiltinID)) {
1439  switch (Context.getTargetInfo().getTriple().getArch()) {
1440  case llvm::Triple::arm:
1441  case llvm::Triple::armeb:
1442  case llvm::Triple::thumb:
1443  case llvm::Triple::thumbeb:
1444  if (CheckARMBuiltinFunctionCall(BuiltinID, TheCall))
1445  return ExprError();
1446  break;
1447  case llvm::Triple::aarch64:
1448  case llvm::Triple::aarch64_be:
1449  if (CheckAArch64BuiltinFunctionCall(BuiltinID, TheCall))
1450  return ExprError();
1451  break;
1452  case llvm::Triple::hexagon:
1453  if (CheckHexagonBuiltinFunctionCall(BuiltinID, TheCall))
1454  return ExprError();
1455  break;
1456  case llvm::Triple::mips:
1457  case llvm::Triple::mipsel:
1458  case llvm::Triple::mips64:
1459  case llvm::Triple::mips64el:
1460  if (CheckMipsBuiltinFunctionCall(BuiltinID, TheCall))
1461  return ExprError();
1462  break;
1463  case llvm::Triple::systemz:
1464  if (CheckSystemZBuiltinFunctionCall(BuiltinID, TheCall))
1465  return ExprError();
1466  break;
1467  case llvm::Triple::x86:
1468  case llvm::Triple::x86_64:
1469  if (CheckX86BuiltinFunctionCall(BuiltinID, TheCall))
1470  return ExprError();
1471  break;
1472  case llvm::Triple::ppc:
1473  case llvm::Triple::ppc64:
1474  case llvm::Triple::ppc64le:
1475  if (CheckPPCBuiltinFunctionCall(BuiltinID, TheCall))
1476  return ExprError();
1477  break;
1478  default:
1479  break;
1480  }
1481  }
1482 
1483  return TheCallResult;
1484 }
1485 
1486 // Get the valid immediate range for the specified NEON type code.
1487 static unsigned RFT(unsigned t, bool shift = false, bool ForceQuad = false) {
1488  NeonTypeFlags Type(t);
1489  int IsQuad = ForceQuad ? true : Type.isQuad();
1490  switch (Type.getEltType()) {
1491  case NeonTypeFlags::Int8:
1492  case NeonTypeFlags::Poly8:
1493  return shift ? 7 : (8 << IsQuad) - 1;
1494  case NeonTypeFlags::Int16:
1495  case NeonTypeFlags::Poly16:
1496  return shift ? 15 : (4 << IsQuad) - 1;
1497  case NeonTypeFlags::Int32:
1498  return shift ? 31 : (2 << IsQuad) - 1;
1499  case NeonTypeFlags::Int64:
1500  case NeonTypeFlags::Poly64:
1501  return shift ? 63 : (1 << IsQuad) - 1;
1502  case NeonTypeFlags::Poly128:
1503  return shift ? 127 : (1 << IsQuad) - 1;
1504  case NeonTypeFlags::Float16:
1505  assert(!shift && "cannot shift float types!");
1506  return (4 << IsQuad) - 1;
1507  case NeonTypeFlags::Float32:
1508  assert(!shift && "cannot shift float types!");
1509  return (2 << IsQuad) - 1;
1510  case NeonTypeFlags::Float64:
1511  assert(!shift && "cannot shift float types!");
1512  return (1 << IsQuad) - 1;
1513  }
1514  llvm_unreachable("Invalid NeonTypeFlag!");
1515 }
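
// Worked example of the range computation above (illustrative): a quad
// (128-bit) vector of Int32 has 2 << 1 == 4 lanes, so lane immediates are
// 0..3, while the shift form is bounded by the element width minus one (31).
//
//   static_assert((2 << 1) - 1 == 3, "lane indices for a quad vector of Int32");
//   static_assert((2 << 0) - 1 == 1, "lane indices for a 64-bit vector of Int32");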
1516 
1517 /// getNeonEltType - Return the QualType corresponding to the elements of
1518 /// the vector type specified by the NeonTypeFlags. This is used to check
1519 /// the pointer arguments for Neon load/store intrinsics.
1520 static QualType getNeonEltType(NeonTypeFlags Flags, ASTContext &Context,
1521  bool IsPolyUnsigned, bool IsInt64Long) {
1522  switch (Flags.getEltType()) {
1523  case NeonTypeFlags::Int8:
1524  return Flags.isUnsigned() ? Context.UnsignedCharTy : Context.SignedCharTy;
1525  case NeonTypeFlags::Int16:
1526  return Flags.isUnsigned() ? Context.UnsignedShortTy : Context.ShortTy;
1527  case NeonTypeFlags::Int32:
1528  return Flags.isUnsigned() ? Context.UnsignedIntTy : Context.IntTy;
1529  case NeonTypeFlags::Int64:
1530  if (IsInt64Long)
1531  return Flags.isUnsigned() ? Context.UnsignedLongTy : Context.LongTy;
1532  else
1533  return Flags.isUnsigned() ? Context.UnsignedLongLongTy
1534  : Context.LongLongTy;
1535  case NeonTypeFlags::Poly8:
1536  return IsPolyUnsigned ? Context.UnsignedCharTy : Context.SignedCharTy;
1537  case NeonTypeFlags::Poly16:
1538  return IsPolyUnsigned ? Context.UnsignedShortTy : Context.ShortTy;
1539  case NeonTypeFlags::Poly64:
1540  if (IsInt64Long)
1541  return Context.UnsignedLongTy;
1542  else
1543  return Context.UnsignedLongLongTy;
1544  case NeonTypeFlags::Poly128:
1545  break;
1546  case NeonTypeFlags::Float16:
1547  return Context.HalfTy;
1548  case NeonTypeFlags::Float32:
1549  return Context.FloatTy;
1550  case NeonTypeFlags::Float64:
1551  return Context.DoubleTy;
1552  }
1553  llvm_unreachable("Invalid NeonTypeFlag!");
1554 }
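// Illustration (not in the upstream source): examples of the mapping used when
// checking load/store pointer arguments --
//
//   Int8,  signed   -> SignedCharTy
//   Poly8           -> UnsignedCharTy on AArch64 (IsPolyUnsigned), else SignedCharTy
//   Int64, unsigned -> UnsignedLongTy when the target's int64_t is 'long'
//                      (IsInt64Long), otherwise UnsignedLongLongTy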
1555 
1556 bool Sema::CheckNeonBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
1557  llvm::APSInt Result;
1558  uint64_t mask = 0;
1559  unsigned TV = 0;
1560  int PtrArgNum = -1;
1561  bool HasConstPtr = false;
1562  switch (BuiltinID) {
1563 #define GET_NEON_OVERLOAD_CHECK
1564 #include "clang/Basic/arm_neon.inc"
1565 #include "clang/Basic/arm_fp16.inc"
1566 #undef GET_NEON_OVERLOAD_CHECK
1567  }
1568 
1569  // For NEON intrinsics which are overloaded on vector element type, validate
1570  // the immediate which specifies which variant to emit.
1571  unsigned ImmArg = TheCall->getNumArgs()-1;
1572  if (mask) {
1573  if (SemaBuiltinConstantArg(TheCall, ImmArg, Result))
1574  return true;
1575 
1576  TV = Result.getLimitedValue(64);
1577  if ((TV > 63) || (mask & (1ULL << TV)) == 0)
1578  return Diag(TheCall->getBeginLoc(), diag::err_invalid_neon_type_code)
1579  << TheCall->getArg(ImmArg)->getSourceRange();
1580  }
1581 
1582  if (PtrArgNum >= 0) {
1583  // Check that pointer arguments have the specified type.
1584  Expr *Arg = TheCall->getArg(PtrArgNum);
1585  if (ImplicitCastExpr *ICE = dyn_cast<ImplicitCastExpr>(Arg))
1586  Arg = ICE->getSubExpr();
1587  ExprResult RHS = DefaultFunctionArrayLvalueConversion(Arg);
1588  QualType RHSTy = RHS.get()->getType();
1589 
1590  llvm::Triple::ArchType Arch = Context.getTargetInfo().getTriple().getArch();
1591  bool IsPolyUnsigned = Arch == llvm::Triple::aarch64 ||
1592  Arch == llvm::Triple::aarch64_be;
1593  bool IsInt64Long =
1594  Context.getTargetInfo().getInt64Type() == TargetInfo::SignedLong;
1595  QualType EltTy =
1596  getNeonEltType(NeonTypeFlags(TV), Context, IsPolyUnsigned, IsInt64Long);
1597  if (HasConstPtr)
1598  EltTy = EltTy.withConst();
1599  QualType LHSTy = Context.getPointerType(EltTy);
1600  AssignConvertType ConvTy;
1601  ConvTy = CheckSingleAssignmentConstraints(LHSTy, RHS);
1602  if (RHS.isInvalid())
1603  return true;
1604  if (DiagnoseAssignmentResult(ConvTy, Arg->getBeginLoc(), LHSTy, RHSTy,
1605  RHS.get(), AA_Assigning))
1606  return true;
1607  }
1608 
1609  // For NEON intrinsics which take an immediate value as part of the
1610  // instruction, range check them here.
1611  unsigned i = 0, l = 0, u = 0;
1612  switch (BuiltinID) {
1613  default:
1614  return false;
1615  #define GET_NEON_IMMEDIATE_CHECK
1616  #include "clang/Basic/arm_neon.inc"
1617  #include "clang/Basic/arm_fp16.inc"
1618  #undef GET_NEON_IMMEDIATE_CHECK
1619  }
1620 
1621  return SemaBuiltinConstantArgRange(TheCall, i, l, u + l);
1622 }
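// Illustration (not in the upstream source): a typical rejection produced by
// the range check above, written in terms of an arm_neon.h intrinsic --
//
//   int8x8_t v = vdup_n_s8(0);
//   vget_lane_s8(v, 8);   // error: lane index must be in [0, 7]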
1623 
1624 bool Sema::CheckARMBuiltinExclusiveCall(unsigned BuiltinID, CallExpr *TheCall,
1625  unsigned MaxWidth) {
1626  assert((BuiltinID == ARM::BI__builtin_arm_ldrex ||
1627  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1628  BuiltinID == ARM::BI__builtin_arm_strex ||
1629  BuiltinID == ARM::BI__builtin_arm_stlex ||
1630  BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1631  BuiltinID == AArch64::BI__builtin_arm_ldaex ||
1632  BuiltinID == AArch64::BI__builtin_arm_strex ||
1633  BuiltinID == AArch64::BI__builtin_arm_stlex) &&
1634  "unexpected ARM builtin");
1635  bool IsLdrex = BuiltinID == ARM::BI__builtin_arm_ldrex ||
1636  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1637  BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1638  BuiltinID == AArch64::BI__builtin_arm_ldaex;
1639 
1640  DeclRefExpr *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
1641 
1642  // Ensure that we have the proper number of arguments.
1643  if (checkArgCount(*this, TheCall, IsLdrex ? 1 : 2))
1644  return true;
1645 
1646  // Inspect the pointer argument of the atomic builtin. This should always be
1647  // a pointer type, whose element is an integral scalar or pointer type.
1648  // Because it is a pointer type, we don't have to worry about any implicit
1649  // casts here.
1650  Expr *PointerArg = TheCall->getArg(IsLdrex ? 0 : 1);
1651  ExprResult PointerArgRes = DefaultFunctionArrayLvalueConversion(PointerArg);
1652  if (PointerArgRes.isInvalid())
1653  return true;
1654  PointerArg = PointerArgRes.get();
1655 
1656  const PointerType *pointerType = PointerArg->getType()->getAs<PointerType>();
1657  if (!pointerType) {
1658  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer)
1659  << PointerArg->getType() << PointerArg->getSourceRange();
1660  return true;
1661  }
1662 
1663  // ldrex takes a "const volatile T*" and strex takes a "volatile T*". Our next
1664  // task is to insert the appropriate casts into the AST. First work out just
1665  // what the appropriate type is.
1666  QualType ValType = pointerType->getPointeeType();
1667  QualType AddrType = ValType.getUnqualifiedType().withVolatile();
1668  if (IsLdrex)
1669  AddrType.addConst();
1670 
1671  // Issue a warning if the cast is dodgy.
1672  CastKind CastNeeded = CK_NoOp;
1673  if (!AddrType.isAtLeastAsQualifiedAs(ValType)) {
1674  CastNeeded = CK_BitCast;
1675  Diag(DRE->getBeginLoc(), diag::ext_typecheck_convert_discards_qualifiers)
1676  << PointerArg->getType() << Context.getPointerType(AddrType)
1677  << AA_Passing << PointerArg->getSourceRange();
1678  }
1679 
1680  // Finally, do the cast and replace the argument with the corrected version.
1681  AddrType = Context.getPointerType(AddrType);
1682  PointerArgRes = ImpCastExprToType(PointerArg, AddrType, CastNeeded);
1683  if (PointerArgRes.isInvalid())
1684  return true;
1685  PointerArg = PointerArgRes.get();
1686 
1687  TheCall->setArg(IsLdrex ? 0 : 1, PointerArg);
1688 
1689  // In general, we allow ints, floats and pointers to be loaded and stored.
1690  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
1691  !ValType->isBlockPointerType() && !ValType->isFloatingType()) {
1692  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer_intfltptr)
1693  << PointerArg->getType() << PointerArg->getSourceRange();
1694  return true;
1695  }
1696 
1697  // But ARM doesn't have instructions to deal with 128-bit versions.
1698  if (Context.getTypeSize(ValType) > MaxWidth) {
1699  assert(MaxWidth == 64 && "Diagnostic unexpectedly inaccurate");
1700  Diag(DRE->getBeginLoc(), diag::err_atomic_exclusive_builtin_pointer_size)
1701  << PointerArg->getType() << PointerArg->getSourceRange();
1702  return true;
1703  }
1704 
1705  switch (ValType.getObjCLifetime()) {
1706  case Qualifiers::OCL_None:
1707  case Qualifiers::OCL_ExplicitNone:
1708  // okay
1709  break;
1710 
1711  case Qualifiers::OCL_Weak:
1712  case Qualifiers::OCL_Strong:
1713  case Qualifiers::OCL_Autoreleasing:
1714  Diag(DRE->getBeginLoc(), diag::err_arc_atomic_ownership)
1715  << ValType << PointerArg->getSourceRange();
1716  return true;
1717  }
1718 
1719  if (IsLdrex) {
1720  TheCall->setType(ValType);
1721  return false;
1722  }
1723 
1724  // Initialize the argument to be stored.
1725  ExprResult ValArg = TheCall->getArg(0);
1726  InitializedEntity Entity = InitializedEntity::InitializeParameter(
1727  Context, ValType, /*consume*/ false);
1728  ValArg = PerformCopyInitialization(Entity, SourceLocation(), ValArg);
1729  if (ValArg.isInvalid())
1730  return true;
1731  TheCall->setArg(0, ValArg.get());
1732 
1733  // __builtin_arm_strex always returns an int. It's marked as such in the .def,
1734  // but the custom checker bypasses all default analysis.
1735  TheCall->setType(Context.IntTy);
1736  return false;
1737 }
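// Illustration (not in the upstream source): a minimal sketch of the calls this
// routine validates (the variable names are hypothetical) --
//
//   int *p = ...;
//   int old    = __builtin_arm_ldrex(p);             // 1 argument: the pointer
//   int failed = __builtin_arm_strex(old + 1, p);    // value first, then pointer
//
// The pointer argument is cast to '[const] volatile T *', ldrex/ldaex produce a
// T, and strex/stlex always produce an int status, as set above.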
1738 
1739 bool Sema::CheckARMBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
1740  if (BuiltinID == ARM::BI__builtin_arm_ldrex ||
1741  BuiltinID == ARM::BI__builtin_arm_ldaex ||
1742  BuiltinID == ARM::BI__builtin_arm_strex ||
1743  BuiltinID == ARM::BI__builtin_arm_stlex) {
1744  return CheckARMBuiltinExclusiveCall(BuiltinID, TheCall, 64);
1745  }
1746 
1747  if (BuiltinID == ARM::BI__builtin_arm_prefetch) {
1748  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
1749  SemaBuiltinConstantArgRange(TheCall, 2, 0, 1);
1750  }
1751 
1752  if (BuiltinID == ARM::BI__builtin_arm_rsr64 ||
1753  BuiltinID == ARM::BI__builtin_arm_wsr64)
1754  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 3, false);
1755 
1756  if (BuiltinID == ARM::BI__builtin_arm_rsr ||
1757  BuiltinID == ARM::BI__builtin_arm_rsrp ||
1758  BuiltinID == ARM::BI__builtin_arm_wsr ||
1759  BuiltinID == ARM::BI__builtin_arm_wsrp)
1760  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1761 
1762  if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
1763  return true;
1764 
1765  // For intrinsics which take an immediate value as part of the instruction,
1766  // range check them here.
1767  // FIXME: VFP Intrinsics should error if VFP not present.
1768  switch (BuiltinID) {
1769  default: return false;
1770  case ARM::BI__builtin_arm_ssat:
1771  return SemaBuiltinConstantArgRange(TheCall, 1, 1, 32);
1772  case ARM::BI__builtin_arm_usat:
1773  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 31);
1774  case ARM::BI__builtin_arm_ssat16:
1775  return SemaBuiltinConstantArgRange(TheCall, 1, 1, 16);
1776  case ARM::BI__builtin_arm_usat16:
1777  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 15);
1778  case ARM::BI__builtin_arm_vcvtr_f:
1779  case ARM::BI__builtin_arm_vcvtr_d:
1780  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1);
1781  case ARM::BI__builtin_arm_dmb:
1782  case ARM::BI__builtin_arm_dsb:
1783  case ARM::BI__builtin_arm_isb:
1784  case ARM::BI__builtin_arm_dbg:
1785  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 15);
1786  }
1787 }
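// Illustration (not in the upstream source): the saturation bounds above track
// the instruction encodings --
//
//   __builtin_arm_ssat(x, 32);   // ok:       ssat immediate range is [1, 32]
//   __builtin_arm_ssat(x, 0);    // rejected
//   __builtin_arm_usat(x, 32);   // rejected: usat immediate range is [0, 31]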
1788 
1789 bool Sema::CheckAArch64BuiltinFunctionCall(unsigned BuiltinID,
1790  CallExpr *TheCall) {
1791  if (BuiltinID == AArch64::BI__builtin_arm_ldrex ||
1792  BuiltinID == AArch64::BI__builtin_arm_ldaex ||
1793  BuiltinID == AArch64::BI__builtin_arm_strex ||
1794  BuiltinID == AArch64::BI__builtin_arm_stlex) {
1795  return CheckARMBuiltinExclusiveCall(BuiltinID, TheCall, 128);
1796  }
1797 
1798  if (BuiltinID == AArch64::BI__builtin_arm_prefetch) {
1799  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
1800  SemaBuiltinConstantArgRange(TheCall, 2, 0, 2) ||
1801  SemaBuiltinConstantArgRange(TheCall, 3, 0, 1) ||
1802  SemaBuiltinConstantArgRange(TheCall, 4, 0, 1);
1803  }
1804 
1805  if (BuiltinID == AArch64::BI__builtin_arm_rsr64 ||
1806  BuiltinID == AArch64::BI__builtin_arm_wsr64)
1807  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1808 
1809  if (BuiltinID == AArch64::BI__builtin_arm_rsr ||
1810  BuiltinID == AArch64::BI__builtin_arm_rsrp ||
1811  BuiltinID == AArch64::BI__builtin_arm_wsr ||
1812  BuiltinID == AArch64::BI__builtin_arm_wsrp)
1813  return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
1814 
1815  // Only check the valid encoding range. Any constant in this range would be
1816  // converted to a register of the form S1_2_C3_C4_5. Let the hardware throw
1817  // an exception for incorrect registers. This matches MSVC behavior.
1818  if (BuiltinID == AArch64::BI_ReadStatusReg ||
1819  BuiltinID == AArch64::BI_WriteStatusReg)
1820  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 0x7fff);
1821 
1822  if (BuiltinID == AArch64::BI__getReg)
1823  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 31);
1824 
1825  if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
1826  return true;
1827 
1828  // For intrinsics which take an immediate value as part of the instruction,
1829  // range check them here.
1830  unsigned i = 0, l = 0, u = 0;
1831  switch (BuiltinID) {
1832  default: return false;
1833  case AArch64::BI__builtin_arm_dmb:
1834  case AArch64::BI__builtin_arm_dsb:
1835  case AArch64::BI__builtin_arm_isb: l = 0; u = 15; break;
1836  }
1837 
1838  return SemaBuiltinConstantArgRange(TheCall, i, l, u + l);
1839 }
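// Illustration (not in the upstream source): examples of the MSVC-compatible
// AArch64 checks above --
//
//   __getReg(32);              // rejected: register number must be in [0, 31]
//   _ReadStatusReg(0x8000);    // rejected: only encodings in [0, 0x7fff] are
//                              // accepted; invalid registers fault at run time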
1840 
1841 bool Sema::CheckHexagonBuiltinCpu(unsigned BuiltinID, CallExpr *TheCall) {
1842  struct BuiltinAndString {
1843  unsigned BuiltinID;
1844  const char *Str;
1845  };
1846 
1847  static BuiltinAndString ValidCPU[] = {
1848  { Hexagon::BI__builtin_HEXAGON_A6_vcmpbeq_notany, "v65,v66" },
1849  { Hexagon::BI__builtin_HEXAGON_A6_vminub_RdP, "v62,v65,v66" },
1850  { Hexagon::BI__builtin_HEXAGON_F2_dfadd, "v66" },
1851  { Hexagon::BI__builtin_HEXAGON_F2_dfsub, "v66" },
1852  { Hexagon::BI__builtin_HEXAGON_M2_mnaci, "v66" },
1853  { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffb, "v62,v65,v66" },
1854  { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffub, "v62,v65,v66" },
1855  { Hexagon::BI__builtin_HEXAGON_S2_mask, "v66" },
1856  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_acc, "v60,v62,v65,v66" },
1857  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_and, "v60,v62,v65,v66" },
1858  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_nac, "v60,v62,v65,v66" },
1859  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_or, "v60,v62,v65,v66" },
1860  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p, "v60,v62,v65,v66" },
1861  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_xacc, "v60,v62,v65,v66" },
1862  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_acc, "v60,v62,v65,v66" },
1863  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_and, "v60,v62,v65,v66" },
1864  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_nac, "v60,v62,v65,v66" },
1865  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_or, "v60,v62,v65,v66" },
1866  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r, "v60,v62,v65,v66" },
1867  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_xacc, "v60,v62,v65,v66" },
1868  { Hexagon::BI__builtin_HEXAGON_S6_vsplatrbp, "v62,v65,v66" },
1869  { Hexagon::BI__builtin_HEXAGON_S6_vtrunehb_ppp, "v62,v65,v66" },
1870  { Hexagon::BI__builtin_HEXAGON_S6_vtrunohb_ppp, "v62,v65,v66" },
1871  };
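// Illustration (not in the upstream source): each entry pairs a builtin with the
// comma-separated list of Hexagon CPU revisions that provide it, so e.g.
// __builtin_HEXAGON_F2_dfadd is diagnosed when compiling for v62 but accepted
// for v66.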
1872 
1873  static BuiltinAndString ValidHVX[] = {
1874  { Hexagon::BI__builtin_HEXAGON_V6_hi, "v60,v62,v65,v66" },
1875  { Hexagon::BI__builtin_HEXAGON_V6_hi_128B, "v60,v62,v65,v66" },
1876  { Hexagon::BI__builtin_HEXAGON_V6_lo, "v60,v62,v65,v66" },
1877  { Hexagon::BI__builtin_HEXAGON_V6_lo_128B, "v60,v62,v65,v66" },
1878  { Hexagon::BI__builtin_HEXAGON_V6_extractw, "v60,v62,v65,v66" },
1879  { Hexagon::BI__builtin_HEXAGON_V6_extractw_128B, "v60,v62,v65,v66" },
1880  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb, "v62,v65,v66" },
1881  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb_128B, "v62,v65,v66" },
1882  { Hexagon::BI__builtin_HEXAGON_V6_lvsplath, "v62,v65,v66" },
1883  { Hexagon::BI__builtin_HEXAGON_V6_lvsplath_128B, "v62,v65,v66" },
1884  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw, "v60,v62,v65,v66" },
1885  { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw_128B, "v60,v62,v65,v66" },
1886  { Hexagon::BI__builtin_HEXAGON_V6_pred_and, "v60,v62,v65,v66" },
1887  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_128B, "v60,v62,v65,v66" },
1888  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n, "v60,v62,v65,v66" },
1889  { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n_128B, "v60,v62,v65,v66" },
1890  { Hexagon::BI__builtin_HEXAGON_V6_pred_not, "v60,v62,v65,v66" },
1891  { Hexagon::BI__builtin_HEXAGON_V6_pred_not_128B, "v60,v62,v65,v66" },
1892  { Hexagon::BI__builtin_HEXAGON_V6_pred_or, "v60,v62,v65,v66" },
1893  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_128B, "v60,v62,v65,v66" },
1894  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n, "v60,v62,v65,v66" },
1895  { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n_128B, "v60,v62,v65,v66" },
1896  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2, "v60,v62,v65,v66" },
1897  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2_128B, "v60,v62,v65,v66" },
1898  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2, "v62,v65,v66" },
1899  { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2_128B, "v62,v65,v66" },
1900  { Hexagon::BI__builtin_HEXAGON_V6_pred_xor, "v60,v62,v65,v66" },
1901  { Hexagon::BI__builtin_HEXAGON_V6_pred_xor_128B, "v60,v62,v65,v66" },
1902  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh, "v62,v65,v66" },
1903  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh_128B, "v62,v65,v66" },
1904  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw, "v62,v65,v66" },
1905  { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw_128B, "v62,v65,v66" },
1906  { Hexagon::BI__builtin_HEXAGON_V6_vabsb, "v65,v66" },
1907  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_128B, "v65,v66" },
1908  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat, "v65,v66" },
1909  { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat_128B, "v65,v66" },
1910  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh, "v60,v62,v65,v66" },
1911  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh_128B, "v60,v62,v65,v66" },
1912  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub, "v60,v62,v65,v66" },
1913  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub_128B, "v60,v62,v65,v66" },
1914  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh, "v60,v62,v65,v66" },
1915  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh_128B, "v60,v62,v65,v66" },
1916  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw, "v60,v62,v65,v66" },
1917  { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw_128B, "v60,v62,v65,v66" },
1918  { Hexagon::BI__builtin_HEXAGON_V6_vabsh, "v60,v62,v65,v66" },
1919  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_128B, "v60,v62,v65,v66" },
1920  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat, "v60,v62,v65,v66" },
1921  { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat_128B, "v60,v62,v65,v66" },
1922  { Hexagon::BI__builtin_HEXAGON_V6_vabsw, "v60,v62,v65,v66" },
1923  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_128B, "v60,v62,v65,v66" },
1924  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat, "v60,v62,v65,v66" },
1925  { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat_128B, "v60,v62,v65,v66" },
1926  { Hexagon::BI__builtin_HEXAGON_V6_vaddb, "v60,v62,v65,v66" },
1927  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_128B, "v60,v62,v65,v66" },
1928  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv, "v60,v62,v65,v66" },
1929  { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv_128B, "v60,v62,v65,v66" },
1930  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat, "v62,v65,v66" },
1931  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_128B, "v62,v65,v66" },
1932  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv, "v62,v65,v66" },
1933  { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv_128B, "v62,v65,v66" },
1934  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry, "v62,v65,v66" },
1935  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry_128B, "v62,v65,v66" },
1936  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat, "v66" },
1937  { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat_128B, "v66" },
1938  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh, "v62,v65,v66" },
1939  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh_128B, "v62,v65,v66" },
1940  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw, "v62,v65,v66" },
1941  { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw_128B, "v62,v65,v66" },
1942  { Hexagon::BI__builtin_HEXAGON_V6_vaddh, "v60,v62,v65,v66" },
1943  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_128B, "v60,v62,v65,v66" },
1944  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv, "v60,v62,v65,v66" },
1945  { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv_128B, "v60,v62,v65,v66" },
1946  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat, "v60,v62,v65,v66" },
1947  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_128B, "v60,v62,v65,v66" },
1948  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv, "v60,v62,v65,v66" },
1949  { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv_128B, "v60,v62,v65,v66" },
1950  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw, "v60,v62,v65,v66" },
1951  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_128B, "v60,v62,v65,v66" },
1952  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc, "v62,v65,v66" },
1953  { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc_128B, "v62,v65,v66" },
1954  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh, "v60,v62,v65,v66" },
1955  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_128B, "v60,v62,v65,v66" },
1956  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc, "v62,v65,v66" },
1957  { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc_128B, "v62,v65,v66" },
1958  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat, "v60,v62,v65,v66" },
1959  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_128B, "v60,v62,v65,v66" },
1960  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv, "v60,v62,v65,v66" },
1961  { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv_128B, "v60,v62,v65,v66" },
1962  { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat, "v62,v65,v66" },
1963  { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat_128B, "v62,v65,v66" },
1964  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat, "v60,v62,v65,v66" },
1965  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_128B, "v60,v62,v65,v66" },
1966  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv, "v60,v62,v65,v66" },
1967  { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv_128B, "v60,v62,v65,v66" },
1968  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw, "v60,v62,v65,v66" },
1969  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_128B, "v60,v62,v65,v66" },
1970  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc, "v62,v65,v66" },
1971  { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc_128B, "v62,v65,v66" },
1972  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat, "v62,v65,v66" },
1973  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_128B, "v62,v65,v66" },
1974  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv, "v62,v65,v66" },
1975  { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv_128B, "v62,v65,v66" },
1976  { Hexagon::BI__builtin_HEXAGON_V6_vaddw, "v60,v62,v65,v66" },
1977  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_128B, "v60,v62,v65,v66" },
1978  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv, "v60,v62,v65,v66" },
1979  { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv_128B, "v60,v62,v65,v66" },
1980  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat, "v60,v62,v65,v66" },
1981  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_128B, "v60,v62,v65,v66" },
1982  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv, "v60,v62,v65,v66" },
1983  { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv_128B, "v60,v62,v65,v66" },
1984  { Hexagon::BI__builtin_HEXAGON_V6_valignb, "v60,v62,v65,v66" },
1985  { Hexagon::BI__builtin_HEXAGON_V6_valignb_128B, "v60,v62,v65,v66" },
1986  { Hexagon::BI__builtin_HEXAGON_V6_valignbi, "v60,v62,v65,v66" },
1987  { Hexagon::BI__builtin_HEXAGON_V6_valignbi_128B, "v60,v62,v65,v66" },
1988  { Hexagon::BI__builtin_HEXAGON_V6_vand, "v60,v62,v65,v66" },
1989  { Hexagon::BI__builtin_HEXAGON_V6_vand_128B, "v60,v62,v65,v66" },
1990  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt, "v62,v65,v66" },
1991  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_128B, "v62,v65,v66" },
1992  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc, "v62,v65,v66" },
1993  { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc_128B, "v62,v65,v66" },
1994  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt, "v60,v62,v65,v66" },
1995  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_128B, "v60,v62,v65,v66" },
1996  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc, "v60,v62,v65,v66" },
1997  { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc_128B, "v60,v62,v65,v66" },
1998  { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv, "v62,v65,v66" },
1999  { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv_128B, "v62,v65,v66" },
2000  { Hexagon::BI__builtin_HEXAGON_V6_vandvqv, "v62,v65,v66" },
2001  { Hexagon::BI__builtin_HEXAGON_V6_vandvqv_128B, "v62,v65,v66" },
2002  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt, "v60,v62,v65,v66" },
2003  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_128B, "v60,v62,v65,v66" },
2004  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc, "v60,v62,v65,v66" },
2005  { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc_128B, "v60,v62,v65,v66" },
2006  { Hexagon::BI__builtin_HEXAGON_V6_vaslh, "v60,v62,v65,v66" },
2007  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_128B, "v60,v62,v65,v66" },
2008  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc, "v65,v66" },
2009  { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc_128B, "v65,v66" },
2010  { Hexagon::BI__builtin_HEXAGON_V6_vaslhv, "v60,v62,v65,v66" },
2011  { Hexagon::BI__builtin_HEXAGON_V6_vaslhv_128B, "v60,v62,v65,v66" },
2012  { Hexagon::BI__builtin_HEXAGON_V6_vaslw, "v60,v62,v65,v66" },
2013  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_128B, "v60,v62,v65,v66" },
2014  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc, "v60,v62,v65,v66" },
2015  { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc_128B, "v60,v62,v65,v66" },
2016  { Hexagon::BI__builtin_HEXAGON_V6_vaslwv, "v60,v62,v65,v66" },
2017  { Hexagon::BI__builtin_HEXAGON_V6_vaslwv_128B, "v60,v62,v65,v66" },
2018  { Hexagon::BI__builtin_HEXAGON_V6_vasrh, "v60,v62,v65,v66" },
2019  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_128B, "v60,v62,v65,v66" },
2020  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc, "v65,v66" },
2021  { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc_128B, "v65,v66" },
2022  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat, "v60,v62,v65,v66" },
2023  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat_128B, "v60,v62,v65,v66" },
2024  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat, "v62,v65,v66" },
2025  { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat_128B, "v62,v65,v66" },
2026  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat, "v60,v62,v65,v66" },
2027  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat_128B, "v60,v62,v65,v66" },
2028  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat, "v60,v62,v65,v66" },
2029  { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat_128B, "v60,v62,v65,v66" },
2030  { Hexagon::BI__builtin_HEXAGON_V6_vasrhv, "v60,v62,v65,v66" },
2031  { Hexagon::BI__builtin_HEXAGON_V6_vasrhv_128B, "v60,v62,v65,v66" },
2032  { Hexagon::BI__builtin_HEXAGON_V6_vasr_into, "v66" },
2033  { Hexagon::BI__builtin_HEXAGON_V6_vasr_into_128B, "v66" },
2034  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat, "v65,v66" },
2035  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat_128B, "v65,v66" },
2036  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat, "v65,v66" },
2037  { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat_128B, "v65,v66" },
2038  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat, "v62,v65,v66" },
2039  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat_128B, "v62,v65,v66" },
2040  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat, "v65,v66" },
2041  { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat_128B, "v65,v66" },
2042  { Hexagon::BI__builtin_HEXAGON_V6_vasrw, "v60,v62,v65,v66" },
2043  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_128B, "v60,v62,v65,v66" },
2044  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc, "v60,v62,v65,v66" },
2045  { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc_128B, "v60,v62,v65,v66" },
2046  { Hexagon::BI__builtin_HEXAGON_V6_vasrwh, "v60,v62,v65,v66" },
2047  { Hexagon::BI__builtin_HEXAGON_V6_vasrwh_128B, "v60,v62,v65,v66" },
2048  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat, "v60,v62,v65,v66" },
2049  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat_128B, "v60,v62,v65,v66" },
2050  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat, "v60,v62,v65,v66" },
2051  { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat_128B, "v60,v62,v65,v66" },
2052  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat, "v62,v65,v66" },
2053  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat_128B, "v62,v65,v66" },
2054  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat, "v60,v62,v65,v66" },
2055  { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat_128B, "v60,v62,v65,v66" },
2056  { Hexagon::BI__builtin_HEXAGON_V6_vasrwv, "v60,v62,v65,v66" },
2057  { Hexagon::BI__builtin_HEXAGON_V6_vasrwv_128B, "v60,v62,v65,v66" },
2058  { Hexagon::BI__builtin_HEXAGON_V6_vassign, "v60,v62,v65,v66" },
2059  { Hexagon::BI__builtin_HEXAGON_V6_vassign_128B, "v60,v62,v65,v66" },
2060  { Hexagon::BI__builtin_HEXAGON_V6_vassignp, "v60,v62,v65,v66" },
2061  { Hexagon::BI__builtin_HEXAGON_V6_vassignp_128B, "v60,v62,v65,v66" },
2062  { Hexagon::BI__builtin_HEXAGON_V6_vavgb, "v65,v66" },
2063  { Hexagon::BI__builtin_HEXAGON_V6_vavgb_128B, "v65,v66" },
2064  { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd, "v65,v66" },
2065  { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd_128B, "v65,v66" },
2066  { Hexagon::BI__builtin_HEXAGON_V6_vavgh, "v60,v62,v65,v66" },
2067  { Hexagon::BI__builtin_HEXAGON_V6_vavgh_128B, "v60,v62,v65,v66" },
2068  { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd, "v60,v62,v65,v66" },
2069  { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd_128B, "v60,v62,v65,v66" },
2070  { Hexagon::BI__builtin_HEXAGON_V6_vavgub, "v60,v62,v65,v66" },
2071  { Hexagon::BI__builtin_HEXAGON_V6_vavgub_128B, "v60,v62,v65,v66" },
2072  { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd, "v60,v62,v65,v66" },
2073  { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd_128B, "v60,v62,v65,v66" },
2074  { Hexagon::BI__builtin_HEXAGON_V6_vavguh, "v60,v62,v65,v66" },
2075  { Hexagon::BI__builtin_HEXAGON_V6_vavguh_128B, "v60,v62,v65,v66" },
2076  { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd, "v60,v62,v65,v66" },
2077  { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd_128B, "v60,v62,v65,v66" },
2078  { Hexagon::BI__builtin_HEXAGON_V6_vavguw, "v65,v66" },
2079  { Hexagon::BI__builtin_HEXAGON_V6_vavguw_128B, "v65,v66" },
2080  { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd, "v65,v66" },
2081  { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd_128B, "v65,v66" },
2082  { Hexagon::BI__builtin_HEXAGON_V6_vavgw, "v60,v62,v65,v66" },
2083  { Hexagon::BI__builtin_HEXAGON_V6_vavgw_128B, "v60,v62,v65,v66" },
2084  { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd, "v60,v62,v65,v66" },
2085  { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd_128B, "v60,v62,v65,v66" },
2086  { Hexagon::BI__builtin_HEXAGON_V6_vcl0h, "v60,v62,v65,v66" },
2087  { Hexagon::BI__builtin_HEXAGON_V6_vcl0h_128B, "v60,v62,v65,v66" },
2088  { Hexagon::BI__builtin_HEXAGON_V6_vcl0w, "v60,v62,v65,v66" },
2089  { Hexagon::BI__builtin_HEXAGON_V6_vcl0w_128B, "v60,v62,v65,v66" },
2090  { Hexagon::BI__builtin_HEXAGON_V6_vcombine, "v60,v62,v65,v66" },
2091  { Hexagon::BI__builtin_HEXAGON_V6_vcombine_128B, "v60,v62,v65,v66" },
2092  { Hexagon::BI__builtin_HEXAGON_V6_vd0, "v60,v62,v65,v66" },
2093  { Hexagon::BI__builtin_HEXAGON_V6_vd0_128B, "v60,v62,v65,v66" },
2094  { Hexagon::BI__builtin_HEXAGON_V6_vdd0, "v65,v66" },
2095  { Hexagon::BI__builtin_HEXAGON_V6_vdd0_128B, "v65,v66" },
2096  { Hexagon::BI__builtin_HEXAGON_V6_vdealb, "v60,v62,v65,v66" },
2097  { Hexagon::BI__builtin_HEXAGON_V6_vdealb_128B, "v60,v62,v65,v66" },
2098  { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w, "v60,v62,v65,v66" },
2099  { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w_128B, "v60,v62,v65,v66" },
2100  { Hexagon::BI__builtin_HEXAGON_V6_vdealh, "v60,v62,v65,v66" },
2101  { Hexagon::BI__builtin_HEXAGON_V6_vdealh_128B, "v60,v62,v65,v66" },
2102  { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd, "v60,v62,v65,v66" },
2103  { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd_128B, "v60,v62,v65,v66" },
2104  { Hexagon::BI__builtin_HEXAGON_V6_vdelta, "v60,v62,v65,v66" },
2105  { Hexagon::BI__builtin_HEXAGON_V6_vdelta_128B, "v60,v62,v65,v66" },
2106  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus, "v60,v62,v65,v66" },
2107  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_128B, "v60,v62,v65,v66" },
2108  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc, "v60,v62,v65,v66" },
2109  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc_128B, "v60,v62,v65,v66" },
2110  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv, "v60,v62,v65,v66" },
2111  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_128B, "v60,v62,v65,v66" },
2112  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc, "v60,v62,v65,v66" },
2113  { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc_128B, "v60,v62,v65,v66" },
2114  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb, "v60,v62,v65,v66" },
2115  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_128B, "v60,v62,v65,v66" },
2116  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc, "v60,v62,v65,v66" },
2117  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc_128B, "v60,v62,v65,v66" },
2118  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv, "v60,v62,v65,v66" },
2119  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_128B, "v60,v62,v65,v66" },
2120  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc, "v60,v62,v65,v66" },
2121  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc_128B, "v60,v62,v65,v66" },
2122  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat, "v60,v62,v65,v66" },
2123  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_128B, "v60,v62,v65,v66" },
2124  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc, "v60,v62,v65,v66" },
2125  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc_128B, "v60,v62,v65,v66" },
2126  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat, "v60,v62,v65,v66" },
2127  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_128B, "v60,v62,v65,v66" },
2128  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc, "v60,v62,v65,v66" },
2129  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc_128B, "v60,v62,v65,v66" },
2130  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat, "v60,v62,v65,v66" },
2131  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_128B, "v60,v62,v65,v66" },
2132  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc, "v60,v62,v65,v66" },
2133  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc_128B, "v60,v62,v65,v66" },
2134  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat, "v60,v62,v65,v66" },
2135  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_128B, "v60,v62,v65,v66" },
2136  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc, "v60,v62,v65,v66" },
2137  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc_128B, "v60,v62,v65,v66" },
2138  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat, "v60,v62,v65,v66" },
2139  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_128B, "v60,v62,v65,v66" },
2140  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc, "v60,v62,v65,v66" },
2141  { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc_128B, "v60,v62,v65,v66" },
2142  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh, "v60,v62,v65,v66" },
2143  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_128B, "v60,v62,v65,v66" },
2144  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc, "v60,v62,v65,v66" },
2145  { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc_128B, "v60,v62,v65,v66" },
2146  { Hexagon::BI__builtin_HEXAGON_V6_veqb, "v60,v62,v65,v66" },
2147  { Hexagon::BI__builtin_HEXAGON_V6_veqb_128B, "v60,v62,v65,v66" },
2148  { Hexagon::BI__builtin_HEXAGON_V6_veqb_and, "v60,v62,v65,v66" },
2149  { Hexagon::BI__builtin_HEXAGON_V6_veqb_and_128B, "v60,v62,v65,v66" },
2150  { Hexagon::BI__builtin_HEXAGON_V6_veqb_or, "v60,v62,v65,v66" },
2151  { Hexagon::BI__builtin_HEXAGON_V6_veqb_or_128B, "v60,v62,v65,v66" },
2152  { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor, "v60,v62,v65,v66" },
2153  { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor_128B, "v60,v62,v65,v66" },
2154  { Hexagon::BI__builtin_HEXAGON_V6_veqh, "v60,v62,v65,v66" },
2155  { Hexagon::BI__builtin_HEXAGON_V6_veqh_128B, "v60,v62,v65,v66" },
2156  { Hexagon::BI__builtin_HEXAGON_V6_veqh_and, "v60,v62,v65,v66" },
2157  { Hexagon::BI__builtin_HEXAGON_V6_veqh_and_128B, "v60,v62,v65,v66" },
2158  { Hexagon::BI__builtin_HEXAGON_V6_veqh_or, "v60,v62,v65,v66" },
2159  { Hexagon::BI__builtin_HEXAGON_V6_veqh_or_128B, "v60,v62,v65,v66" },
2160  { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor, "v60,v62,v65,v66" },
2161  { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor_128B, "v60,v62,v65,v66" },
2162  { Hexagon::BI__builtin_HEXAGON_V6_veqw, "v60,v62,v65,v66" },
2163  { Hexagon::BI__builtin_HEXAGON_V6_veqw_128B, "v60,v62,v65,v66" },
2164  { Hexagon::BI__builtin_HEXAGON_V6_veqw_and, "v60,v62,v65,v66" },
2165  { Hexagon::BI__builtin_HEXAGON_V6_veqw_and_128B, "v60,v62,v65,v66" },
2166  { Hexagon::BI__builtin_HEXAGON_V6_veqw_or, "v60,v62,v65,v66" },
2167  { Hexagon::BI__builtin_HEXAGON_V6_veqw_or_128B, "v60,v62,v65,v66" },
2168  { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor, "v60,v62,v65,v66" },
2169  { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor_128B, "v60,v62,v65,v66" },
2170  { Hexagon::BI__builtin_HEXAGON_V6_vgtb, "v60,v62,v65,v66" },
2171  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_128B, "v60,v62,v65,v66" },
2172  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and, "v60,v62,v65,v66" },
2173  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and_128B, "v60,v62,v65,v66" },
2174  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or, "v60,v62,v65,v66" },
2175  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or_128B, "v60,v62,v65,v66" },
2176  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor, "v60,v62,v65,v66" },
2177  { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor_128B, "v60,v62,v65,v66" },
2178  { Hexagon::BI__builtin_HEXAGON_V6_vgth, "v60,v62,v65,v66" },
2179  { Hexagon::BI__builtin_HEXAGON_V6_vgth_128B, "v60,v62,v65,v66" },
2180  { Hexagon::BI__builtin_HEXAGON_V6_vgth_and, "v60,v62,v65,v66" },
2181  { Hexagon::BI__builtin_HEXAGON_V6_vgth_and_128B, "v60,v62,v65,v66" },
2182  { Hexagon::BI__builtin_HEXAGON_V6_vgth_or, "v60,v62,v65,v66" },
2183  { Hexagon::BI__builtin_HEXAGON_V6_vgth_or_128B, "v60,v62,v65,v66" },
2184  { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor, "v60,v62,v65,v66" },
2185  { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor_128B, "v60,v62,v65,v66" },
2186  { Hexagon::BI__builtin_HEXAGON_V6_vgtub, "v60,v62,v65,v66" },
2187  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_128B, "v60,v62,v65,v66" },
2188  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and, "v60,v62,v65,v66" },
2189  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and_128B, "v60,v62,v65,v66" },
2190  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or, "v60,v62,v65,v66" },
2191  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or_128B, "v60,v62,v65,v66" },
2192  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor, "v60,v62,v65,v66" },
2193  { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor_128B, "v60,v62,v65,v66" },
2194  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh, "v60,v62,v65,v66" },
2195  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_128B, "v60,v62,v65,v66" },
2196  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and, "v60,v62,v65,v66" },
2197  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and_128B, "v60,v62,v65,v66" },
2198  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or, "v60,v62,v65,v66" },
2199  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or_128B, "v60,v62,v65,v66" },
2200  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor, "v60,v62,v65,v66" },
2201  { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor_128B, "v60,v62,v65,v66" },
2202  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw, "v60,v62,v65,v66" },
2203  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_128B, "v60,v62,v65,v66" },
2204  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and, "v60,v62,v65,v66" },
2205  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and_128B, "v60,v62,v65,v66" },
2206  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or, "v60,v62,v65,v66" },
2207  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or_128B, "v60,v62,v65,v66" },
2208  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor, "v60,v62,v65,v66" },
2209  { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor_128B, "v60,v62,v65,v66" },
2210  { Hexagon::BI__builtin_HEXAGON_V6_vgtw, "v60,v62,v65,v66" },
2211  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_128B, "v60,v62,v65,v66" },
2212  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and, "v60,v62,v65,v66" },
2213  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and_128B, "v60,v62,v65,v66" },
2214  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or, "v60,v62,v65,v66" },
2215  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or_128B, "v60,v62,v65,v66" },
2216  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor, "v60,v62,v65,v66" },
2217  { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor_128B, "v60,v62,v65,v66" },
2218  { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr, "v60,v62,v65,v66" },
2219  { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr_128B, "v60,v62,v65,v66" },
2220  { Hexagon::BI__builtin_HEXAGON_V6_vlalignb, "v60,v62,v65,v66" },
2221  { Hexagon::BI__builtin_HEXAGON_V6_vlalignb_128B, "v60,v62,v65,v66" },
2222  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi, "v60,v62,v65,v66" },
2223  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi_128B, "v60,v62,v65,v66" },
2224  { Hexagon::BI__builtin_HEXAGON_V6_vlsrb, "v62,v65,v66" },
2225  { Hexagon::BI__builtin_HEXAGON_V6_vlsrb_128B, "v62,v65,v66" },
2226  { Hexagon::BI__builtin_HEXAGON_V6_vlsrh, "v60,v62,v65,v66" },
2227  { Hexagon::BI__builtin_HEXAGON_V6_vlsrh_128B, "v60,v62,v65,v66" },
2228  { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv, "v60,v62,v65,v66" },
2229  { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv_128B, "v60,v62,v65,v66" },
2230  { Hexagon::BI__builtin_HEXAGON_V6_vlsrw, "v60,v62,v65,v66" },
2231  { Hexagon::BI__builtin_HEXAGON_V6_vlsrw_128B, "v60,v62,v65,v66" },
2232  { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv, "v60,v62,v65,v66" },
2233  { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv_128B, "v60,v62,v65,v66" },
2234  { Hexagon::BI__builtin_HEXAGON_V6_vlut4, "v65,v66" },
2235  { Hexagon::BI__builtin_HEXAGON_V6_vlut4_128B, "v65,v66" },
2236  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb, "v60,v62,v65,v66" },
2237  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_128B, "v60,v62,v65,v66" },
2238  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi, "v62,v65,v66" },
2239  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi_128B, "v62,v65,v66" },
2240  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm, "v62,v65,v66" },
2241  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm_128B, "v62,v65,v66" },
2242  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc, "v60,v62,v65,v66" },
2243  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc_128B, "v60,v62,v65,v66" },
2244  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci, "v62,v65,v66" },
2245  { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci_128B, "v62,v65,v66" },
2246  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh, "v60,v62,v65,v66" },
2247  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_128B, "v60,v62,v65,v66" },
2248  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi, "v62,v65,v66" },
2249  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi_128B, "v62,v65,v66" },
2250  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm, "v62,v65,v66" },
2251  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm_128B, "v62,v65,v66" },
2252  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc, "v60,v62,v65,v66" },
2253  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc_128B, "v60,v62,v65,v66" },
2254  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci, "v62,v65,v66" },
2255  { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci_128B, "v62,v65,v66" },
2256  { Hexagon::BI__builtin_HEXAGON_V6_vmaxb, "v62,v65,v66" },
2257  { Hexagon::BI__builtin_HEXAGON_V6_vmaxb_128B, "v62,v65,v66" },
2258  { Hexagon::BI__builtin_HEXAGON_V6_vmaxh, "v60,v62,v65,v66" },
2259  { Hexagon::BI__builtin_HEXAGON_V6_vmaxh_128B, "v60,v62,v65,v66" },
2260  { Hexagon::BI__builtin_HEXAGON_V6_vmaxub, "v60,v62,v65,v66" },
2261  { Hexagon::BI__builtin_HEXAGON_V6_vmaxub_128B, "v60,v62,v65,v66" },
2262  { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh, "v60,v62,v65,v66" },
2263  { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh_128B, "v60,v62,v65,v66" },
2264  { Hexagon::BI__builtin_HEXAGON_V6_vmaxw, "v60,v62,v65,v66" },
2265  { Hexagon::BI__builtin_HEXAGON_V6_vmaxw_128B, "v60,v62,v65,v66" },
2266  { Hexagon::BI__builtin_HEXAGON_V6_vminb, "v62,v65,v66" },
2267  { Hexagon::BI__builtin_HEXAGON_V6_vminb_128B, "v62,v65,v66" },
2268  { Hexagon::BI__builtin_HEXAGON_V6_vminh, "v60,v62,v65,v66" },
2269  { Hexagon::BI__builtin_HEXAGON_V6_vminh_128B, "v60,v62,v65,v66" },
2270  { Hexagon::BI__builtin_HEXAGON_V6_vminub, "v60,v62,v65,v66" },
2271  { Hexagon::BI__builtin_HEXAGON_V6_vminub_128B, "v60,v62,v65,v66" },
2272  { Hexagon::BI__builtin_HEXAGON_V6_vminuh, "v60,v62,v65,v66" },
2273  { Hexagon::BI__builtin_HEXAGON_V6_vminuh_128B, "v60,v62,v65,v66" },
2274  { Hexagon::BI__builtin_HEXAGON_V6_vminw, "v60,v62,v65,v66" },
2275  { Hexagon::BI__builtin_HEXAGON_V6_vminw_128B, "v60,v62,v65,v66" },
2276  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus, "v60,v62,v65,v66" },
2277  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_128B, "v60,v62,v65,v66" },
2278  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc, "v60,v62,v65,v66" },
2279  { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc_128B, "v60,v62,v65,v66" },
2280  { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv, "v60,v62,v65,v66" },
2281  { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv_128B, "v60,v62,v65,v66" },
2282  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu, "v65,v66" },
2283  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_128B, "v65,v66" },
2284  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc, "v65,v66" },
2285  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc_128B, "v65,v66" },
2286  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv, "v60,v62,v65,v66" },
2287  { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv_128B, "v60,v62,v65,v66" },
2288  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb, "v60,v62,v65,v66" },
2289  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_128B, "v60,v62,v65,v66" },
2290  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc, "v60,v62,v65,v66" },
2291  { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc_128B, "v60,v62,v65,v66" },
2292  { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat, "v65,v66" },
2293  { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat_128B, "v65,v66" },
2294  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb, "v62,v65,v66" },
2295  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_128B, "v62,v65,v66" },
2296  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc, "v62,v65,v66" },
2297  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc_128B, "v62,v65,v66" },
2298  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat, "v65,v66" },
2299  { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat_128B, "v65,v66" },
2300  { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat, "v65,v66" },
2301  { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat_128B, "v65,v66" },
2302  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus, "v60,v62,v65,v66" },
2303  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_128B, "v60,v62,v65,v66" },
2304  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc, "v60,v62,v65,v66" },
2305  { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc_128B, "v60,v62,v65,v66" },
2306  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv, "v60,v62,v65,v66" },
2307  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_128B, "v60,v62,v65,v66" },
2308  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc, "v60,v62,v65,v66" },
2309  { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc_128B, "v60,v62,v65,v66" },
2310  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv, "v60,v62,v65,v66" },
2311  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_128B, "v60,v62,v65,v66" },
2312  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc, "v60,v62,v65,v66" },
2313  { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc_128B, "v60,v62,v65,v66" },
2314  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh, "v60,v62,v65,v66" },
2315  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_128B, "v60,v62,v65,v66" },
2316  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64, "v62,v65,v66" },
2317  { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64_128B, "v62,v65,v66" },
2318  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh, "v60,v62,v65,v66" },
2319  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_128B, "v60,v62,v65,v66" },
2320  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc, "v65,v66" },
2321  { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc_128B, "v65,v66" },
2322  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc, "v60,v62,v65,v66" },
2323  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc_128B, "v60,v62,v65,v66" },
2324  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs, "v60,v62,v65,v66" },
2325  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs_128B, "v60,v62,v65,v66" },
2326  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss, "v60,v62,v65,v66" },
2327  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss_128B, "v60,v62,v65,v66" },
2328  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus, "v60,v62,v65,v66" },
2329  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_128B, "v60,v62,v65,v66" },
2330  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc, "v60,v62,v65,v66" },
2331  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc_128B, "v60,v62,v65,v66" },
2332  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv, "v60,v62,v65,v66" },
2333  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_128B, "v60,v62,v65,v66" },
2334  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc, "v60,v62,v65,v66" },
2335  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc_128B, "v60,v62,v65,v66" },
2336  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs, "v60,v62,v65,v66" },
2337  { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs_128B, "v60,v62,v65,v66" },
2338  { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh, "v60,v62,v65,v66" },
2339  { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh_128B, "v60,v62,v65,v66" },
2340  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc, "v60,v62,v65,v66" },
2341  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc_128B, "v60,v62,v65,v66" },
2342  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh, "v60,v62,v65,v66" },
2343  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_128B, "v60,v62,v65,v66" },
2344  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc, "v60,v62,v65,v66" },
2345  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc_128B, "v60,v62,v65,v66" },
2346  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih, "v60,v62,v65,v66" },
2347  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_128B, "v60,v62,v65,v66" },
2348  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc, "v60,v62,v65,v66" },
2349  { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc_128B, "v60,v62,v65,v66" },
2350  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb, "v60,v62,v65,v66" },
2351  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_128B, "v60,v62,v65,v66" },
2352  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc, "v60,v62,v65,v66" },
2353  { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc_128B, "v60,v62,v65,v66" },
2354  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh, "v60,v62,v65,v66" },
2355  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh_128B, "v60,v62,v65,v66" },
2356  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb, "v60,v62,v65,v66" },
2357  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_128B, "v60,v62,v65,v66" },
2358  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc, "v60,v62,v65,v66" },
2359  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc_128B, "v60,v62,v65,v66" },
2360  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh, "v60,v62,v65,v66" },
2361  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_128B, "v60,v62,v65,v66" },
2362  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc, "v60,v62,v65,v66" },
2363  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc_128B, "v60,v62,v65,v66" },
2364  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub, "v62,v65,v66" },
2365  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_128B, "v62,v65,v66" },
2366  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc, "v62,v65,v66" },
2367  { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc_128B, "v62,v65,v66" },
2368  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh, "v60,v62,v65,v66" },
2369  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_128B, "v60,v62,v65,v66" },
2370  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc, "v62,v65,v66" },
2371  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc_128B, "v62,v65,v66" },
2372  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd, "v60,v62,v65,v66" },
2373  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_128B, "v60,v62,v65,v66" },
2374  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc, "v60,v62,v65,v66" },
2375  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc_128B, "v60,v62,v65,v66" },
2376  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc, "v60,v62,v65,v66" },
2377  { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc_128B, "v60,v62,v65,v66" },
2378  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub, "v60,v62,v65,v66" },
2379  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_128B, "v60,v62,v65,v66" },
2380  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc, "v60,v62,v65,v66" },
2381  { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc_128B, "v60,v62,v65,v66" },
2382  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv, "v60,v62,v65,v66" },
2383  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_128B, "v60,v62,v65,v66" },
2384  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc, "v60,v62,v65,v66" },
2385  { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc_128B, "v60,v62,v65,v66" },
2386  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh, "v60,v62,v65,v66" },
2387  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_128B, "v60,v62,v65,v66" },
2388  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc, "v60,v62,v65,v66" },
2389  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc_128B, "v60,v62,v65,v66" },
2390  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe, "v65,v66" },
2391  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_128B, "v65,v66" },
2392  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc, "v65,v66" },
2393  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc_128B, "v65,v66" },
2394  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv, "v60,v62,v65,v66" },
2395  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_128B, "v60,v62,v65,v66" },
2396  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc, "v60,v62,v65,v66" },
2397  { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc_128B, "v60,v62,v65,v66" },
2398  { Hexagon::BI__builtin_HEXAGON_V6_vmux, "v60,v62,v65,v66" },
2399  { Hexagon::BI__builtin_HEXAGON_V6_vmux_128B, "v60,v62,v65,v66" },
2400  { Hexagon::BI__builtin_HEXAGON_V6_vnavgb, "v65,v66" },
2401  { Hexagon::BI__builtin_HEXAGON_V6_vnavgb_128B, "v65,v66" },
2402  { Hexagon::BI__builtin_HEXAGON_V6_vnavgh, "v60,v62,v65,v66" },
2403  { Hexagon::BI__builtin_HEXAGON_V6_vnavgh_128B, "v60,v62,v65,v66" },
2404  { Hexagon::BI__builtin_HEXAGON_V6_vnavgub, "v60,v62,v65,v66" },
2405  { Hexagon::BI__builtin_HEXAGON_V6_vnavgub_128B, "v60,v62,v65,v66" },
2406  { Hexagon::BI__builtin_HEXAGON_V6_vnavgw, "v60,v62,v65,v66" },
2407  { Hexagon::BI__builtin_HEXAGON_V6_vnavgw_128B, "v60,v62,v65,v66" },
2408  { Hexagon::BI__builtin_HEXAGON_V6_vnormamth, "v60,v62,v65,v66" },
2409  { Hexagon::BI__builtin_HEXAGON_V6_vnormamth_128B, "v60,v62,v65,v66" },
2410  { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw, "v60,v62,v65,v66" },
2411  { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw_128B, "v60,v62,v65,v66" },
2412  { Hexagon::BI__builtin_HEXAGON_V6_vnot, "v60,v62,v65,v66" },
2413  { Hexagon::BI__builtin_HEXAGON_V6_vnot_128B, "v60,v62,v65,v66" },
2414  { Hexagon::BI__builtin_HEXAGON_V6_vor, "v60,v62,v65,v66" },
2415  { Hexagon::BI__builtin_HEXAGON_V6_vor_128B, "v60,v62,v65,v66" },
2416  { Hexagon::BI__builtin_HEXAGON_V6_vpackeb, "v60,v62,v65,v66" },
2417  { Hexagon::BI__builtin_HEXAGON_V6_vpackeb_128B, "v60,v62,v65,v66" },
2418  { Hexagon::BI__builtin_HEXAGON_V6_vpackeh, "v60,v62,v65,v66" },
2419  { Hexagon::BI__builtin_HEXAGON_V6_vpackeh_128B, "v60,v62,v65,v66" },
2420  { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat, "v60,v62,v65,v66" },
2421  { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat_128B, "v60,v62,v65,v66" },
2422  { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat, "v60,v62,v65,v66" },
2423  { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat_128B, "v60,v62,v65,v66" },
2424  { Hexagon::BI__builtin_HEXAGON_V6_vpackob, "v60,v62,v65,v66" },
2425  { Hexagon::BI__builtin_HEXAGON_V6_vpackob_128B, "v60,v62,v65,v66" },
2426  { Hexagon::BI__builtin_HEXAGON_V6_vpackoh, "v60,v62,v65,v66" },
2427  { Hexagon::BI__builtin_HEXAGON_V6_vpackoh_128B, "v60,v62,v65,v66" },
2428  { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat, "v60,v62,v65,v66" },
2429  { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat_128B, "v60,v62,v65,v66" },
2430  { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat, "v60,v62,v65,v66" },
2431  { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat_128B, "v60,v62,v65,v66" },
2432  { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth, "v60,v62,v65,v66" },
2433  { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth_128B, "v60,v62,v65,v66" },
2434  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb, "v65,v66" },
2435  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb_128B, "v65,v66" },
2436  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh, "v65,v66" },
2437  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh_128B, "v65,v66" },
2438  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw, "v65,v66" },
2439  { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw_128B, "v65,v66" },
2440  { Hexagon::BI__builtin_HEXAGON_V6_vrdelta, "v60,v62,v65,v66" },
2441  { Hexagon::BI__builtin_HEXAGON_V6_vrdelta_128B, "v60,v62,v65,v66" },
2442  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt, "v65" },
2443  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_128B, "v65" },
2444  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc, "v65" },
2445  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc_128B, "v65" },
2446  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus, "v60,v62,v65,v66" },
2447  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_128B, "v60,v62,v65,v66" },
2448  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc, "v60,v62,v65,v66" },
2449  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc_128B, "v60,v62,v65,v66" },
2450  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi, "v60,v62,v65,v66" },
2451  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_128B, "v60,v62,v65,v66" },
2452  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc, "v60,v62,v65,v66" },
2453  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc_128B, "v60,v62,v65,v66" },
2454  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv, "v60,v62,v65,v66" },
2455  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_128B, "v60,v62,v65,v66" },
2456  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc, "v60,v62,v65,v66" },
2457  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc_128B, "v60,v62,v65,v66" },
2458  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv, "v60,v62,v65,v66" },
2459  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_128B, "v60,v62,v65,v66" },
2460  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc, "v60,v62,v65,v66" },
2461  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc_128B, "v60,v62,v65,v66" },
2462  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub, "v60,v62,v65,v66" },
2463  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_128B, "v60,v62,v65,v66" },
2464  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc, "v60,v62,v65,v66" },
2465  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc_128B, "v60,v62,v65,v66" },
2466  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi, "v60,v62,v65,v66" },
2467  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_128B, "v60,v62,v65,v66" },
2468  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc, "v60,v62,v65,v66" },
2469  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc_128B, "v60,v62,v65,v66" },
2470  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt, "v65" },
2471  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_128B, "v65" },
2472  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc, "v65" },
2473  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc_128B, "v65" },
2474  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv, "v60,v62,v65,v66" },
2475  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_128B, "v60,v62,v65,v66" },
2476  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc, "v60,v62,v65,v66" },
2477  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc_128B, "v60,v62,v65,v66" },
2478  { Hexagon::BI__builtin_HEXAGON_V6_vror, "v60,v62,v65,v66" },
2479  { Hexagon::BI__builtin_HEXAGON_V6_vror_128B, "v60,v62,v65,v66" },
2480  { Hexagon::BI__builtin_HEXAGON_V6_vrotr, "v66" },
2481  { Hexagon::BI__builtin_HEXAGON_V6_vrotr_128B, "v66" },
2482  { Hexagon::BI__builtin_HEXAGON_V6_vroundhb, "v60,v62,v65,v66" },
2483  { Hexagon::BI__builtin_HEXAGON_V6_vroundhb_128B, "v60,v62,v65,v66" },
2484  { Hexagon::BI__builtin_HEXAGON_V6_vroundhub, "v60,v62,v65,v66" },
2485  { Hexagon::BI__builtin_HEXAGON_V6_vroundhub_128B, "v60,v62,v65,v66" },
2486  { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub, "v62,v65,v66" },
2487  { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub_128B, "v62,v65,v66" },
2488  { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh, "v62,v65,v66" },
2489  { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh_128B, "v62,v65,v66" },
2490  { Hexagon::BI__builtin_HEXAGON_V6_vroundwh, "v60,v62,v65,v66" },
2491  { Hexagon::BI__builtin_HEXAGON_V6_vroundwh_128B, "v60,v62,v65,v66" },
2492  { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh, "v60,v62,v65,v66" },
2493  { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh_128B, "v60,v62,v65,v66" },
2494  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi, "v60,v62,v65,v66" },
2495  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_128B, "v60,v62,v65,v66" },
2496  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc, "v60,v62,v65,v66" },
2497  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc_128B, "v60,v62,v65,v66" },
2498  { Hexagon::BI__builtin_HEXAGON_V6_vsatdw, "v66" },
2499  { Hexagon::BI__builtin_HEXAGON_V6_vsatdw_128B, "v66" },
2500  { Hexagon::BI__builtin_HEXAGON_V6_vsathub, "v60,v62,v65,v66" },
2501  { Hexagon::BI__builtin_HEXAGON_V6_vsathub_128B, "v60,v62,v65,v66" },
2502  { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh, "v62,v65,v66" },
2503  { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh_128B, "v62,v65,v66" },
2504  { Hexagon::BI__builtin_HEXAGON_V6_vsatwh, "v60,v62,v65,v66" },
2505  { Hexagon::BI__builtin_HEXAGON_V6_vsatwh_128B, "v60,v62,v65,v66" },
2506  { Hexagon::BI__builtin_HEXAGON_V6_vsb, "v60,v62,v65,v66" },
2507  { Hexagon::BI__builtin_HEXAGON_V6_vsb_128B, "v60,v62,v65,v66" },
2508  { Hexagon::BI__builtin_HEXAGON_V6_vsh, "v60,v62,v65,v66" },
2509  { Hexagon::BI__builtin_HEXAGON_V6_vsh_128B, "v60,v62,v65,v66" },
2510  { Hexagon::BI__builtin_HEXAGON_V6_vshufeh, "v60,v62,v65,v66" },
2511  { Hexagon::BI__builtin_HEXAGON_V6_vshufeh_128B, "v60,v62,v65,v66" },
2512  { Hexagon::BI__builtin_HEXAGON_V6_vshuffb, "v60,v62,v65,v66" },
2513  { Hexagon::BI__builtin_HEXAGON_V6_vshuffb_128B, "v60,v62,v65,v66" },
2514  { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb, "v60,v62,v65,v66" },
2515  { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb_128B, "v60,v62,v65,v66" },
2516  { Hexagon::BI__builtin_HEXAGON_V6_vshuffh, "v60,v62,v65,v66" },
2517  { Hexagon::BI__builtin_HEXAGON_V6_vshuffh_128B, "v60,v62,v65,v66" },
2518  { Hexagon::BI__builtin_HEXAGON_V6_vshuffob, "v60,v62,v65,v66" },
2519  { Hexagon::BI__builtin_HEXAGON_V6_vshuffob_128B, "v60,v62,v65,v66" },
2520  { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd, "v60,v62,v65,v66" },
2521  { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd_128B, "v60,v62,v65,v66" },
2522  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb, "v60,v62,v65,v66" },
2523  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb_128B, "v60,v62,v65,v66" },
2524  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh, "v60,v62,v65,v66" },
2525  { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh_128B, "v60,v62,v65,v66" },
2526  { Hexagon::BI__builtin_HEXAGON_V6_vshufoh, "v60,v62,v65,v66" },
2527  { Hexagon::BI__builtin_HEXAGON_V6_vshufoh_128B, "v60,v62,v65,v66" },
2528  { Hexagon::BI__builtin_HEXAGON_V6_vsubb, "v60,v62,v65,v66" },
2529  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_128B, "v60,v62,v65,v66" },
2530  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv, "v60,v62,v65,v66" },
2531  { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv_128B, "v60,v62,v65,v66" },
2532  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat, "v62,v65,v66" },
2533  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_128B, "v62,v65,v66" },
2534  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv, "v62,v65,v66" },
2535  { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv_128B, "v62,v65,v66" },
2536  { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry, "v62,v65,v66" },
2537  { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry_128B, "v62,v65,v66" },
2538  { Hexagon::BI__builtin_HEXAGON_V6_vsubh, "v60,v62,v65,v66" },
2539  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_128B, "v60,v62,v65,v66" },
2540  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv, "v60,v62,v65,v66" },
2541  { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv_128B, "v60,v62,v65,v66" },
2542  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat, "v60,v62,v65,v66" },
2543  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_128B, "v60,v62,v65,v66" },
2544  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv, "v60,v62,v65,v66" },
2545  { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv_128B, "v60,v62,v65,v66" },
2546  { Hexagon::BI__builtin_HEXAGON_V6_vsubhw, "v60,v62,v65,v66" },
2547  { Hexagon::BI__builtin_HEXAGON_V6_vsubhw_128B, "v60,v62,v65,v66" },
2548  { Hexagon::BI__builtin_HEXAGON_V6_vsububh, "v60,v62,v65,v66" },
2549  { Hexagon::BI__builtin_HEXAGON_V6_vsububh_128B, "v60,v62,v65,v66" },
2550  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat, "v60,v62,v65,v66" },
2551  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_128B, "v60,v62,v65,v66" },
2552  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv, "v60,v62,v65,v66" },
2553  { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv_128B, "v60,v62,v65,v66" },
2554  { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat, "v62,v65,v66" },
2555  { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat_128B, "v62,v65,v66" },
2556  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat, "v60,v62,v65,v66" },
2557  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_128B, "v60,v62,v65,v66" },
2558  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv, "v60,v62,v65,v66" },
2559  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv_128B, "v60,v62,v65,v66" },
2560  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw, "v60,v62,v65,v66" },
2561  { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw_128B, "v60,v62,v65,v66" },
2562  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat, "v62,v65,v66" },
2563  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_128B, "v62,v65,v66" },
2564  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv, "v62,v65,v66" },
2565  { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv_128B, "v62,v65,v66" },
2566  { Hexagon::BI__builtin_HEXAGON_V6_vsubw, "v60,v62,v65,v66" },
2567  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_128B, "v60,v62,v65,v66" },
2568  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv, "v60,v62,v65,v66" },
2569  { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv_128B, "v60,v62,v65,v66" },
2570  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat, "v60,v62,v65,v66" },
2571  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_128B, "v60,v62,v65,v66" },
2572  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv, "v60,v62,v65,v66" },
2573  { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv_128B, "v60,v62,v65,v66" },
2574  { Hexagon::BI__builtin_HEXAGON_V6_vswap, "v60,v62,v65,v66" },
2575  { Hexagon::BI__builtin_HEXAGON_V6_vswap_128B, "v60,v62,v65,v66" },
2576  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb, "v60,v62,v65,v66" },
2577  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_128B, "v60,v62,v65,v66" },
2578  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc, "v60,v62,v65,v66" },
2579  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc_128B, "v60,v62,v65,v66" },
2580  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus, "v60,v62,v65,v66" },
2581  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_128B, "v60,v62,v65,v66" },
2582  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc, "v60,v62,v65,v66" },
2583  { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc_128B, "v60,v62,v65,v66" },
2584  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb, "v60,v62,v65,v66" },
2585  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_128B, "v60,v62,v65,v66" },
2586  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc, "v60,v62,v65,v66" },
2587  { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc_128B, "v60,v62,v65,v66" },
2588  { Hexagon::BI__builtin_HEXAGON_V6_vunpackb, "v60,v62,v65,v66" },
2589  { Hexagon::BI__builtin_HEXAGON_V6_vunpackb_128B, "v60,v62,v65,v66" },
2590  { Hexagon::BI__builtin_HEXAGON_V6_vunpackh, "v60,v62,v65,v66" },
2591  { Hexagon::BI__builtin_HEXAGON_V6_vunpackh_128B, "v60,v62,v65,v66" },
2592  { Hexagon::BI__builtin_HEXAGON_V6_vunpackob, "v60,v62,v65,v66" },
2593  { Hexagon::BI__builtin_HEXAGON_V6_vunpackob_128B, "v60,v62,v65,v66" },
2594  { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh, "v60,v62,v65,v66" },
2595  { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh_128B, "v60,v62,v65,v66" },
2596  { Hexagon::BI__builtin_HEXAGON_V6_vunpackub, "v60,v62,v65,v66" },
2597  { Hexagon::BI__builtin_HEXAGON_V6_vunpackub_128B, "v60,v62,v65,v66" },
2598  { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh, "v60,v62,v65,v66" },
2599  { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh_128B, "v60,v62,v65,v66" },
2600  { Hexagon::BI__builtin_HEXAGON_V6_vxor, "v60,v62,v65,v66" },
2601  { Hexagon::BI__builtin_HEXAGON_V6_vxor_128B, "v60,v62,v65,v66" },
2602  { Hexagon::BI__builtin_HEXAGON_V6_vzb, "v60,v62,v65,v66" },
2603  { Hexagon::BI__builtin_HEXAGON_V6_vzb_128B, "v60,v62,v65,v66" },
2604  { Hexagon::BI__builtin_HEXAGON_V6_vzh, "v60,v62,v65,v66" },
2605  { Hexagon::BI__builtin_HEXAGON_V6_vzh_128B, "v60,v62,v65,v66" },
2606  };
2607 
2608  // Sort the tables on first execution so we can binary search them.
2609  auto SortCmp = [](const BuiltinAndString &LHS, const BuiltinAndString &RHS) {
2610  return LHS.BuiltinID < RHS.BuiltinID;
2611  };
2612  static const bool SortOnce =
2613  (std::sort(std::begin(ValidCPU), std::end(ValidCPU), SortCmp),
2614  std::sort(std::begin(ValidHVX), std::end(ValidHVX), SortCmp), true);
2615  (void)SortOnce;
2616  auto LowerBoundCmp = [](const BuiltinAndString &BI, unsigned BuiltinID) {
2617  return BI.BuiltinID < BuiltinID;
2618  };
2619 
2620  const TargetInfo &TI = Context.getTargetInfo();
2621 
2622  const BuiltinAndString *FC =
2623  std::lower_bound(std::begin(ValidCPU), std::end(ValidCPU), BuiltinID,
2624  LowerBoundCmp);
2625  if (FC != std::end(ValidCPU) && FC->BuiltinID == BuiltinID) {
2626  const TargetOptions &Opts = TI.getTargetOpts();
2627  StringRef CPU = Opts.CPU;
2628  if (!CPU.empty()) {
2629  assert(CPU.startswith("hexagon") && "Unexpected CPU name");
2630  CPU.consume_front("hexagon");
2631  SmallVector<StringRef, 3> CPUs;
2632  StringRef(FC->Str).split(CPUs, ',');
2633  if (llvm::none_of(CPUs, [CPU](StringRef S) { return S == CPU; }))
2634  return Diag(TheCall->getBeginLoc(),
2635  diag::err_hexagon_builtin_unsupported_cpu);
2636  }
2637  }
2638 
2639  const BuiltinAndString *FH =
2640  std::lower_bound(std::begin(ValidHVX), std::end(ValidHVX), BuiltinID,
2641  LowerBoundCmp);
2642  if (FH != std::end(ValidHVX) && FH->BuiltinID == BuiltinID) {
2643  if (!TI.hasFeature("hvx"))
2644  return Diag(TheCall->getBeginLoc(),
2645  diag::err_hexagon_builtin_requires_hvx);
2646 
2647  SmallVector<StringRef, 3> HVXs;
2648  StringRef(FH->Str).split(HVXs, ',');
2649  bool IsValid = llvm::any_of(HVXs,
2650  [&TI] (StringRef V) {
2651  std::string F = "hvx" + V.str();
2652  return TI.hasFeature(F);
2653  });
2654  if (!IsValid)
2655  return Diag(TheCall->getBeginLoc(),
2656  diag::err_hexagon_builtin_unsupported_hvx);
2657  }
2658 
2659  return false;
2660 }
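
// [Editorial sketch, not part of the original source] CheckHexagonBuiltinCpu
// keys both ValidCPU and ValidHVX on BuiltinID, sorts each table exactly once
// through a dynamically initialized local static, and then searches it with
// std::lower_bound. A minimal standalone version of that idiom, using
// hypothetical Entry/lookupEntry names, might look like this:

#include <algorithm>
#include <iterator>

struct Entry { unsigned ID; const char *Data; };
static Entry Table[] = {{9, "c"}, {3, "a"}, {7, "b"}};

static const Entry *lookupEntry(unsigned ID) {
  // Sort the table on the first call only; later calls reuse the sorted data.
  static const bool Sorted =
      (std::sort(std::begin(Table), std::end(Table),
                 [](const Entry &L, const Entry &R) { return L.ID < R.ID; }),
       true);
  (void)Sorted;
  const Entry *E =
      std::lower_bound(std::begin(Table), std::end(Table), ID,
                       [](const Entry &En, unsigned I) { return En.ID < I; });
  return (E != std::end(Table) && E->ID == ID) ? E : nullptr;
}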
2661 
2662 bool Sema::CheckHexagonBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
2663  struct ArgInfo {
2664  uint8_t OpNum;
2665  bool IsSigned;
2666  uint8_t BitWidth;
2667  uint8_t Align;
2668  };
2669  struct BuiltinInfo {
2670  unsigned BuiltinID;
2671  ArgInfo Infos[2];
2672  };
2673 
2674  static BuiltinInfo Infos[] = {
2675  { Hexagon::BI__builtin_circ_ldd, {{ 3, true, 4, 3 }} },
2676  { Hexagon::BI__builtin_circ_ldw, {{ 3, true, 4, 2 }} },
2677  { Hexagon::BI__builtin_circ_ldh, {{ 3, true, 4, 1 }} },
2678  { Hexagon::BI__builtin_circ_lduh, {{ 3, true, 4, 0 }} },
2679  { Hexagon::BI__builtin_circ_ldb, {{ 3, true, 4, 0 }} },
2680  { Hexagon::BI__builtin_circ_ldub, {{ 3, true, 4, 0 }} },
2681  { Hexagon::BI__builtin_circ_std, {{ 3, true, 4, 3 }} },
2682  { Hexagon::BI__builtin_circ_stw, {{ 3, true, 4, 2 }} },
2683  { Hexagon::BI__builtin_circ_sth, {{ 3, true, 4, 1 }} },
2684  { Hexagon::BI__builtin_circ_sthhi, {{ 3, true, 4, 1 }} },
2685  { Hexagon::BI__builtin_circ_stb, {{ 3, true, 4, 0 }} },
2686 
2687  { Hexagon::BI__builtin_HEXAGON_L2_loadrub_pci, {{ 1, true, 4, 0 }} },
2688  { Hexagon::BI__builtin_HEXAGON_L2_loadrb_pci, {{ 1, true, 4, 0 }} },
2689  { Hexagon::BI__builtin_HEXAGON_L2_loadruh_pci, {{ 1, true, 4, 1 }} },
2690  { Hexagon::BI__builtin_HEXAGON_L2_loadrh_pci, {{ 1, true, 4, 1 }} },
2691  { Hexagon::BI__builtin_HEXAGON_L2_loadri_pci, {{ 1, true, 4, 2 }} },
2692  { Hexagon::BI__builtin_HEXAGON_L2_loadrd_pci, {{ 1, true, 4, 3 }} },
2693  { Hexagon::BI__builtin_HEXAGON_S2_storerb_pci, {{ 1, true, 4, 0 }} },
2694  { Hexagon::BI__builtin_HEXAGON_S2_storerh_pci, {{ 1, true, 4, 1 }} },
2695  { Hexagon::BI__builtin_HEXAGON_S2_storerf_pci, {{ 1, true, 4, 1 }} },
2696  { Hexagon::BI__builtin_HEXAGON_S2_storeri_pci, {{ 1, true, 4, 2 }} },
2697  { Hexagon::BI__builtin_HEXAGON_S2_storerd_pci, {{ 1, true, 4, 3 }} },
2698 
2699  { Hexagon::BI__builtin_HEXAGON_A2_combineii, {{ 1, true, 8, 0 }} },
2700  { Hexagon::BI__builtin_HEXAGON_A2_tfrih, {{ 1, false, 16, 0 }} },
2701  { Hexagon::BI__builtin_HEXAGON_A2_tfril, {{ 1, false, 16, 0 }} },
2702  { Hexagon::BI__builtin_HEXAGON_A2_tfrpi, {{ 0, true, 8, 0 }} },
2703  { Hexagon::BI__builtin_HEXAGON_A4_bitspliti, {{ 1, false, 5, 0 }} },
2704  { Hexagon::BI__builtin_HEXAGON_A4_cmpbeqi, {{ 1, false, 8, 0 }} },
2705  { Hexagon::BI__builtin_HEXAGON_A4_cmpbgti, {{ 1, true, 8, 0 }} },
2706  { Hexagon::BI__builtin_HEXAGON_A4_cround_ri, {{ 1, false, 5, 0 }} },
2707  { Hexagon::BI__builtin_HEXAGON_A4_round_ri, {{ 1, false, 5, 0 }} },
2708  { Hexagon::BI__builtin_HEXAGON_A4_round_ri_sat, {{ 1, false, 5, 0 }} },
2709  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbeqi, {{ 1, false, 8, 0 }} },
2710  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbgti, {{ 1, true, 8, 0 }} },
2711  { Hexagon::BI__builtin_HEXAGON_A4_vcmpbgtui, {{ 1, false, 7, 0 }} },
2712  { Hexagon::BI__builtin_HEXAGON_A4_vcmpheqi, {{ 1, true, 8, 0 }} },
2713  { Hexagon::BI__builtin_HEXAGON_A4_vcmphgti, {{ 1, true, 8, 0 }} },
2714  { Hexagon::BI__builtin_HEXAGON_A4_vcmphgtui, {{ 1, false, 7, 0 }} },
2715  { Hexagon::BI__builtin_HEXAGON_A4_vcmpweqi, {{ 1, true, 8, 0 }} },
2716  { Hexagon::BI__builtin_HEXAGON_A4_vcmpwgti, {{ 1, true, 8, 0 }} },
2717  { Hexagon::BI__builtin_HEXAGON_A4_vcmpwgtui, {{ 1, false, 7, 0 }} },
2718  { Hexagon::BI__builtin_HEXAGON_C2_bitsclri, {{ 1, false, 6, 0 }} },
2719  { Hexagon::BI__builtin_HEXAGON_C2_muxii, {{ 2, true, 8, 0 }} },
2720  { Hexagon::BI__builtin_HEXAGON_C4_nbitsclri, {{ 1, false, 6, 0 }} },
2721  { Hexagon::BI__builtin_HEXAGON_F2_dfclass, {{ 1, false, 5, 0 }} },
2722  { Hexagon::BI__builtin_HEXAGON_F2_dfimm_n, {{ 0, false, 10, 0 }} },
2723  { Hexagon::BI__builtin_HEXAGON_F2_dfimm_p, {{ 0, false, 10, 0 }} },
2724  { Hexagon::BI__builtin_HEXAGON_F2_sfclass, {{ 1, false, 5, 0 }} },
2725  { Hexagon::BI__builtin_HEXAGON_F2_sfimm_n, {{ 0, false, 10, 0 }} },
2726  { Hexagon::BI__builtin_HEXAGON_F2_sfimm_p, {{ 0, false, 10, 0 }} },
2727  { Hexagon::BI__builtin_HEXAGON_M4_mpyri_addi, {{ 2, false, 6, 0 }} },
2728  { Hexagon::BI__builtin_HEXAGON_M4_mpyri_addr_u2, {{ 1, false, 6, 2 }} },
2729  { Hexagon::BI__builtin_HEXAGON_S2_addasl_rrri, {{ 2, false, 3, 0 }} },
2730  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_acc, {{ 2, false, 6, 0 }} },
2731  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_and, {{ 2, false, 6, 0 }} },
2732  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p, {{ 1, false, 6, 0 }} },
2733  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_nac, {{ 2, false, 6, 0 }} },
2734  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_or, {{ 2, false, 6, 0 }} },
2735  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_xacc, {{ 2, false, 6, 0 }} },
2736  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_acc, {{ 2, false, 5, 0 }} },
2737  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_and, {{ 2, false, 5, 0 }} },
2738  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r, {{ 1, false, 5, 0 }} },
2739  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_nac, {{ 2, false, 5, 0 }} },
2740  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_or, {{ 2, false, 5, 0 }} },
2741  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_sat, {{ 1, false, 5, 0 }} },
2742  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_xacc, {{ 2, false, 5, 0 }} },
2743  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_vh, {{ 1, false, 4, 0 }} },
2744  { Hexagon::BI__builtin_HEXAGON_S2_asl_i_vw, {{ 1, false, 5, 0 }} },
2745  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_acc, {{ 2, false, 6, 0 }} },
2746  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_and, {{ 2, false, 6, 0 }} },
2747  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p, {{ 1, false, 6, 0 }} },
2748  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_nac, {{ 2, false, 6, 0 }} },
2749  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_or, {{ 2, false, 6, 0 }} },
2750  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd_goodsyntax,
2751  {{ 1, false, 6, 0 }} },
2752  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_rnd, {{ 1, false, 6, 0 }} },
2753  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_acc, {{ 2, false, 5, 0 }} },
2754  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_and, {{ 2, false, 5, 0 }} },
2755  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r, {{ 1, false, 5, 0 }} },
2756  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_nac, {{ 2, false, 5, 0 }} },
2757  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_or, {{ 2, false, 5, 0 }} },
2758  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax,
2759  {{ 1, false, 5, 0 }} },
2760  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd, {{ 1, false, 5, 0 }} },
2761  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_svw_trun, {{ 1, false, 5, 0 }} },
2762  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_vh, {{ 1, false, 4, 0 }} },
2763  { Hexagon::BI__builtin_HEXAGON_S2_asr_i_vw, {{ 1, false, 5, 0 }} },
2764  { Hexagon::BI__builtin_HEXAGON_S2_clrbit_i, {{ 1, false, 5, 0 }} },
2765  { Hexagon::BI__builtin_HEXAGON_S2_extractu, {{ 1, false, 5, 0 },
2766  { 2, false, 5, 0 }} },
2767  { Hexagon::BI__builtin_HEXAGON_S2_extractup, {{ 1, false, 6, 0 },
2768  { 2, false, 6, 0 }} },
2769  { Hexagon::BI__builtin_HEXAGON_S2_insert, {{ 2, false, 5, 0 },
2770  { 3, false, 5, 0 }} },
2771  { Hexagon::BI__builtin_HEXAGON_S2_insertp, {{ 2, false, 6, 0 },
2772  { 3, false, 6, 0 }} },
2773  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_acc, {{ 2, false, 6, 0 }} },
2774  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_and, {{ 2, false, 6, 0 }} },
2775  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p, {{ 1, false, 6, 0 }} },
2776  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_nac, {{ 2, false, 6, 0 }} },
2777  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_or, {{ 2, false, 6, 0 }} },
2778  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_xacc, {{ 2, false, 6, 0 }} },
2779  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_acc, {{ 2, false, 5, 0 }} },
2780  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_and, {{ 2, false, 5, 0 }} },
2781  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r, {{ 1, false, 5, 0 }} },
2782  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_nac, {{ 2, false, 5, 0 }} },
2783  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_or, {{ 2, false, 5, 0 }} },
2784  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_xacc, {{ 2, false, 5, 0 }} },
2785  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vh, {{ 1, false, 4, 0 }} },
2786  { Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vw, {{ 1, false, 5, 0 }} },
2787  { Hexagon::BI__builtin_HEXAGON_S2_setbit_i, {{ 1, false, 5, 0 }} },
2788  { Hexagon::BI__builtin_HEXAGON_S2_tableidxb_goodsyntax,
2789  {{ 2, false, 4, 0 },
2790  { 3, false, 5, 0 }} },
2791  { Hexagon::BI__builtin_HEXAGON_S2_tableidxd_goodsyntax,
2792  {{ 2, false, 4, 0 },
2793  { 3, false, 5, 0 }} },
2794  { Hexagon::BI__builtin_HEXAGON_S2_tableidxh_goodsyntax,
2795  {{ 2, false, 4, 0 },
2796  { 3, false, 5, 0 }} },
2797  { Hexagon::BI__builtin_HEXAGON_S2_tableidxw_goodsyntax,
2798  {{ 2, false, 4, 0 },
2799  { 3, false, 5, 0 }} },
2800  { Hexagon::BI__builtin_HEXAGON_S2_togglebit_i, {{ 1, false, 5, 0 }} },
2801  { Hexagon::BI__builtin_HEXAGON_S2_tstbit_i, {{ 1, false, 5, 0 }} },
2802  { Hexagon::BI__builtin_HEXAGON_S2_valignib, {{ 2, false, 3, 0 }} },
2803  { Hexagon::BI__builtin_HEXAGON_S2_vspliceib, {{ 2, false, 3, 0 }} },
2804  { Hexagon::BI__builtin_HEXAGON_S4_addi_asl_ri, {{ 2, false, 5, 0 }} },
2805  { Hexagon::BI__builtin_HEXAGON_S4_addi_lsr_ri, {{ 2, false, 5, 0 }} },
2806  { Hexagon::BI__builtin_HEXAGON_S4_andi_asl_ri, {{ 2, false, 5, 0 }} },
2807  { Hexagon::BI__builtin_HEXAGON_S4_andi_lsr_ri, {{ 2, false, 5, 0 }} },
2808  { Hexagon::BI__builtin_HEXAGON_S4_clbaddi, {{ 1, true , 6, 0 }} },
2809  { Hexagon::BI__builtin_HEXAGON_S4_clbpaddi, {{ 1, true, 6, 0 }} },
2810  { Hexagon::BI__builtin_HEXAGON_S4_extract, {{ 1, false, 5, 0 },
2811  { 2, false, 5, 0 }} },
2812  { Hexagon::BI__builtin_HEXAGON_S4_extractp, {{ 1, false, 6, 0 },
2813  { 2, false, 6, 0 }} },
2814  { Hexagon::BI__builtin_HEXAGON_S4_lsli, {{ 0, true, 6, 0 }} },
2815  { Hexagon::BI__builtin_HEXAGON_S4_ntstbit_i, {{ 1, false, 5, 0 }} },
2816  { Hexagon::BI__builtin_HEXAGON_S4_ori_asl_ri, {{ 2, false, 5, 0 }} },
2817  { Hexagon::BI__builtin_HEXAGON_S4_ori_lsr_ri, {{ 2, false, 5, 0 }} },
2818  { Hexagon::BI__builtin_HEXAGON_S4_subi_asl_ri, {{ 2, false, 5, 0 }} },
2819  { Hexagon::BI__builtin_HEXAGON_S4_subi_lsr_ri, {{ 2, false, 5, 0 }} },
2820  { Hexagon::BI__builtin_HEXAGON_S4_vrcrotate_acc, {{ 3, false, 2, 0 }} },
2821  { Hexagon::BI__builtin_HEXAGON_S4_vrcrotate, {{ 2, false, 2, 0 }} },
2822  { Hexagon::BI__builtin_HEXAGON_S5_asrhub_rnd_sat_goodsyntax,
2823  {{ 1, false, 4, 0 }} },
2824  { Hexagon::BI__builtin_HEXAGON_S5_asrhub_sat, {{ 1, false, 4, 0 }} },
2825  { Hexagon::BI__builtin_HEXAGON_S5_vasrhrnd_goodsyntax,
2826  {{ 1, false, 4, 0 }} },
2827  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p, {{ 1, false, 6, 0 }} },
2828  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_acc, {{ 2, false, 6, 0 }} },
2829  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_and, {{ 2, false, 6, 0 }} },
2830  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_nac, {{ 2, false, 6, 0 }} },
2831  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_or, {{ 2, false, 6, 0 }} },
2832  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_xacc, {{ 2, false, 6, 0 }} },
2833  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r, {{ 1, false, 5, 0 }} },
2834  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_acc, {{ 2, false, 5, 0 }} },
2835  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_and, {{ 2, false, 5, 0 }} },
2836  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_nac, {{ 2, false, 5, 0 }} },
2837  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_or, {{ 2, false, 5, 0 }} },
2838  { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_xacc, {{ 2, false, 5, 0 }} },
2839  { Hexagon::BI__builtin_HEXAGON_V6_valignbi, {{ 2, false, 3, 0 }} },
2840  { Hexagon::BI__builtin_HEXAGON_V6_valignbi_128B, {{ 2, false, 3, 0 }} },
2841  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi, {{ 2, false, 3, 0 }} },
2842  { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi_128B, {{ 2, false, 3, 0 }} },
2843  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi, {{ 2, false, 1, 0 }} },
2844  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_128B, {{ 2, false, 1, 0 }} },
2845  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc, {{ 3, false, 1, 0 }} },
2846  { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc_128B,
2847  {{ 3, false, 1, 0 }} },
2848  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi, {{ 2, false, 1, 0 }} },
2849  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_128B, {{ 2, false, 1, 0 }} },
2850  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc, {{ 3, false, 1, 0 }} },
2851  { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc_128B,
2852  {{ 3, false, 1, 0 }} },
2853  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi, {{ 2, false, 1, 0 }} },
2854  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_128B, {{ 2, false, 1, 0 }} },
2855  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc, {{ 3, false, 1, 0 }} },
2856  { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc_128B,
2857  {{ 3, false, 1, 0 }} },
2858  };
2859 
2860  // Use a dynamically initialized static to sort the table exactly once on
2861  // first run.
2862  static const bool SortOnce =
2863  (std::sort(std::begin(Infos), std::end(Infos),
2864  [](const BuiltinInfo &LHS, const BuiltinInfo &RHS) {
2865  return LHS.BuiltinID < RHS.BuiltinID;
2866  }),
2867  true);
2868  (void)SortOnce;
2869 
2870  const BuiltinInfo *F =
2871  std::lower_bound(std::begin(Infos), std::end(Infos), BuiltinID,
2872  [](const BuiltinInfo &BI, unsigned BuiltinID) {
2873  return BI.BuiltinID < BuiltinID;
2874  });
2875  if (F == std::end(Infos) || F->BuiltinID != BuiltinID)
2876  return false;
2877 
2878  bool Error = false;
2879 
2880  for (const ArgInfo &A : F->Infos) {
2881  // Ignore empty ArgInfo elements.
2882  if (A.BitWidth == 0)
2883  continue;
2884 
2885  int32_t Min = A.IsSigned ? -(1 << (A.BitWidth - 1)) : 0;
2886  int32_t Max = (1 << (A.IsSigned ? A.BitWidth - 1 : A.BitWidth)) - 1;
2887  if (!A.Align) {
2888  Error |= SemaBuiltinConstantArgRange(TheCall, A.OpNum, Min, Max);
2889  } else {
2890  unsigned M = 1 << A.Align;
2891  Min *= M;
2892  Max *= M;
2893  Error |= SemaBuiltinConstantArgRange(TheCall, A.OpNum, Min, Max) |
2894  SemaBuiltinConstantArgMultiple(TheCall, A.OpNum, M);
2895  }
2896  }
2897  return Error;
2898 }
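
// [Editorial sketch, not part of the original source] For each ArgInfo entry
// {OpNum, IsSigned, BitWidth, Align} above, the accepted immediate range is
// the BitWidth-bit signed or unsigned range scaled by 2^Align, and a nonzero
// Align additionally forces the value to be a multiple of 2^Align. A small
// hypothetical helper mirroring the arithmetic of the loop above:

#include <cstdint>
#include <utility>

static std::pair<int32_t, int32_t> immediateRange(bool IsSigned,
                                                  unsigned BitWidth,
                                                  unsigned Align) {
  int32_t Min = IsSigned ? -(1 << (BitWidth - 1)) : 0;
  int32_t Max = (1 << (IsSigned ? BitWidth - 1 : BitWidth)) - 1;
  int32_t M = 1 << Align;
  return {Min * M, Max * M};
}

// Examples, using entries from the table above:
//   immediateRange(false, 3, 0) == {0, 7}     S2_valignib, operand 2
//   immediateRange(false, 6, 2) == {0, 252}   M4_mpyri_addr_u2, operand 1
//                                             (also a multiple of 4)
//   immediateRange(true, 4, 2)  == {-32, 28}  __builtin_circ_ldw, operand 3
//                                             (also a multiple of 4)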
2899 
2900 bool Sema::CheckHexagonBuiltinFunctionCall(unsigned BuiltinID,
2901  CallExpr *TheCall) {
2902  return CheckHexagonBuiltinCpu(BuiltinID, TheCall) ||
2903  CheckHexagonBuiltinArgument(BuiltinID, TheCall);
2904 }
2905 
2906 
2907 // CheckMipsBuiltinFunctionCall - Checks that the constant value passed to
2908 // the intrinsic is correct. The switch statement is ordered by DSP, then MSA.
2909 // The ordering within DSP is unspecified. MSA is ordered by the data format
2910 // used by the underlying instruction, i.e. df/m then df/n, and then by size.
2911 //
2912 // FIXME: The size tests here should instead be tablegen'd along with the
2913 // definitions from include/clang/Basic/BuiltinsMips.def.
2914 // FIXME: GCC is strict on signedness for some of these intrinsics, we should
2915 // be too.
2916 bool Sema::CheckMipsBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
2917  unsigned i = 0, l = 0, u = 0, m = 0;
2918  switch (BuiltinID) {
2919  default: return false;
2920  case Mips::BI__builtin_mips_wrdsp: i = 1; l = 0; u = 63; break;
2921  case Mips::BI__builtin_mips_rddsp: i = 0; l = 0; u = 63; break;
2922  case Mips::BI__builtin_mips_append: i = 2; l = 0; u = 31; break;
2923  case Mips::BI__builtin_mips_balign: i = 2; l = 0; u = 3; break;
2924  case Mips::BI__builtin_mips_precr_sra_ph_w: i = 2; l = 0; u = 31; break;
2925  case Mips::BI__builtin_mips_precr_sra_r_ph_w: i = 2; l = 0; u = 31; break;
2926  case Mips::BI__builtin_mips_prepend: i = 2; l = 0; u = 31; break;
2927  // MSA intrinsics. Instructions (which the intrinsics map to) that use the
2928  // df/m field.
2929  // These intrinsics take an unsigned 3 bit immediate.
2930  case Mips::BI__builtin_msa_bclri_b:
2931  case Mips::BI__builtin_msa_bnegi_b:
2932  case Mips::BI__builtin_msa_bseti_b:
2933  case Mips::BI__builtin_msa_sat_s_b:
2934  case Mips::BI__builtin_msa_sat_u_b:
2935  case Mips::BI__builtin_msa_slli_b:
2936  case Mips::BI__builtin_msa_srai_b:
2937  case Mips::BI__builtin_msa_srari_b:
2938  case Mips::BI__builtin_msa_srli_b:
2939  case Mips::BI__builtin_msa_srlri_b: i = 1; l = 0; u = 7; break;
2940  case Mips::BI__builtin_msa_binsli_b:
2941  case Mips::BI__builtin_msa_binsri_b: i = 2; l = 0; u = 7; break;
2942  // These intrinsics take an unsigned 4 bit immediate.
2943  case Mips::BI__builtin_msa_bclri_h:
2944  case Mips::BI__builtin_msa_bnegi_h:
2945  case Mips::BI__builtin_msa_bseti_h:
2946  case Mips::BI__builtin_msa_sat_s_h:
2947  case Mips::BI__builtin_msa_sat_u_h:
2948  case Mips::BI__builtin_msa_slli_h:
2949  case Mips::BI__builtin_msa_srai_h:
2950  case Mips::BI__builtin_msa_srari_h:
2951  case Mips::BI__builtin_msa_srli_h:
2952  case Mips::BI__builtin_msa_srlri_h: i = 1; l = 0; u = 15; break;
2953  case Mips::BI__builtin_msa_binsli_h:
2954  case Mips::BI__builtin_msa_binsri_h: i = 2; l = 0; u = 15; break;
2955  // These intrinsics take an unsigned 5 bit immediate.
2956  // The first block of intrinsics actually have an unsigned 5 bit field,
2957  // not a df/n field.
2958  case Mips::BI__builtin_msa_clei_u_b:
2959  case Mips::BI__builtin_msa_clei_u_h:
2960  case Mips::BI__builtin_msa_clei_u_w:
2961  case Mips::BI__builtin_msa_clei_u_d:
2962  case Mips::BI__builtin_msa_clti_u_b:
2963  case Mips::BI__builtin_msa_clti_u_h:
2964  case Mips::BI__builtin_msa_clti_u_w:
2965  case Mips::BI__builtin_msa_clti_u_d:
2966  case Mips::BI__builtin_msa_maxi_u_b:
2967  case Mips::BI__builtin_msa_maxi_u_h:
2968  case Mips::BI__builtin_msa_maxi_u_w:
2969  case Mips::BI__builtin_msa_maxi_u_d:
2970  case Mips::BI__builtin_msa_mini_u_b:
2971  case Mips::BI__builtin_msa_mini_u_h:
2972  case Mips::BI__builtin_msa_mini_u_w:
2973  case Mips::BI__builtin_msa_mini_u_d:
2974  case Mips::BI__builtin_msa_addvi_b:
2975  case Mips::BI__builtin_msa_addvi_h:
2976  case Mips::BI__builtin_msa_addvi_w:
2977  case Mips::BI__builtin_msa_addvi_d:
2978  case Mips::BI__builtin_msa_bclri_w:
2979  case Mips::BI__builtin_msa_bnegi_w:
2980  case Mips::BI__builtin_msa_bseti_w:
2981  case Mips::BI__builtin_msa_sat_s_w:
2982  case Mips::BI__builtin_msa_sat_u_w:
2983  case Mips::BI__builtin_msa_slli_w:
2984  case Mips::BI__builtin_msa_srai_w:
2985  case Mips::BI__builtin_msa_srari_w:
2986  case Mips::BI__builtin_msa_srli_w:
2987  case Mips::BI__builtin_msa_srlri_w:
2988  case Mips::BI__builtin_msa_subvi_b:
2989  case Mips::BI__builtin_msa_subvi_h:
2990  case Mips::BI__builtin_msa_subvi_w:
2991  case Mips::BI__builtin_msa_subvi_d: i = 1; l = 0; u = 31; break;
2992  case Mips::BI__builtin_msa_binsli_w:
2993  case Mips::BI__builtin_msa_binsri_w: i = 2; l = 0; u = 31; break;
2994  // These intrinsics take an unsigned 6 bit immediate.
2995  case Mips::BI__builtin_msa_bclri_d:
2996  case Mips::BI__builtin_msa_bnegi_d:
2997  case Mips::BI__builtin_msa_bseti_d:
2998  case Mips::BI__builtin_msa_sat_s_d:
2999  case Mips::BI__builtin_msa_sat_u_d:
3000  case Mips::BI__builtin_msa_slli_d:
3001  case Mips::BI__builtin_msa_srai_d:
3002  case Mips::BI__builtin_msa_srari_d:
3003  case Mips::BI__builtin_msa_srli_d:
3004  case Mips::BI__builtin_msa_srlri_d: i = 1; l = 0; u = 63; break;
3005  case Mips::BI__builtin_msa_binsli_d:
3006  case Mips::BI__builtin_msa_binsri_d: i = 2; l = 0; u = 63; break;
3007  // These intrinsics take a signed 5 bit immediate.
3008  case Mips::BI__builtin_msa_ceqi_b:
3009  case Mips::BI__builtin_msa_ceqi_h:
3010  case Mips::BI__builtin_msa_ceqi_w:
3011  case Mips::BI__builtin_msa_ceqi_d:
3012  case Mips::BI__builtin_msa_clti_s_b:
3013  case Mips::BI__builtin_msa_clti_s_h:
3014  case Mips::BI__builtin_msa_clti_s_w:
3015  case Mips::BI__builtin_msa_clti_s_d:
3016  case Mips::BI__builtin_msa_clei_s_b:
3017  case Mips::BI__builtin_msa_clei_s_h:
3018  case Mips::BI__builtin_msa_clei_s_w:
3019  case Mips::BI__builtin_msa_clei_s_d:
3020  case Mips::BI__builtin_msa_maxi_s_b:
3021  case Mips::BI__builtin_msa_maxi_s_h:
3022  case Mips::BI__builtin_msa_maxi_s_w:
3023  case Mips::BI__builtin_msa_maxi_s_d:
3024  case Mips::BI__builtin_msa_mini_s_b:
3025  case Mips::BI__builtin_msa_mini_s_h:
3026  case Mips::BI__builtin_msa_mini_s_w:
3027  case Mips::BI__builtin_msa_mini_s_d: i = 1; l = -16; u = 15; break;
3028  // These intrinsics take an unsigned 8 bit immediate.
3029  case Mips::BI__builtin_msa_andi_b:
3030  case Mips::BI__builtin_msa_nori_b:
3031  case Mips::BI__builtin_msa_ori_b:
3032  case Mips::BI__builtin_msa_shf_b:
3033  case Mips::BI__builtin_msa_shf_h:
3034  case Mips::BI__builtin_msa_shf_w:
3035  case Mips::BI__builtin_msa_xori_b: i = 1; l = 0; u = 255; break;
3036  case Mips::BI__builtin_msa_bseli_b:
3037  case Mips::BI__builtin_msa_bmnzi_b:
3038  case Mips::BI__builtin_msa_bmzi_b: i = 2; l = 0; u = 255; break;
3039  // df/n format
3040  // These intrinsics take an unsigned 4 bit immediate.
3041  case Mips::BI__builtin_msa_copy_s_b:
3042  case Mips::BI__builtin_msa_copy_u_b:
3043  case Mips::BI__builtin_msa_insve_b:
3044  case Mips::BI__builtin_msa_splati_b: i = 1; l = 0; u = 15; break;
3045  case Mips::BI__builtin_msa_sldi_b: i = 2; l = 0; u = 15; break;
3046  // These intrinsics take an unsigned 3 bit immediate.
3047  case Mips::BI__builtin_msa_copy_s_h:
3048  case Mips::BI__builtin_msa_copy_u_h:
3049  case Mips::BI__builtin_msa_insve_h:
3050  case Mips::BI__builtin_msa_splati_h: i = 1; l = 0; u = 7; break;
3051  case Mips::BI__builtin_msa_sldi_h: i = 2; l = 0; u = 7; break;
3052  // These intrinsics take an unsigned 2 bit immediate.
3053  case Mips::BI__builtin_msa_copy_s_w:
3054  case Mips::BI__builtin_msa_copy_u_w:
3055  case Mips::BI__builtin_msa_insve_w:
3056  case Mips::BI__builtin_msa_splati_w: i = 1; l = 0; u = 3; break;
3057  case Mips::BI__builtin_msa_sldi_w: i = 2; l = 0; u = 3; break;
3058  // These intrinsics take an unsigned 1 bit immediate.
3059  case Mips::BI__builtin_msa_copy_s_d:
3060  case Mips::BI__builtin_msa_copy_u_d:
3061  case Mips::BI__builtin_msa_insve_d:
3062  case Mips::BI__builtin_msa_splati_d: i = 1; l = 0; u = 1; break;
3063  case Mips::BI__builtin_msa_sldi_d: i = 2; l = 0; u = 1; break;
3064  // Memory offsets and immediate loads.
3065  // These intrinsics take a signed 10 bit immediate.
3066  case Mips::BI__builtin_msa_ldi_b: i = 0; l = -128; u = 255; break;
3067  case Mips::BI__builtin_msa_ldi_h:
3068  case Mips::BI__builtin_msa_ldi_w:
3069  case Mips::BI__builtin_msa_ldi_d: i = 0; l = -512; u = 511; break;
3070  case Mips::BI__builtin_msa_ld_b: i = 1; l = -512; u = 511; m = 1; break;
3071  case Mips::BI__builtin_msa_ld_h: i = 1; l = -1024; u = 1022; m = 2; break;
3072  case Mips::BI__builtin_msa_ld_w: i = 1; l = -2048; u = 2044; m = 4; break;
3073  case Mips::BI__builtin_msa_ld_d: i = 1; l = -4096; u = 4088; m = 8; break;
3074  case Mips::BI__builtin_msa_st_b: i = 2; l = -512; u = 511; m = 1; break;
3075  case Mips::BI__builtin_msa_st_h: i = 2; l = -1024; u = 1022; m = 2; break;
3076  case Mips::BI__builtin_msa_st_w: i = 2; l = -2048; u = 2044; m = 4; break;
3077  case Mips::BI__builtin_msa_st_d: i = 2; l = -4096; u = 4088; m = 8; break;
3078  }
3079 
3080  if (!m)
3081  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3082 
3083  return SemaBuiltinConstantArgRange(TheCall, i, l, u) ||
3084  SemaBuiltinConstantArgMultiple(TheCall, i, m);
3085 }
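
// [Editorial sketch, not part of the original source] For the MSA load/store
// cases above, a nonzero m means the immediate must lie in [l, u] and also be
// a multiple of m, i.e. a naturally aligned byte offset. A hypothetical
// helper restating that combined check:

#include <cstdint>

static bool isValidMsaImmediate(int64_t Value, int64_t l, int64_t u,
                                int64_t m) {
  if (Value < l || Value > u)
    return false;
  return m == 0 || Value % m == 0;
}

// For __builtin_msa_ld_w the table sets i = 1, l = -2048, u = 2044, m = 4:
// offsets such as -2048, 0, or 2044 pass, while 10 (not a multiple of 4) or
// 2048 (out of range) would be diagnosed.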
3086 
3087 bool Sema::CheckPPCBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
3088  unsigned i = 0, l = 0, u = 0;
3089  bool Is64BitBltin = BuiltinID == PPC::BI__builtin_divde ||
3090  BuiltinID == PPC::BI__builtin_divdeu ||
3091  BuiltinID == PPC::BI__builtin_bpermd;
3092  bool IsTarget64Bit = Context.getTargetInfo()
3093  .getTypeWidth(Context
3094  .getTargetInfo()
3095  .getIntPtrType()) == 64;
3096  bool IsBltinExtDiv = BuiltinID == PPC::BI__builtin_divwe ||
3097  BuiltinID == PPC::BI__builtin_divweu ||
3098  BuiltinID == PPC::BI__builtin_divde ||
3099  BuiltinID == PPC::BI__builtin_divdeu;
3100 
3101  if (Is64BitBltin && !IsTarget64Bit)
3102  return Diag(TheCall->getBeginLoc(), diag::err_64_bit_builtin_32_bit_tgt)
3103  << TheCall->getSourceRange();
3104 
3105  if ((IsBltinExtDiv && !Context.getTargetInfo().hasFeature("extdiv")) ||
3106  (BuiltinID == PPC::BI__builtin_bpermd &&
3107  !Context.getTargetInfo().hasFeature("bpermd")))
3108  return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
3109  << TheCall->getSourceRange();
3110 
3111  auto SemaVSXCheck = [&](CallExpr *TheCall) -> bool {
3112  if (!Context.getTargetInfo().hasFeature("vsx"))
3113  return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
3114  << TheCall->getSourceRange();
3115  return false;
3116  };
3117 
3118  switch (BuiltinID) {
3119  default: return false;
3120  case PPC::BI__builtin_altivec_crypto_vshasigmaw:
3121  case PPC::BI__builtin_altivec_crypto_vshasigmad:
3122  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 1) ||
3123  SemaBuiltinConstantArgRange(TheCall, 2, 0, 15);
3124  case PPC::BI__builtin_tbegin:
3125  case PPC::BI__builtin_tend: i = 0; l = 0; u = 1; break;
3126  case PPC::BI__builtin_tsr: i = 0; l = 0; u = 7; break;
3127  case PPC::BI__builtin_tabortwc:
3128  case PPC::BI__builtin_tabortdc: i = 0; l = 0; u = 31; break;
3129  case PPC::BI__builtin_tabortwci:
3130  case PPC::BI__builtin_tabortdci:
3131  return SemaBuiltinConstantArgRange(TheCall, 0, 0, 31) ||
3132  SemaBuiltinConstantArgRange(TheCall, 2, 0, 31);
3133  case PPC::BI__builtin_vsx_xxpermdi:
3134  case PPC::BI__builtin_vsx_xxsldwi:
3135  return SemaBuiltinVSX(TheCall);
3136  case PPC::BI__builtin_unpack_vector_int128:
3137  return SemaVSXCheck(TheCall) ||
3138  SemaBuiltinConstantArgRange(TheCall, 1, 0, 1);
3139  case PPC::BI__builtin_pack_vector_int128:
3140  return SemaVSXCheck(TheCall);
3141  }
3142  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3143 }
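
// [Editorial sketch, not part of the original source] The PPC checks above
// gate a few builtins on target properties before any immediate is range
// checked: divde, divdeu, and bpermd need a 64-bit target (judged by the
// width of the target's intptr_t), and the extended-divide family also needs
// the "extdiv" feature. A hypothetical predicate summarizing that gating:

static bool isPPCBuiltinUsable(bool Needs64Bit, bool NeedsExtDiv,
                               unsigned IntPtrWidth, bool HasExtDiv) {
  if (Needs64Bit && IntPtrWidth != 64)
    return false;                 // err_64_bit_builtin_32_bit_tgt
  if (NeedsExtDiv && !HasExtDiv)
    return false;                 // err_ppc_builtin_only_on_pwr7
  return true;
}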
3144 
3145 bool Sema::CheckSystemZBuiltinFunctionCall(unsigned BuiltinID,
3146  CallExpr *TheCall) {
3147  if (BuiltinID == SystemZ::BI__builtin_tabort) {
3148  Expr *Arg = TheCall->getArg(0);
3149  llvm::APSInt AbortCode(32);
3150  if (Arg->isIntegerConstantExpr(AbortCode, Context) &&
3151  AbortCode.getSExtValue() >= 0 && AbortCode.getSExtValue() < 256)
3152  return Diag(Arg->getBeginLoc(), diag::err_systemz_invalid_tabort_code)
3153  << Arg->getSourceRange();
3154  }
3155 
3156  // For intrinsics which take an immediate value as part of the instruction,
3157  // range check them here.
3158  unsigned i = 0, l = 0, u = 0;
3159  switch (BuiltinID) {
3160  default: return false;
3161  case SystemZ::BI__builtin_s390_lcbb: i = 1; l = 0; u = 15; break;
3162  case SystemZ::BI__builtin_s390_verimb:
3163  case SystemZ::BI__builtin_s390_verimh:
3164  case SystemZ::BI__builtin_s390_verimf:
3165  case SystemZ::BI__builtin_s390_verimg: i = 3; l = 0; u = 255; break;
3166  case SystemZ::BI__builtin_s390_vfaeb:
3167  case SystemZ::BI__builtin_s390_vfaeh:
3168  case SystemZ::BI__builtin_s390_vfaef:
3169  case SystemZ::BI__builtin_s390_vfaebs:
3170  case SystemZ::BI__builtin_s390_vfaehs:
3171  case SystemZ::BI__builtin_s390_vfaefs:
3172  case SystemZ::BI__builtin_s390_vfaezb:
3173  case SystemZ::BI__builtin_s390_vfaezh:
3174  case SystemZ::BI__builtin_s390_vfaezf:
3175  case SystemZ::BI__builtin_s390_vfaezbs:
3176  case SystemZ::BI__builtin_s390_vfaezhs:
3177  case SystemZ::BI__builtin_s390_vfaezfs: i = 2; l = 0; u = 15; break;
3178  case SystemZ::BI__builtin_s390_vfisb:
3179  case SystemZ::BI__builtin_s390_vfidb:
3180  return SemaBuiltinConstantArgRange(TheCall, 1, 0, 15) ||
3181  SemaBuiltinConstantArgRange(TheCall, 2, 0, 15);
3182  case SystemZ::BI__builtin_s390_vftcisb:
3183  case SystemZ::BI__builtin_s390_vftcidb: i = 1; l = 0; u = 4095; break;
3184  case SystemZ::BI__builtin_s390_vlbb: i = 1; l = 0; u = 15; break;
3185  case SystemZ::BI__builtin_s390_vpdi: i = 2; l = 0; u = 15; break;
3186  case SystemZ::BI__builtin_s390_vsldb: i = 2; l = 0; u = 15; break;
3187  case SystemZ::BI__builtin_s390_vstrcb:
3188  case SystemZ::BI__builtin_s390_vstrch:
3189  case SystemZ::BI__builtin_s390_vstrcf:
3190  case SystemZ::BI__builtin_s390_vstrczb:
3191  case SystemZ::BI__builtin_s390_vstrczh:
3192  case SystemZ::BI__builtin_s390_vstrczf:
3193  case SystemZ::BI__builtin_s390_vstrcbs:
3194  case SystemZ::BI__builtin_s390_vstrchs:
3195  case SystemZ::BI__builtin_s390_vstrcfs:
3196  case SystemZ::BI__builtin_s390_vstrczbs:
3197  case SystemZ::BI__builtin_s390_vstrczhs:
3198  case SystemZ::BI__builtin_s390_vstrczfs: i = 3; l = 0; u = 15; break;
3199  case SystemZ::BI__builtin_s390_vmslg: i = 3; l = 0; u = 15; break;
3200  case SystemZ::BI__builtin_s390_vfminsb:
3201  case SystemZ::BI__builtin_s390_vfmaxsb:
3202  case SystemZ::BI__builtin_s390_vfmindb:
3203  case SystemZ::BI__builtin_s390_vfmaxdb: i = 2; l = 0; u = 15; break;
3204  }
3205  return SemaBuiltinConstantArgRange(TheCall, i, l, u);
3206 }
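
// [Editorial sketch, not part of the original source] The __builtin_tabort
// check above rejects compile-time-constant abort codes in [0, 255];
// presumably those low codes are reserved, so user code must pass 256 or
// higher (or a value that is not an integer constant expression). A
// hypothetical restatement of the rejected range:

static bool isRejectedTabortCode(long long Code) {
  return Code >= 0 && Code < 256;
}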
3207 
3208 /// SemaBuiltinCpuSupports - Handle __builtin_cpu_supports(char *).
3209 /// This checks that the target supports __builtin_cpu_supports and
3210 /// that the string argument is constant and valid.
3211 static bool SemaBuiltinCpuSupports(Sema &S, CallExpr *TheCall) {
3212  Expr *Arg = TheCall->getArg(0);
3213 
3214  // Check if the argument is a string literal.
3215  if (!isa<StringLiteral>(Arg->IgnoreParenImpCasts()))
3216  return S.Diag(TheCall->getBeginLoc(), diag::err_expr_not_string_literal)
3217  << Arg->getSourceRange();
3218 
3219  // Check the contents of the string.
3220  StringRef Feature =
3221  cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
3222  if (!S.Context.getTargetInfo().validateCpuSupports(Feature))
3223  return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_supports)
3224  << Arg->getSourceRange();
3225  return false;
3226 }
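
// [Editorial sketch, not part of the original source] From the caller's side,
// the constraint enforced above is that the argument of
// __builtin_cpu_supports must be a string literal naming a feature the target
// knows about (the same shape of check is applied to __builtin_cpu_is below).
// For example, on x86:

void dispatch(const char *RuntimeName) {
  (void)RuntimeName; // only a string literal, not a runtime value, is allowed
  if (__builtin_cpu_supports("avx2")) {
    // ... take an AVX2 code path ...
  }
  // __builtin_cpu_supports(RuntimeName) would be diagnosed with
  // err_expr_not_string_literal; an unknown feature name would be diagnosed
  // with err_invalid_cpu_supports.
}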
3227 
3228 /// SemaBuiltinCpuIs - Handle __builtin_cpu_is(char *).
3229 /// This checks that the target supports __builtin_cpu_is and
3230 /// that the string argument is constant and valid.
3231 static bool SemaBuiltinCpuIs(Sema &S, CallExpr *TheCall) {
3232  Expr *Arg = TheCall->getArg(0);
3233 
3234  // Check if the argument is a string literal.
3235  if (!isa<StringLiteral>(Arg->IgnoreParenImpCasts()))
3236  return S.Diag(TheCall->getBeginLoc(), diag::err_expr_not_string_literal)
3237  << Arg->getSourceRange();
3238 
3239  // Check the contents of the string.
3240  StringRef Feature =
3241  cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
3242  if (!S.Context.getTargetInfo().validateCpuIs(Feature))
3243  return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_is)
3244  << Arg->getSourceRange();
3245  return false;
3246 }
3247 
3248 // Check if the rounding mode is legal.
3249 bool Sema::CheckX86BuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
3250  // Indicates if this instruction has rounding control or just SAE.
3251  bool HasRC = false;
3252 
3253  unsigned ArgNum = 0;
3254  switch (BuiltinID) {
3255  default:
3256  return false;
3257  case X86::BI__builtin_ia32_vcvttsd2si32:
3258  case X86::BI__builtin_ia32_vcvttsd2si64:
3259  case X86::BI__builtin_ia32_vcvttsd2usi32:
3260  case X86::BI__builtin_ia32_vcvttsd2usi64:
3261  case X86::BI__builtin_ia32_vcvttss2si32:
3262  case X86::BI__builtin_ia32_vcvttss2si64:
3263  case X86::BI__builtin_ia32_vcvttss2usi32:
3264  case X86::BI__builtin_ia32_vcvttss2usi64:
3265  ArgNum = 1;
3266  break;
3267  case X86::BI__builtin_ia32_maxpd512:
3268  case X86::BI__builtin_ia32_maxps512:
3269  case X86::BI__builtin_ia32_minpd512:
3270  case X86::BI__builtin_ia32_minps512:
3271  ArgNum = 2;
3272  break;
3273  case X86::BI__builtin_ia32_cvtps2pd512_mask:
3274  case X86::BI__builtin_ia32_cvttpd2dq512_mask:
3275  case X86::BI__builtin_ia32_cvttpd2qq512_mask:
3276  case X86::BI__builtin_ia32_cvttpd2udq512_mask:
3277  case X86::BI__builtin_ia32_cvttpd2uqq512_mask:
3278  case X86::BI__builtin_ia32_cvttps2dq512_mask:
3279  case X86::BI__builtin_ia32_cvttps2qq512_mask:
3280  case X86::BI__builtin_ia32_cvttps2udq512_mask:
3281  case X86::BI__builtin_ia32_cvttps2uqq512_mask:
3282  case X86::BI__builtin_ia32_exp2pd_mask:
3283  case X86::BI__builtin_ia32_exp2ps_mask:
3284  case X86::BI__builtin_ia32_getexppd512_mask:
3285  case X86::BI__builtin_ia32_getexpps512_mask:
3286  case X86::BI__builtin_ia32_rcp28pd_mask:
3287  case X86::BI__builtin_ia32_rcp28ps_mask:
3288  case X86::BI__builtin_ia32_rsqrt28pd_mask:
3289  case X86::BI__builtin_ia32_rsqrt28ps_mask:
3290  case X86::BI__builtin_ia32_vcomisd:
3291  case X86::BI__builtin_ia32_vcomiss:
3292  case X86::BI__builtin_ia32_vcvtph2ps512_mask:
3293  ArgNum = 3;
3294  break;
3295  case X86::BI__builtin_ia32_cmppd512_mask:
3296  case X86::BI__builtin_ia32_cmpps512_mask:
3297  case X86::BI__builtin_ia32_cmpsd_mask:
3298  case X86::BI__builtin_ia32_cmpss_mask:
3299  case X86::BI__builtin_ia32_cvtss2sd_round_mask:
3300  case X86::BI__builtin_ia32_getexpsd128_round_mask:
3301  case X86::BI__builtin_ia32_getexpss128_round_mask:
3302  case X86::BI__builtin_ia32_maxsd_round_mask:
3303  case X86::BI__builtin_ia32_maxss_round_mask:
3304  case X86::BI__builtin_ia32_minsd_round_mask:
3305  case X86::BI__builtin_ia32_minss_round_mask:
3306  case X86::BI__builtin_ia32_rcp28sd_round_mask:
3307  case X86::BI__builtin_ia32_rcp28ss_round_mask:
3308  case X86::BI__builtin_ia32_reducepd512_mask:
3309  case X86::BI__builtin_ia32_reduceps512_mask:
3310  case X86::BI__builtin_ia32_rndscalepd_mask:
3311  case X86::BI__builtin_ia32_rndscaleps_mask:
3312  case X86::BI__builtin_ia32_rsqrt28sd_round_mask:
3313  case X86::BI__builtin_ia32_rsqrt28ss_round_mask:
3314  ArgNum = 4;
3315  break;
3316  case X86::BI__builtin_ia32_fixupimmpd512_mask:
3317  case X86::BI__builtin_ia32_fixupimmpd512_maskz:
3318  case X86::BI__builtin_ia32_fixupimmps512_mask:
3319  case X86::BI__builtin_ia32_fixupimmps512_maskz:
3320  case X86::BI__builtin_ia32_fixupimmsd_mask:
3321  case X86::BI__builtin_ia32_fixupimmsd_maskz:
3322  case X86::BI__builtin_ia32_fixupimmss_mask:
3323  case X86::BI__builtin_ia32_fixupimmss_maskz:
3324  case X86::BI__builtin_ia32_rangepd512_mask:
3325  case X86::BI__builtin_ia32_rangeps512_mask:
3326  case X86::BI__builtin_ia32_rangesd128_round_mask:
3327  case X86::BI__builtin_ia32_rangess128_round_mask:
3328  case X86::BI__builtin_ia32_reducesd_mask:
3329  case X86::BI__builtin_ia32_reducess_mask:
3330  case X86::BI__builtin_ia32_rndscalesd_round_mask:
3331  case X86::BI__builtin_ia32_rndscaless_round_mask:
3332  ArgNum = 5;
3333  break;
3334  case X86::BI__builtin_ia32_vcvtsd2si64:
3335  case X86::BI__builtin_ia32_vcvtsd2si32:
3336  case X86::BI__builtin_ia32_vcvtsd2usi32:
3337  case X86::BI__builtin_ia32_vcvtsd2usi64:
3338  case X86::BI__builtin_ia32_vcvtss2si32:
3339  case X86::BI__builtin_ia32_vcvtss2si64:
3340  case X86::BI__builtin_ia32_vcvtss2usi32:
3341  case X86::BI__builtin_ia32_vcvtss2usi64:
3342  case X86::BI__builtin_ia32_sqrtpd512:
3343  case X86::BI__builtin_ia32_sqrtps512:
3344  ArgNum = 1;
3345  HasRC = true;
3346  break;
3347  case X86::BI__builtin_ia32_addpd512:
3348  case X86::BI__builtin_ia32_addps512:
3349  case X86::BI__builtin_ia32_divpd512:
3350  case X86::BI__builtin_ia32_divps512:
3351  case X86::BI__builtin_ia32_mulpd512:
3352  case X86::BI__builtin_ia32_mulps512:
3353  case X86::BI__builtin_ia32_subpd512:
3354  case X86::BI__builtin_ia32_subps512:
3355  case X86::BI__builtin_ia32_cvtsi2sd64:
3356  case X86::BI__builtin_ia32_cvtsi2ss32:
3357  case X86::BI__builtin_ia32_cvtsi2ss64:
3358  case X86::BI__builtin_ia32_cvtusi2sd64:
3359  case X86::BI__builtin_ia32_cvtusi2ss32:
3360  case X86::BI__builtin_ia32_cvtusi2ss64:
3361  ArgNum = 2;
3362  HasRC = true;
3363  break;
3364  case X86::BI__builtin_ia32_cvtdq2ps512_mask:
3365  case X86::BI__builtin_ia32_cvtudq2ps512_mask:
3366  case X86::BI__builtin_ia32_cvtpd2ps512_mask:
3367  case X86::BI__builtin_ia32_cvtpd2qq512_mask:
3368  case X86::BI__builtin_ia32_cvtpd2uqq512_mask:
3369  case X86::BI__builtin_ia32_cvtps2qq512_mask:
3370  case X86::BI__builtin_ia32_cvtps2uqq512_mask:
3371  case X86::BI__builtin_ia32_cvtqq2pd512_mask:
3372  case X86::BI__builtin_ia32_cvtqq2ps512_mask:
3373  case X86::BI__builtin_ia32_cvtuqq2pd512_mask:
3374  case X86::BI__builtin_ia32_cvtuqq2ps512_mask:
3375  ArgNum = 3;
3376  HasRC = true;
3377  break;
3378  case X86::BI__builtin_ia32_addss_round_mask:
3379  case X86::BI__builtin_ia32_addsd_round_mask:
3380  case X86::BI__builtin_ia32_divss_round_mask:
3381  case X86::BI__builtin_ia32_divsd_round_mask:
3382  case X86::BI__builtin_ia32_mulss_round_mask:
3383  case X86::BI__builtin_ia32_mulsd_round_mask:
3384  case X86::BI__builtin_ia32_subss_round_mask:
3385  case X86::BI__builtin_ia32_subsd_round_mask:
3386  case X86::BI__builtin_ia32_scalefpd512_mask:
3387  case X86::BI__builtin_ia32_scalefps512_mask:
3388  case X86::BI__builtin_ia32_scalefsd_round_mask:
3389  case X86::BI__builtin_ia32_scalefss_round_mask:
3390  case X86::BI__builtin_ia32_getmantpd512_mask:
3391  case X86::BI__builtin_ia32_getmantps512_mask:
3392  case X86::BI__builtin_ia32_cvtsd2ss_round_mask:
3393  case X86::BI__builtin_ia32_sqrtsd_round_mask:
3394  case X86::BI__builtin_ia32_sqrtss_round_mask:
3395  case X86::BI__builtin_ia32_vfmaddsd3_mask:
3396  case X86::BI__builtin_ia32_vfmaddsd3_maskz:
3397  case X86::BI__builtin_ia32_vfmaddsd3_mask3:
3398  case X86::BI__builtin_ia32_vfmaddss3_mask:
3399  case X86::BI__builtin_ia32_vfmaddss3_maskz:
3400  case X86::BI__builtin_ia32_vfmaddss3_mask3:
3401  case X86::BI__builtin_ia32_vfmaddpd512_mask:
3402  case X86::BI__builtin_ia32_vfmaddpd512_maskz:
3403  case X86::BI__builtin_ia32_vfmaddpd512_mask3:
3404  case X86::BI__builtin_ia32_vfmsubpd512_mask3:
3405  case X86::BI__builtin_ia32_vfmaddps512_mask:
3406  case X86::BI__builtin_ia32_vfmaddps512_maskz:
3407  case X86::BI__builtin_ia32_vfmaddps512_mask3:
3408  case X86::BI__builtin_ia32_vfmsubps512_mask3:
3409  case X86::BI__builtin_ia32_vfmaddsubpd512_mask:
3410  case X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
3411  case X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
3412  case X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
3413  case X86::BI__builtin_ia32_vfmaddsubps512_mask:
3414  case X86::BI__builtin_ia32_vfmaddsubps512_maskz:
3415  case X86::BI__builtin_ia32_vfmaddsubps512_mask3:
3416  case X86::BI__builtin_ia32_vfmsubaddps512_mask3:
3417  ArgNum = 4;
3418  HasRC = true;
3419  break;
3420  case X86::BI__builtin_ia32_getmantsd_round_mask:
3421  case X86::BI__builtin_ia32_getmantss_round_mask:
3422  ArgNum = 5;
3423  HasRC = true;
3424  break;
3425  }
3426 
3427  llvm::APSInt Result;
3428 
3429  // We can't check the value of a dependent argument.
3430  Expr *Arg = TheCall->getArg(ArgNum);
3431  if (Arg->isTypeDependent() || Arg->isValueDependent())
3432  return false;
3433 
3434  // Check constant-ness first.
3435  if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
3436  return true;
3437 
3438  // Make sure the rounding mode is either ROUND_CUR_DIRECTION or has the
3439  // ROUND_NO_EXC bit set. If the intrinsic has rounding control (bits 1:0),
3440  // make sure it is only used in combination with ROUND_NO_EXC.
3441  if (Result == 4/*ROUND_CUR_DIRECTION*/ ||
3442  Result == 8/*ROUND_NO_EXC*/ ||
3443  (HasRC && Result.getZExtValue() >= 8 && Result.getZExtValue() <= 11))
3444  return false;
3445 
3446  return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_rounding)
3447  << Arg->getSourceRange();
3448 }
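
// [Editorial sketch, not part of the original source] In terms of immediate
// values, the check above accepts 4 (_MM_FROUND_CUR_DIRECTION) and 8
// (_MM_FROUND_NO_EXC) for every listed builtin, and for builtins with rounding
// control it also accepts 8-11, i.e. a rounding mode in bits 1:0 combined with
// _MM_FROUND_NO_EXC. A hypothetical restatement:

static bool isValidRoundingImmediate(unsigned Imm, bool HasRoundingControl) {
  if (Imm == 4 /*_MM_FROUND_CUR_DIRECTION*/ || Imm == 8 /*_MM_FROUND_NO_EXC*/)
    return true;
  // e.g. _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC == 11
  return HasRoundingControl && Imm >= 8 && Imm <= 11;
}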
3449 
3450 // Check if the gather/scatter scale is legal.
3451 bool Sema::CheckX86BuiltinGatherScatterScale(unsigned BuiltinID,
3452  CallExpr *TheCall) {
3453  unsigned ArgNum = 0;
3454  switch (BuiltinID) {
3455  default:
3456  return false;
3457  case X86::BI__builtin_ia32_gatherpfdpd:
3458  case X86::BI__builtin_ia32_gatherpfdps:
3459  case X86::BI__builtin_ia32_gatherpfqpd:
3460  case X86::BI__builtin_ia32_gatherpfqps:
3461  case X86::BI__builtin_ia32_scatterpfdpd:
3462  case X86::BI__builtin_ia32_scatterpfdps:
3463  case X86::BI__builtin_ia32_scatterpfqpd:
3464  case X86::BI__builtin_ia32_scatterpfqps:
3465  ArgNum = 3;
3466  break;
3467  case X86::BI__builtin_ia32_gatherd_pd:
3468  case X86::BI__builtin_ia32_gatherd_pd256:
3469  case X86::BI__builtin_ia32_gatherq_pd:
3470  case X86::BI__builtin_ia32_gatherq_pd256:
3471  case X86::BI__builtin_ia32_gatherd_ps:
3472  case X86::BI__builtin_ia32_gatherd_ps256:
3473  case X86::BI__builtin_ia32_gatherq_ps:
3474  case X86::BI__builtin_ia32_gatherq_ps256:
3475  case X86::BI__builtin_ia32_gatherd_q:
3476  case X86::BI__builtin_ia32_gatherd_q256:
3477  case X86::BI__builtin_ia32_gatherq_q:
3478  case X86::BI__builtin_ia32_gatherq_q256:
3479  case X86::BI__builtin_ia32_gatherd_d:
3480  case X86::BI__builtin_ia32_gatherd_d256:
3481  case X86::BI__builtin_ia32_gatherq_d:
3482  case X86::BI__builtin_ia32_gatherq_d256:
3483  case X86::BI__builtin_ia32_gather3div2df:
3484  case X86::BI__builtin_ia32_gather3div2di:
3485  case X86::BI__builtin_ia32_gather3div4df:
3486  case X86::BI__builtin_ia32_gather3div4di:
3487  case X86::BI__builtin_ia32_gather3div4sf:
3488  case X86::BI__builtin_ia32_gather3div4si:
3489  case X86::BI__builtin_ia32_gather3div8sf:
3490  case X86::BI__builtin_ia32_gather3div8si:
3491  case X86::BI__builtin_ia32_gather3siv2df:
3492  case X86::BI__builtin_ia32_gather3siv2di:
3493  case X86::BI__builtin_ia32_gather3siv4df:
3494  case X86::BI__builtin_ia32_gather3siv4di:
3495  case X86::BI__builtin_ia32_gather3siv4sf:
3496  case X86::BI__builtin_ia32_gather3siv4si:
3497  case X86::BI__builtin_ia32_gather3siv8sf:
3498  case X86::BI__builtin_ia32_gather3siv8si:
3499  case X86::BI__builtin_ia32_gathersiv8df:
3500  case X86::BI__builtin_ia32_gathersiv16sf:
3501  case X86::BI__builtin_ia32_gatherdiv8df:
3502  case X86::BI__builtin_ia32_gatherdiv16sf:
3503  case X86::BI__builtin_ia32_gathersiv8di:
3504  case X86::BI__builtin_ia32_gathersiv16si:
3505  case X86::BI__builtin_ia32_gatherdiv8di:
3506  case X86::BI__builtin_ia32_gatherdiv16si:
3507  case X86::BI__builtin_ia32_scatterdiv2df:
3508  case X86::BI__builtin_ia32_scatterdiv2di:
3509  case X86::BI__builtin_ia32_scatterdiv4df:
3510  case X86::BI__builtin_ia32_scatterdiv4di:
3511  case X86::BI__builtin_ia32_scatterdiv4sf:
3512  case X86::BI__builtin_ia32_scatterdiv4si:
3513  case X86::BI__builtin_ia32_scatterdiv8sf:
3514  case X86::BI__builtin_ia32_scatterdiv8si:
3515  case X86::BI__builtin_ia32_scattersiv2df:
3516  case X86::BI__builtin_ia32_scattersiv2di:
3517  case X86::BI__builtin_ia32_scattersiv4df:
3518  case X86::BI__builtin_ia32_scattersiv4di:
3519  case X86::BI__builtin_ia32_scattersiv4sf:
3520  case X86::BI__builtin_ia32_scattersiv4si:
3521  case X86::BI__builtin_ia32_scattersiv8sf:
3522  case X86::BI__builtin_ia32_scattersiv8si:
3523  case X86::BI__builtin_ia32_scattersiv8df:
3524  case X86::BI__builtin_ia32_scattersiv16sf:
3525  case X86::BI__builtin_ia32_scatterdiv8df:
3526  case X86::BI__builtin_ia32_scatterdiv16sf:
3527  case X86::BI__builtin_ia32_scattersiv8di:
3528  case X86::BI__builtin_ia32_scattersiv16si:
3529  case X86::BI__builtin_ia32_scatterdiv8di:
3530  case X86::BI__builtin_ia32_scatterdiv16si:
3531  ArgNum = 4;
3532  break;
3533  }
3534 
3535  llvm::APSInt Result;
3536 
3537  // We can't check the value of a dependent argument.
3538  Expr *Arg = TheCall->getArg(ArgNum);
3539  if (Arg->isTypeDependent() || Arg->isValueDependent())
3540  return false;
3541 
3542  // Check constant-ness first.
3543  if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
3544  return true;
3545 
3546  if (Result == 1 || Result == 2 || Result == 4 || Result == 8)
3547  return false;
3548 
3549  return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_scale)
3550  << Arg->getSourceRange();
3551 }
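
// [Editorial sketch, not part of the original source] The scale immediate of
// the gather/scatter builtins above is the byte stride applied to each index
// element, so only 1, 2, 4, or 8 is accepted; any other constant triggers
// err_x86_builtin_invalid_scale. A hypothetical restatement:

static bool isValidGatherScatterScale(unsigned Scale) {
  return Scale == 1 || Scale == 2 || Scale == 4 || Scale == 8;
}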
3552 
3553 static bool isX86_32Builtin(unsigned BuiltinID) {
3554  // These builtins only work on x86-32 targets.
3555  switch (BuiltinID) {
3556  case X86::BI__builtin_ia32_readeflags_u32:
3557  case X86::BI__builtin_ia32_writeeflags_u32:
3558  return true;
3559  }
3560 
3561  return false;
3562 }
3563 
3564 bool Sema::CheckX86BuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
3565  if (BuiltinID == X86::BI__builtin_cpu_supports)
3566  return SemaBuiltinCpuSupports(*this, TheCall);
3567 
3568  if (BuiltinID == X86::BI__builtin_cpu_is)
3569  return SemaBuiltinCpuIs(*this, TheCall);
3570 
3571  // Check for 32-bit only builtins on a 64-bit target.
3572  const llvm::Triple &TT = Context.getTargetInfo().getTriple();
3573  if (TT.getArch() != llvm::Triple::x86 && isX86_32Builtin(BuiltinID))
3574  return Diag(TheCall->getCallee()->getBeginLoc(),
3575  diag::err_32_bit_builtin_64_bit_tgt);
3576 
3577  // If the intrinsic has rounding or SAE, make sure it's valid.
3578  if (CheckX86BuiltinRoundingOrSAE(BuiltinID, TheCall))
3579  return true;
3580 
3581  // If the intrinsic has a gather/scatter scale immediate, make sure it's valid.
3582  if (CheckX86BuiltinGatherScatterScale(BuiltinID, TheCall))
3583  return true;
3584 
3585  // For intrinsics which take an immediate value as part of the instruction,
3586  // range check them here.
3587  int i = 0, l = 0, u = 0;
3588  switch (BuiltinID) {
3589  default:
3590  return false;
3591  case X86::BI__builtin_ia32_vec_ext_v2si:
3592  case X86::BI__builtin_ia32_vec_ext_v2di:
3593  case X86::BI__builtin_ia32_vextractf128_pd256:
3594  case X86::BI__builtin_ia32_vextractf128_ps256:
3595  case X86::BI__builtin_ia32_vextractf128_si256:
3596  case X86::BI__builtin_ia32_extract128i256:
3597  case X86::BI__builtin_ia32_extractf64x4_mask:
3598  case X86::BI__builtin_ia32_extracti64x4_mask:
3599  case X86::BI__builtin_ia32_extractf32x8_mask:
3600  case X86::BI__builtin_ia32_extracti32x8_mask:
3601  case X86::BI__builtin_ia32_extractf64x2_256_mask:
3602  case X86::BI__builtin_ia32_extracti64x2_256_mask:
3603  case X86::BI__builtin_ia32_extractf32x4_256_mask:
3604  case X86::BI__builtin_ia32_extracti32x4_256_mask:
3605  i = 1; l = 0; u = 1;
3606  break;
3607  case X86::BI__builtin_ia32_vec_set_v2di:
3608  case X86::BI__builtin_ia32_vinsertf128_pd256:
3609  case X86::BI__builtin_ia32_vinsertf128_ps256:
3610  case X86::BI__builtin_ia32_vinsertf128_si256:
3611  case X86::BI__builtin_ia32_insert128i256:
3612  case X86::BI__builtin_ia32_insertf32x8:
3613  case X86::BI__builtin_ia32_inserti32x8:
3614  case X86::BI__builtin_ia32_insertf64x4:
3615  case X86::BI__builtin_ia32_inserti64x4:
3616  case X86::BI__builtin_ia32_insertf64x2_256:
3617  case X86::BI__builtin_ia32_inserti64x2_256:
3618  case X86::BI__builtin_ia32_insertf32x4_256:
3619  case X86::BI__builtin_ia32_inserti32x4_256:
3620  i = 2; l = 0; u = 1;
3621  break;
3622  case X86::BI__builtin_ia32_vpermilpd:
3623  case X86::BI__builtin_ia32_vec_ext_v4hi:
3624  case X86::BI__builtin_ia32_vec_ext_v4si:
3625  case X86::BI__builtin_ia32_vec_ext_v4sf:
3626  case X86::BI__builtin_ia32_vec_ext_v4di:
3627  case X86::BI__builtin_ia32_extractf32x4_mask:
3628  case X86::BI__builtin_ia32_extracti32x4_mask:
3629  case X86::BI__builtin_ia32_extractf64x2_512_mask:
3630  case X86::BI__builtin_ia32_extracti64x2_512_mask:
3631  i = 1; l = 0; u = 3;
3632  break;
3633  case X86::BI_mm_prefetch:
3634  case X86::BI__builtin_ia32_vec_ext_v8hi:
3635  case X86::BI__builtin_ia32_vec_ext_v8si:
3636  i = 1; l = 0; u = 7;
3637  break;
3638  case X86::BI__builtin_ia32_sha1rnds4:
3639  case X86::BI__builtin_ia32_blendpd:
3640  case X86::BI__builtin_ia32_shufpd:
3641  case X86::BI__builtin_ia32_vec_set_v4hi:
3642  case X86::BI__builtin_ia32_vec_set_v4si:
3643  case X86::BI__builtin_ia32_vec_set_v4di:
3644  case X86::BI__builtin_ia32_shuf_f32x4_256:
3645  case X86::BI__builtin_ia32_shuf_f64x2_256:
3646  case X86::BI__builtin_ia32_shuf_i32x4_256:
3647  case X86::BI__builtin_ia32_shuf_i64x2_256:
3648  case X86::BI__builtin_ia32_insertf64x2_512:
3649  case X86::BI__builtin_ia32_inserti64x2_512:
3650  case X86::BI__builtin_ia32_insertf32x4:
3651  case X86::BI__builtin_ia32_inserti32x4:
3652  i = 2; l = 0; u = 3;
3653  break;
3654  case X86::BI__builtin_ia32_vpermil2pd:
3655  case X86::BI__builtin_ia32_vpermil2pd256:
3656  case X86::BI__builtin_ia32_vpermil2ps:
3657  case X86::BI__builtin_ia32_vpermil2ps256:
3658  i = 3; l = 0; u = 3;
3659  break;
3660  case X86::BI__builtin_ia32_cmpb128_mask:
3661  case X86::BI__builtin_ia32_cmpw128_mask:
3662  case X86::BI__builtin_ia32_cmpd128_mask:
3663  case X86::BI__builtin_ia32_cmpq128_mask:
3664  case X86::BI__builtin_ia32_cmpb256_mask:
3665  case X86::BI__builtin_ia32_cmpw256_mask:
3666  case X86::BI__builtin_ia32_cmpd256_mask:
3667  case X86::BI__builtin_ia32_cmpq256_mask:
3668  case X86::BI__builtin_ia32_cmpb512_mask:
3669  case X86::BI__builtin_ia32_cmpw512_mask:
3670  case X86::BI__builtin_ia32_cmpd512_mask:
3671  case X86::BI__builtin_ia32_cmpq512_mask:
3672  case X86::BI__builtin_ia32_ucmpb128_mask:
3673  case X86::BI__builtin_ia32_ucmpw128_mask:
3674  case X86::BI__builtin_ia32_ucmpd128_mask:
3675  case X86::BI__builtin_ia32_ucmpq128_mask:
3676  case X86::BI__builtin_ia32_ucmpb256_mask:
3677  case X86::BI__builtin_ia32_ucmpw256_mask:
3678  case X86::BI__builtin_ia32_ucmpd256_mask:
3679  case X86::BI__builtin_ia32_ucmpq256_mask:
3680  case X86::BI__builtin_ia32_ucmpb512_mask:
3681  case X86::BI__builtin_ia32_ucmpw512_mask:
3682  case X86::BI__builtin_ia32_ucmpd512_mask:
3683  case X86::BI__builtin_ia32_ucmpq512_mask:
3684  case X86::BI__builtin_ia32_vpcomub:
3685  case X86::BI__builtin_ia32_vpcomuw:
3686  case X86::BI__builtin_ia32_vpcomud:
3687  case X86::BI__builtin_ia32_vpcomuq:
3688  case X86::BI__builtin_ia32_vpcomb:
3689  case X86::BI__builtin_ia32_vpcomw:
3690  case X86::BI__builtin_ia32_vpcomd:
3691  case X86::BI__builtin_ia32_vpcomq:
3692  case X86::BI__builtin_ia32_vec_set_v8hi:
3693  case X86::BI__builtin_ia32_vec_set_v8si:
3694  i = 2; l = 0; u = 7;
3695  break;
3696  case X86::BI__builtin_ia32_vpermilpd256:
3697  case X86::BI__builtin_ia32_roundps:
3698  case X86::BI__builtin_ia32_roundpd:
3699  case X86::BI__builtin_ia32_roundps256:
3700  case X86::BI__builtin_ia32_roundpd256:
3701  case X86::BI__builtin_ia32_getmantpd128_mask:
3702  case X86::BI__builtin_ia32_getmantpd256_mask:
3703  case X86::BI__builtin_ia32_getmantps128_mask:
3704  case X86::BI__builtin_ia32_getmantps256_mask:
3705  case X86::BI__builtin_ia32_getmantpd512_mask:
3706  case X86::BI__builtin_ia32_getmantps512_mask:
3707  case X86::BI__builtin_ia32_vec_ext_v16qi:
3708  case X86::BI__builtin_ia32_vec_ext_v16hi:
3709  i = 1; l = 0; u = 15;
3710  break;
3711  case X86::BI__builtin_ia32_pblendd128:
3712  case X86::BI__builtin_ia32_blendps:
3713  case X86::BI__builtin_ia32_blendpd256:
3714  case X86::BI__builtin_ia32_shufpd256:
3715  case X86::BI__builtin_ia32_roundss:
3716  case X86::BI__builtin_ia32_roundsd:
3717  case X86::BI__builtin_ia32_rangepd128_mask:
3718  case X86::BI__builtin_ia32_rangepd256_mask:
3719  case X86::BI__builtin_ia32_rangepd512_mask:
3720  case X86::BI__builtin_ia32_rangeps128_mask:
3721  case X86::BI__builtin_ia32_rangeps256_mask:
3722  case X86::BI__builtin_ia32_rangeps512_mask:
3723  case X86::BI__builtin_ia32_getmantsd_round_mask:
3724  case X86::BI__builtin_ia32_getmantss_round_mask:
3725  case X86::BI__builtin_ia32_vec_set_v16qi:
3726  case X86::BI__builtin_ia32_vec_set_v16hi:
3727  i = 2; l = 0; u = 15;
3728  break;
3729  case X86::BI__builtin_ia32_vec_ext_v32qi:
3730  i = 1; l = 0; u = 31;
3731  break;
3732  case X86::BI__builtin_ia32_cmpps:
3733  case X86::BI__builtin_ia32_cmpss:
3734  case X86::BI__builtin_ia32_cmppd:
3735  case X86::BI__builtin_ia32_cmpsd:
3736  case X86::BI__builtin_ia32_cmpps256:
3737  case X86::BI__builtin_ia32_cmppd256:
3738  case X86::BI__builtin_ia32_cmpps128_mask:
3739  case X86::BI__builtin_ia32_cmppd128_mask:
3740  case X86::BI__builtin_ia32_cmpps256_mask:
3741  case X86::BI__builtin_ia32_cmppd256_mask:
3742  case X86::BI__builtin_ia32_cmpps512_mask:
3743  case X86::BI__builtin_ia32_cmppd512_mask:
3744  case X86::BI__builtin_ia32_cmpsd_mask:
3745  case X86::BI__builtin_ia32_cmpss_mask:
3746  case X86::BI__builtin_ia32_vec_set_v32qi:
3747  i = 2; l = 0; u = 31;
3748  break;
3749  case X86::BI__builtin_ia32_permdf256:
3750  case X86::BI__builtin_ia32_permdi256:
3751  case X86::BI__builtin_ia32_permdf512:
3752  case X86::BI__builtin_ia32_permdi512:
3753  case X86::BI__builtin_ia32_vpermilps:
3754  case X86::BI__builtin_ia32_vpermilps256:
3755  case X86::BI__builtin_ia32_vpermilpd512:
3756  case X86::BI__builtin_ia32_vpermilps512:
3757  case X86::BI__builtin_ia32_pshufd:
3758  case X86::BI__builtin_ia32_pshufd256:
3759  case X86::BI__builtin_ia32_pshufd512:
3760  case X86::BI__builtin_ia32_pshufhw:
3761  case X86::BI__builtin_ia32_pshufhw256:
3762  case X86::BI__builtin_ia32_pshufhw512:
3763  case X86::BI__builtin_ia32_pshuflw:
3764  case X86::BI__builtin_ia32_pshuflw256:
3765  case X86::BI__builtin_ia32_pshuflw512:
3766  case X86::BI__builtin_ia32_vcvtps2ph:
3767  case X86::BI__builtin_ia32_vcvtps2ph_mask:
3768  case X86::BI__builtin_ia32_vcvtps2ph256:
3769  case X86::BI__builtin_ia32_vcvtps2ph256_mask:
3770  case X86::BI__builtin_ia32_vcvtps2ph512_mask:
3771  case X86::BI__builtin_ia32_rndscaleps_128_mask:
3772  case X86::BI__builtin_ia32_rndscalepd_128_mask:
3773  case X86::BI__builtin_ia32_rndscaleps_256_mask:
3774  case X86::BI__builtin_ia32_rndscalepd_256_mask:
3775  case X86::BI__builtin_ia32_rndscaleps_mask:
3776  case X86::BI__builtin_ia32_rndscalepd_mask:
3777  case X86::BI__builtin_ia32_reducepd128_mask:
3778  case X86::BI__builtin_ia32_reducepd256_mask:
3779  case X86::BI__builtin_ia32_reducepd512_mask:
3780  case X86::BI__builtin_ia32_reduceps128_mask:
3781  case X86::BI__builtin_ia32_reduceps256_mask:
3782  case X86::BI__builtin_ia32_reduceps512_mask:
3783  case X86::BI__builtin_ia32_prold512:
3784  case X86::BI__builtin_ia32_prolq512:
3785  case X86::BI__builtin_ia32_prold128:
3786  case X86::BI__builtin_ia32_prold256:
3787  case X86::BI__builtin_ia32_prolq128:
3788  case X86::BI__builtin_ia32_prolq256:
3789  case X86::BI__builtin_ia32_prord512:
3790  case X86::BI__builtin_ia32_prorq512:
3791  case X86::BI__builtin_ia32_prord128:
3792  case X86::BI__builtin_ia32_prord256:
3793  case X86::BI__builtin_ia32_prorq128:
3794  case X86::BI__builtin_ia32_prorq256:
3795  case X86::BI__builtin_ia32_fpclasspd128_mask:
3796  case X86::BI__builtin_ia32_fpclasspd256_mask:
3797  case X86::BI__builtin_ia32_fpclassps128_mask:
3798  case X86::BI__builtin_ia32_fpclassps256_mask:
3799  case X86::BI__builtin_ia32_fpclassps512_mask:
3800  case X86::BI__builtin_ia32_fpclasspd512_mask:
3801  case X86::BI__builtin_ia32_fpclasssd_mask:
3802  case X86::BI__builtin_ia32_fpclassss_mask:
3803  case X86::BI__builtin_ia32_pslldqi128_byteshift:
3804  case X86::BI__builtin_ia32_pslldqi256_byteshift:
3805  case X86::BI__builtin_ia32_pslldqi512_byteshift:
3806  case X86::BI__builtin_ia32_psrldqi128_byteshift:
3807  case X86::BI__builtin_ia32_psrldqi256_byteshift:
3808  case X86::BI__builtin_ia32_psrldqi512_byteshift:
3809  case X86::BI__builtin_ia32_kshiftliqi:
3810  case X86::BI__builtin_ia32_kshiftlihi:
3811  case X86::BI__builtin_ia32_kshiftlisi:
3812  case X86::BI__builtin_ia32_kshiftlidi:
3813  case X86::BI__builtin_ia32_kshiftriqi:
3814  case X86::BI__builtin_ia32_kshiftrihi:
3815  case X86::BI__builtin_ia32_kshiftrisi:
3816  case X86::BI__builtin_ia32_kshiftridi:
3817  i = 1; l = 0; u = 255;
3818  break;
3819  case X86::BI__builtin_ia32_vperm2f128_pd256:
3820  case X86::BI__builtin_ia32_vperm2f128_ps256:
3821  case X86::BI__builtin_ia32_vperm2f128_si256:
3822  case X86::BI__builtin_ia32_permti256:
3823  case X86::BI__builtin_ia32_pblendw128:
3824  case X86::BI__builtin_ia32_pblendw256:
3825  case X86::BI__builtin_ia32_blendps256:
3826  case X86::BI__builtin_ia32_pblendd256:
3827  case X86::BI__builtin_ia32_palignr128:
3828  case X86::BI__builtin_ia32_palignr256:
3829  case X86::BI__builtin_ia32_palignr512:
3830  case X86::BI__builtin_ia32_alignq512:
3831  case X86::BI__builtin_ia32_alignd512:
3832  case X86::BI__builtin_ia32_alignd128:
3833  case X86::BI__builtin_ia32_alignd256:
3834  case X86::BI__builtin_ia32_alignq128:
3835  case X86::BI__builtin_ia32_alignq256:
3836  case X86::BI__builtin_ia32_vcomisd:
3837  case X86::BI__builtin_ia32_vcomiss:
3838  case X86::BI__builtin_ia32_shuf_f32x4:
3839  case X86::BI__builtin_ia32_shuf_f64x2:
3840  case X86::BI__builtin_ia32_shuf_i32x4:
3841  case X86::BI__builtin_ia32_shuf_i64x2:
3842  case X86::BI__builtin_ia32_shufpd512:
3843  case X86::BI__builtin_ia32_shufps:
3844  case X86::BI__builtin_ia32_shufps256:
3845  case X86::BI__builtin_ia32_shufps512:
3846  case X86::BI__builtin_ia32_dbpsadbw128:
3847  case X86::BI__builtin_ia32_dbpsadbw256:
3848  case X86::BI__builtin_ia32_dbpsadbw512:
3849  case X86::BI__builtin_ia32_vpshldd128:
3850  case X86::BI__builtin_ia32_vpshldd256:
3851  case X86::BI__builtin_ia32_vpshldd512:
3852  case X86::BI__builtin_ia32_vpshldq128:
3853  case X86::BI__builtin_ia32_vpshldq256:
3854  case X86::BI__builtin_ia32_vpshldq512:
3855  case X86::BI__builtin_ia32_vpshldw128:
3856  case X86::BI__builtin_ia32_vpshldw256:
3857  case X86::BI__builtin_ia32_vpshldw512:
3858  case X86::BI__builtin_ia32_vpshrdd128:
3859  case X86::BI__builtin_ia32_vpshrdd256:
3860  case X86::BI__builtin_ia32_vpshrdd512:
3861  case X86::BI__builtin_ia32_vpshrdq128:
3862  case X86::BI__builtin_ia32_vpshrdq256:
3863  case X86::BI__builtin_ia32_vpshrdq512:
3864  case X86::BI__builtin_ia32_vpshrdw128:
3865  case X86::BI__builtin_ia32_vpshrdw256:
3866  case X86::BI__builtin_ia32_vpshrdw512:
3867  i = 2; l = 0; u = 255;
3868  break;
3869  case X86::BI__builtin_ia32_fixupimmpd512_mask:
3870  case X86::BI__builtin_ia32_fixupimmpd512_maskz:
3871  case X86::BI__builtin_ia32_fixupimmps512_mask:
3872  case X86::BI__builtin_ia32_fixupimmps512_maskz:
3873  case X86::BI__builtin_ia32_fixupimmsd_mask:
3874  case X86::BI__builtin_ia32_fixupimmsd_maskz:
3875  case X86::BI__builtin_ia32_fixupimmss_mask:
3876  case X86::BI__builtin_ia32_fixupimmss_maskz:
3877  case X86::BI__builtin_ia32_fixupimmpd128_mask:
3878  case X86::BI__builtin_ia32_fixupimmpd128_maskz:
3879  case X86::BI__builtin_ia32_fixupimmpd256_mask:
3880  case X86::BI__builtin_ia32_fixupimmpd256_maskz:
3881  case X86::BI__builtin_ia32_fixupimmps128_mask:
3882  case X86::BI__builtin_ia32_fixupimmps128_maskz:
3883  case X86::BI__builtin_ia32_fixupimmps256_mask:
3884  case X86::BI__builtin_ia32_fixupimmps256_maskz:
3885  case X86::BI__builtin_ia32_pternlogd512_mask:
3886  case X86::BI__builtin_ia32_pternlogd512_maskz:
3887  case X86::BI__builtin_ia32_pternlogq512_mask:
3888  case X86::BI__builtin_ia32_pternlogq512_maskz:
3889  case X86::BI__builtin_ia32_pternlogd128_mask:
3890  case X86::BI__builtin_ia32_pternlogd128_maskz:
3891  case X86::BI__builtin_ia32_pternlogd256_mask:
3892  case X86::BI__builtin_ia32_pternlogd256_maskz:
3893  case X86::BI__builtin_ia32_pternlogq128_mask:
3894  case X86::BI__builtin_ia32_pternlogq128_maskz:
3895  case X86::BI__builtin_ia32_pternlogq256_mask:
3896  case X86::BI__builtin_ia32_pternlogq256_maskz:
3897  i = 3; l = 0; u = 255;
3898  break;
3899  case X86::BI__builtin_ia32_gatherpfdpd:
3900  case X86::BI__builtin_ia32_gatherpfdps:
3901  case X86::BI__builtin_ia32_gatherpfqpd:
3902  case X86::BI__builtin_ia32_gatherpfqps:
3903  case X86::BI__builtin_ia32_scatterpfdpd:
3904  case X86::BI__builtin_ia32_scatterpfdps:
3905  case X86::BI__builtin_ia32_scatterpfqpd:
3906  case X86::BI__builtin_ia32_scatterpfqps:
3907  i = 4; l = 2; u = 3;
3908  break;
3909  case X86::BI__builtin_ia32_rndscalesd_round_mask:
3910  case X86::BI__builtin_ia32_rndscaless_round_mask:
3911  i = 4; l = 0; u = 255;
3912  break;
3913  }
3914 
3915  // Note that we don't force a hard error on the range check here, allowing
3916  // template-generated or macro-generated dead code to potentially have out-of-
3917  // range values. They still need to code-generate, but they don't
3918  // necessarily need to make any sense. We use a warning that defaults to an error.
3919  return SemaBuiltinConstantArgRange(TheCall, i, l, u, /*RangeIsError*/ false);
3920 }
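
// Editorial note (not part of the upstream source): as an example of the
// range check above, the locality hint of _mm_prefetch must lie in [0, 7]:
//
//   _mm_prefetch(p, 3);   // accepted
//   _mm_prefetch(p, 9);   // out of range; warning that defaults to an error
//
// (p is a placeholder pointer; the hint operand must be a constant.)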
3921 
3922 /// Given a FunctionDecl's FormatAttr, attempts to populate the FormatStringInfo
3923 /// parameter with the FormatAttr's correct format_idx and firstDataArg.
3924 /// Returns true when the format fits the function and the FormatStringInfo has
3925 /// been populated.
3926 bool Sema::getFormatStringInfo(const FormatAttr *Format, bool IsCXXMember,
3927  FormatStringInfo *FSI) {
3928  FSI->HasVAListArg = Format->getFirstArg() == 0;
3929  FSI->FormatIdx = Format->getFormatIdx() - 1;
3930  FSI->FirstDataArg = FSI->HasVAListArg ? 0 : Format->getFirstArg() - 1;
3931 
3932  // The way the format attribute works in GCC, the implicit this argument
3933  // of member functions is counted. However, it doesn't appear in our own
3934  // lists, so decrement format_idx in that case.
3935  if (IsCXXMember) {
3936  if(FSI->FormatIdx == 0)
3937  return false;
3938  --FSI->FormatIdx;
3939  if (FSI->FirstDataArg != 0)
3940  --FSI->FirstDataArg;
3941  }
3942  return true;
3943 }
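
// Editorial note (not part of the upstream source): for a C++ member
// function the attribute indices count the implicit 'this' parameter, e.g.
//
//   struct Logger {
//     void log(const char *fmt, ...) __attribute__((format(printf, 2, 3)));
//   };
//
// so after the adjustment above FSI->FormatIdx is 0 (the 'fmt' parameter)
// and FSI->FirstDataArg is 1.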
3944 
3945 /// Checks whether the given expression evaluates to null.
3946 ///
3947 /// Returns true if the value evaluates to null.
3948 static bool CheckNonNullExpr(Sema &S, const Expr *Expr) {
3949  // If the expression has non-null type, it doesn't evaluate to null.
3950  if (auto nullability
3951  = Expr->IgnoreImplicit()->getType()->getNullability(S.Context)) {
3952  if (*nullability == NullabilityKind::NonNull)
3953  return false;
3954  }
3955 
3956  // As a special case, transparent unions initialized with zero are
3957  // considered null for the purposes of the nonnull attribute.
3958  if (const RecordType *UT = Expr->getType()->getAsUnionType()) {
3959  if (UT->getDecl()->hasAttr<TransparentUnionAttr>())
3960  if (const CompoundLiteralExpr *CLE =
3961  dyn_cast<CompoundLiteralExpr>(Expr))
3962  if (const InitListExpr *ILE =
3963  dyn_cast<InitListExpr>(CLE->getInitializer()))
3964  Expr = ILE->getInit(0);
3965  }
3966 
3967  bool Result;
3968  return (!Expr->isValueDependent() &&
3969  Expr->EvaluateAsBooleanCondition(Result, S.Context) &&
3970  !Result);
3971 }
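
// Editorial note (not part of the upstream source): as an example of the
// transparent-union special case above, given
//
//   typedef union { int *p; } U __attribute__((transparent_union));
//
// an argument written as the compound literal (U){0} is treated as a null
// value, because its first initializer evaluates to zero.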
3972 
3973 static void CheckNonNullArgument(Sema &S,
3974  const Expr *ArgExpr,
3975  SourceLocation CallSiteLoc) {
3976  if (CheckNonNullExpr(S, ArgExpr))
3977  S.DiagRuntimeBehavior(CallSiteLoc, ArgExpr,
3978  S.PDiag(diag::warn_null_arg) << ArgExpr->getSourceRange());
3979 }
3980 
3981 bool Sema::GetFormatNSStringIdx(const FormatAttr *Format, unsigned &Idx) {
3982  FormatStringInfo FSI;
3983  if ((GetFormatStringType(Format) == FST_NSString) &&
3984  getFormatStringInfo(Format, false, &FSI)) {
3985  Idx = FSI.FormatIdx;
3986  return true;
3987  }
3988  return false;
3989 }
3990 
3991 /// Diagnose use of %s directive in an NSString which is being passed
3992 /// as formatting string to formatting method.
3993 static void
3994 DiagnoseCStringFormatDirectiveInCFAPI(Sema &S,
3995  const NamedDecl *FDecl,
3996  Expr **Args,
3997  unsigned NumArgs) {
3998  unsigned Idx = 0;
3999  bool Format = false;
4000  ObjCStringFormatFamily SFFamily = FDecl->getObjCFStringFormattingFamily();
4001  if (SFFamily == ObjCStringFormatFamily::SFF_CFString) {
4002  Idx = 2;
4003  Format = true;
4004  }
4005  else
4006  for (const auto *I : FDecl->specific_attrs<FormatAttr>()) {
4007  if (S.GetFormatNSStringIdx(I, Idx)) {
4008  Format = true;
4009  break;
4010  }
4011  }
4012  if (!Format || NumArgs <= Idx)
4013  return;
4014  const Expr *FormatExpr = Args[Idx];
4015  if (const CStyleCastExpr *CSCE = dyn_cast<CStyleCastExpr>(FormatExpr))
4016  FormatExpr = CSCE->getSubExpr();
4017  const StringLiteral *FormatString;
4018  if (const ObjCStringLiteral *OSL =
4019  dyn_cast<ObjCStringLiteral>(FormatExpr->IgnoreParenImpCasts()))
4020  FormatString = OSL->getString();
4021  else
4022  FormatString = dyn_cast<StringLiteral>(FormatExpr->IgnoreParenImpCasts());
4023  if (!FormatString)
4024  return;
4025  if (S.FormatStringHasSArg(FormatString)) {
4026  S.Diag(FormatExpr->getExprLoc(), diag::warn_objc_cdirective_format_string)
4027  << "%s" << 1 << 1;
4028  S.Diag(FDecl->getLocation(), diag::note_entity_declared_at)
4029  << FDecl->getDeclName();
4030  }
4031 }
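
// Editorial note (not part of the upstream source): a CF call such as
//
//   CFStringCreateWithFormat(NULL, NULL, CFSTR("%s"), cstr);
//
// takes the SFF_CFString path above (format string at argument index 2) and,
// because the literal contains "%s", produces
// warn_objc_cdirective_format_string. 'cstr' is a placeholder C string.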
4032 
4033 /// Determine whether the given type has a non-null nullability annotation.
4034 static bool isNonNullType(ASTContext &ctx, QualType type) {
4035  if (auto nullability = type->getNullability(ctx))
4036  return *nullability == NullabilityKind::NonNull;
4037 
4038  return false;
4039 }
4040 
4041 static void CheckNonNullArguments(Sema &S,
4042  const NamedDecl *FDecl,
4043  const FunctionProtoType *Proto,
4044  ArrayRef<const Expr *> Args,
4045  SourceLocation CallSiteLoc) {
4046  assert((FDecl || Proto) && "Need a function declaration or prototype");
4047 
4048  // Check the attributes attached to the method/function itself.
4049  llvm::SmallBitVector NonNullArgs;
4050  if (FDecl) {
4051  // Handle the nonnull attribute on the function/method declaration itself.
4052  for (const auto *NonNull : FDecl->specific_attrs<NonNullAttr>()) {
4053  if (!NonNull->args_size()) {
4054  // Easy case: all pointer arguments are nonnull.
4055  for (const auto *Arg : Args)
4056  if (S.isValidPointerAttrType(Arg->getType()))
4057  CheckNonNullArgument(S, Arg, CallSiteLoc);
4058  return;
4059  }
4060 
4061  for (const ParamIdx &Idx : NonNull->args()) {
4062  unsigned IdxAST = Idx.getASTIndex();
4063  if (IdxAST >= Args.size())
4064  continue;
4065  if (NonNullArgs.empty())
4066  NonNullArgs.resize(Args.size());
4067  NonNullArgs.set(IdxAST);
4068  }
4069  }
4070  }
4071 
4072  if (FDecl && (isa<FunctionDecl>(FDecl) || isa<ObjCMethodDecl>(FDecl))) {
4073  // Handle the nonnull attribute on the parameters of the
4074  // function/method.
4075  ArrayRef<ParmVarDecl*> parms;
4076  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(FDecl))
4077  parms = FD->parameters();
4078  else
4079  parms = cast<ObjCMethodDecl>(FDecl)->parameters();
4080 
4081  unsigned ParamIndex = 0;
4082  for (ArrayRef<ParmVarDecl*>::iterator I = parms.begin(), E = parms.end();
4083  I != E; ++I, ++ParamIndex) {
4084  const ParmVarDecl *PVD = *I;
4085  if (PVD->hasAttr<NonNullAttr>() ||
4086  isNonNullType(S.Context, PVD->getType())) {
4087  if (NonNullArgs.empty())
4088  NonNullArgs.resize(Args.size());
4089 
4090  NonNullArgs.set(ParamIndex);
4091  }
4092  }
4093  } else {
4094  // If we have a non-function, non-method declaration but no
4095  // function prototype, try to dig out the function prototype.
4096  if (!Proto) {
4097  if (const ValueDecl *VD = dyn_cast<ValueDecl>(FDecl)) {
4098  QualType type = VD->getType().getNonReferenceType();
4099  if (auto pointerType = type->getAs<PointerType>())
4100  type = pointerType->getPointeeType();
4101  else if (auto blockType = type->getAs<BlockPointerType>())
4102  type = blockType->getPointeeType();
4103  // FIXME: data member pointers?
4104 
4105  // Dig out the function prototype, if there is one.
4106  Proto = type->getAs<FunctionProtoType>();
4107  }
4108  }
4109 
4110  // Fill in non-null argument information from the nullability
4111  // information on the parameter types (if we have them).
4112  if (Proto) {
4113  unsigned Index = 0;
4114  for (auto paramType : Proto->getParamTypes()) {
4115  if (isNonNullType(S.Context, paramType)) {
4116  if (NonNullArgs.empty())
4117  NonNullArgs.resize(Args.size());
4118 
4119  NonNullArgs.set(Index);
4120  }
4121 
4122  ++Index;
4123  }
4124  }
4125  }
4126 
4127  // Check for non-null arguments.
4128  for (unsigned ArgIndex = 0, ArgIndexEnd = NonNullArgs.size();
4129  ArgIndex != ArgIndexEnd; ++ArgIndex) {
4130  if (NonNullArgs[ArgIndex])
4131  CheckNonNullArgument(S, Args[ArgIndex], CallSiteLoc);
4132  }
4133 }
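
// Editorial note (not part of the upstream source): both spellings feed the
// NonNullArgs bit vector built above, e.g.
//
//   void f(int *p) __attribute__((nonnull(1)));   // attribute on the function
//   void g(int *_Nonnull p);                      // nullability on the type
//
//   f(NULL);   // warn_null_arg
//   g(NULL);   // warn_null_arg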
4134 
4135 /// Handles the checks for format strings, non-POD arguments to vararg
4136 /// functions, NULL arguments passed to non-NULL parameters, and diagnose_if
4137 /// attributes.
4138 void Sema::checkCall(NamedDecl *FDecl, const FunctionProtoType *Proto,
4139  const Expr *ThisArg, ArrayRef<const Expr *> Args,
4140  bool IsMemberFunction, SourceLocation Loc,
4141  SourceRange Range, VariadicCallType CallType) {
4142  // FIXME: We should check as much as we can in the template definition.
4143  if (CurContext->isDependentContext())
4144  return;
4145 
4146  // Printf and scanf checking.
4147  llvm::SmallBitVector CheckedVarArgs;
4148  if (FDecl) {
4149  for (const auto *I : FDecl->specific_attrs<FormatAttr>()) {
4150  // Only create vector if there are format attributes.
4151  CheckedVarArgs.resize(Args.size());
4152 
4153  CheckFormatArguments(I, Args, IsMemberFunction, CallType, Loc, Range,
4154  CheckedVarArgs);
4155  }
4156  }
4157 
4158  // Refuse POD arguments that weren't caught by the format string
4159  // checks above.
4160  auto *FD = dyn_cast_or_null<FunctionDecl>(FDecl);
4161  if (CallType != VariadicDoesNotApply &&
4162  (!FD || FD->getBuiltinID() != Builtin::BI__noop)) {
4163  unsigned NumParams = Proto ? Proto->getNumParams()
4164  : FDecl && isa<FunctionDecl>(FDecl)
4165  ? cast<FunctionDecl>(FDecl)->getNumParams()
4166  : FDecl && isa<ObjCMethodDecl>(FDecl)
4167  ? cast<ObjCMethodDecl>(FDecl)->param_size()
4168  : 0;
4169 
4170  for (unsigned ArgIdx = NumParams; ArgIdx < Args.size(); ++ArgIdx) {
4171  // Args[ArgIdx] can be null in malformed code.
4172  if (const Expr *Arg = Args[ArgIdx]) {
4173  if (CheckedVarArgs.empty() || !CheckedVarArgs[ArgIdx])
4174  checkVariadicArgument(Arg, CallType);
4175  }
4176  }
4177  }
4178 
4179  if (FDecl || Proto) {
4180  CheckNonNullArguments(*this, FDecl, Proto, Args, Loc);
4181 
4182  // Type safety checking.
4183  if (FDecl) {
4184  for (const auto *I : FDecl->specific_attrs<ArgumentWithTypeTagAttr>())
4185  CheckArgumentWithTypeTag(I, Args, Loc);
4186  }
4187  }
4188 
4189  if (FD)
4190  diagnoseArgDependentDiagnoseIfAttrs(FD, ThisArg, Args, Loc);
4191 }
4192 
4193 /// CheckConstructorCall - Check a constructor call for correctness and safety
4194 /// properties not enforced by the C type system.
4195 void Sema::CheckConstructorCall(FunctionDecl *FDecl,
4196  ArrayRef<const Expr *> Args,
4197  const FunctionProtoType *Proto,
4198  SourceLocation Loc) {
4199  VariadicCallType CallType =
4200  Proto->isVariadic() ? VariadicConstructor : VariadicDoesNotApply;
4201  checkCall(FDecl, Proto, /*ThisArg=*/nullptr, Args, /*IsMemberFunction=*/true,
4202  Loc, SourceRange(), CallType);
4203 }
4204 
4205 /// CheckFunctionCall - Check a direct function call for various correctness
4206 /// and safety properties not strictly enforced by the C type system.
4207 bool Sema::CheckFunctionCall(FunctionDecl *FDecl, CallExpr *TheCall,
4208  const FunctionProtoType *Proto) {
4209  bool IsMemberOperatorCall = isa<CXXOperatorCallExpr>(TheCall) &&
4210  isa<CXXMethodDecl>(FDecl);
4211  bool IsMemberFunction = isa<CXXMemberCallExpr>(TheCall) ||
4212  IsMemberOperatorCall;
4213  VariadicCallType CallType = getVariadicCallType(FDecl, Proto,
4214  TheCall->getCallee());
4215  Expr** Args = TheCall->getArgs();
4216  unsigned NumArgs = TheCall->getNumArgs();
4217 
4218  Expr *ImplicitThis = nullptr;
4219  if (IsMemberOperatorCall) {
4220  // If this is a call to a member operator, hide the first argument
4221  // from checkCall.
4222  // FIXME: Our choice of AST representation here is less than ideal.
4223  ImplicitThis = Args[0];
4224  ++Args;
4225  --NumArgs;
4226  } else if (IsMemberFunction)
4227  ImplicitThis =
4228  cast<CXXMemberCallExpr>(TheCall)->getImplicitObjectArgument();
4229 
4230  checkCall(FDecl, Proto, ImplicitThis, llvm::makeArrayRef(Args, NumArgs),
4231  IsMemberFunction, TheCall->getRParenLoc(),
4232  TheCall->getCallee()->getSourceRange(), CallType);
4233 
4234  IdentifierInfo *FnInfo = FDecl->getIdentifier();
4235  // None of the checks below are needed for functions that don't have
4236  // simple names (e.g., C++ conversion functions).
4237  if (!FnInfo)
4238  return false;
4239 
4240  CheckAbsoluteValueFunction(TheCall, FDecl);
4241  CheckMaxUnsignedZero(TheCall, FDecl);
4242 
4243  if (getLangOpts().ObjC)
4244  DiagnoseCStringFormatDirectiveInCFAPI(*this, FDecl, Args, NumArgs);
4245 
4246  unsigned CMId = FDecl->getMemoryFunctionKind();
4247  if (CMId == 0)
4248  return false;
4249 
4250  // Handle memory setting and copying functions.
4251  if (CMId == Builtin::BIstrlcpy || CMId == Builtin::BIstrlcat)
4252  CheckStrlcpycatArguments(TheCall, FnInfo);
4253  else if (CMId == Builtin::BIstrncat)
4254  CheckStrncatArguments(TheCall, FnInfo);
4255  else
4256  CheckMemaccessArguments(TheCall, CMId, FnInfo);
4257 
4258  return false;
4259 }
4260 
4261 bool Sema::CheckObjCMethodCall(ObjCMethodDecl *Method, SourceLocation lbrac,
4262  ArrayRef<const Expr *> Args) {
4263  VariadicCallType CallType =
4264  Method->isVariadic() ? VariadicMethod : VariadicDoesNotApply;
4265 
4266  checkCall(Method, nullptr, /*ThisArg=*/nullptr, Args,
4267  /*IsMemberFunction=*/false, lbrac, Method->getSourceRange(),
4268  CallType);
4269 
4270  return false;
4271 }
4272 
4273 bool Sema::CheckPointerCall(NamedDecl *NDecl, CallExpr *TheCall,
4274  const FunctionProtoType *Proto) {
4275  QualType Ty;
4276  if (const auto *V = dyn_cast<VarDecl>(NDecl))
4277  Ty = V->getType().getNonReferenceType();
4278  else if (const auto *F = dyn_cast<FieldDecl>(NDecl))
4279  Ty = F->getType().getNonReferenceType();
4280  else
4281  return false;
4282 
4283  if (!Ty->isBlockPointerType() && !Ty->isFunctionPointerType() &&
4284  !Ty->isFunctionProtoType())
4285  return false;
4286 
4287  VariadicCallType CallType;
4288  if (!Proto || !Proto->isVariadic()) {
4289  CallType = VariadicDoesNotApply;
4290  } else if (Ty->isBlockPointerType()) {
4291  CallType = VariadicBlock;
4292  } else { // Ty->isFunctionPointerType()
4293  CallType = VariadicFunction;
4294  }
4295 
4296  checkCall(NDecl, Proto, /*ThisArg=*/nullptr,
4297  llvm::makeArrayRef(TheCall->getArgs(), TheCall->getNumArgs()),
4298  /*IsMemberFunction=*/false, TheCall->getRParenLoc(),
4299  TheCall->getCallee()->getSourceRange(), CallType);
4300 
4301  return false;
4302 }
4303 
4304 /// Checks function calls when a FunctionDecl or a NamedDecl is not available,
4305 /// such as function pointers returned from functions.
4306 bool Sema::CheckOtherCall(CallExpr *TheCall, const FunctionProtoType *Proto) {
4307  VariadicCallType CallType = getVariadicCallType(/*FDecl=*/nullptr, Proto,
4308  TheCall->getCallee());
4309  checkCall(/*FDecl=*/nullptr, Proto, /*ThisArg=*/nullptr,
4310  llvm::makeArrayRef(TheCall->getArgs(), TheCall->getNumArgs()),
4311  /*IsMemberFunction=*/false, TheCall->getRParenLoc(),
4312  TheCall->getCallee()->getSourceRange(), CallType);
4313 
4314  return false;
4315 }
4316 
4317 static bool isValidOrderingForOp(int64_t Ordering, AtomicExpr::AtomicOp Op) {
4318  if (!llvm::isValidAtomicOrderingCABI(Ordering))
4319  return false;
4320 
4321  auto OrderingCABI = (llvm::AtomicOrderingCABI)Ordering;
4322  switch (Op) {
4323  case AtomicExpr::AO__c11_atomic_init:
4324  case AtomicExpr::AO__opencl_atomic_init:
4325  llvm_unreachable("There is no ordering argument for an init");
4326 
4327  case AtomicExpr::AO__c11_atomic_load:
4328  case AtomicExpr::AO__opencl_atomic_load:
4329  case AtomicExpr::AO__atomic_load_n:
4330  case AtomicExpr::AO__atomic_load:
4331  return OrderingCABI != llvm::AtomicOrderingCABI::release &&
4332  OrderingCABI != llvm::AtomicOrderingCABI::acq_rel;
4333 
4334  case AtomicExpr::AO__c11_atomic_store:
4335  case AtomicExpr::AO__opencl_atomic_store:
4336  case AtomicExpr::AO__atomic_store:
4337  case AtomicExpr::AO__atomic_store_n:
4338  return OrderingCABI != llvm::AtomicOrderingCABI::consume &&
4339  OrderingCABI != llvm::AtomicOrderingCABI::acquire &&
4340  OrderingCABI != llvm::AtomicOrderingCABI::acq_rel;
4341 
4342  default:
4343  return true;
4344  }
4345 }
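
// Editorial note (not part of the upstream source): the helper above rejects
// orderings that make no sense for the operation, e.g.
//
//   __c11_atomic_load(&a, __ATOMIC_RELEASE);      // release on a load
//   __c11_atomic_store(&a, 1, __ATOMIC_ACQUIRE);  // acquire on a store
//
// both of which are reported as warn_atomic_op_has_invalid_memory_order by
// SemaAtomicOpsOverloaded below. 'a' is a placeholder _Atomic(int) object.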
4346 
4347 ExprResult Sema::SemaAtomicOpsOverloaded(ExprResult TheCallResult,
4348  AtomicExpr::AtomicOp Op) {
4349  CallExpr *TheCall = cast<CallExpr>(TheCallResult.get());
4350  DeclRefExpr *DRE = cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
4351 
4352  // All the non-OpenCL operations take one of the following forms.
4353  // The OpenCL operations take the __c11 forms with one extra argument for
4354  // synchronization scope.
4355  enum {
4356  // C __c11_atomic_init(A *, C)
4357  Init,
4358 
4359  // C __c11_atomic_load(A *, int)
4360  Load,
4361 
4362  // void __atomic_load(A *, CP, int)
4363  LoadCopy,
4364 
4365  // void __atomic_store(A *, CP, int)
4366  Copy,
4367 
4368  // C __c11_atomic_add(A *, M, int)
4369  Arithmetic,
4370 
4371  // C __atomic_exchange_n(A *, CP, int)
4372  Xchg,
4373 
4374  // void __atomic_exchange(A *, C *, CP, int)
4375  GNUXchg,
4376 
4377  // bool __c11_atomic_compare_exchange_strong(A *, C *, CP, int, int)
4378  C11CmpXchg,
4379 
4380  // bool __atomic_compare_exchange(A *, C *, CP, bool, int, int)
4381  GNUCmpXchg
4382  } Form = Init;
4383 
4384  const unsigned NumForm = GNUCmpXchg + 1;
4385  const unsigned NumArgs[] = { 2, 2, 3, 3, 3, 3, 4, 5, 6 };
4386  const unsigned NumVals[] = { 1, 0, 1, 1, 1, 1, 2, 2, 3 };
4387  // where:
4388  // C is an appropriate type,
4389  // A is volatile _Atomic(C) for __c11 builtins and is C for GNU builtins,
4390  // CP is C for __c11 builtins and GNU _n builtins and is C * otherwise,
4391  // M is C if C is an integer, and ptrdiff_t if C is a pointer, and
4392  // the int parameters are for orderings.
4393 
4394  static_assert(sizeof(NumArgs)/sizeof(NumArgs[0]) == NumForm
4395  && sizeof(NumVals)/sizeof(NumVals[0]) == NumForm,
4396  "need to update code for modified forms");
4397  static_assert(AtomicExpr::AO__c11_atomic_init == 0 &&
4398  AtomicExpr::AO__c11_atomic_fetch_xor + 1 ==
4399  AtomicExpr::AO__atomic_load,
4400  "need to update code for modified C11 atomics");
4401  bool IsOpenCL = Op >= AtomicExpr::AO__opencl_atomic_init &&
4402  Op <= AtomicExpr::AO__opencl_atomic_fetch_max;
4403  bool IsC11 = (Op >= AtomicExpr::AO__c11_atomic_init &&
4404  Op <= AtomicExpr::AO__c11_atomic_fetch_xor) ||
4405  IsOpenCL;
4406  bool IsN = Op == AtomicExpr::AO__atomic_load_n ||
4407  Op == AtomicExpr::AO__atomic_store_n ||
4408  Op == AtomicExpr::AO__atomic_exchange_n ||
4409  Op == AtomicExpr::AO__atomic_compare_exchange_n;
4410  bool IsAddSub = false;
4411  bool IsMinMax = false;
4412 
4413  switch (Op) {
4414  case AtomicExpr::AO__c11_atomic_init:
4415  case AtomicExpr::AO__opencl_atomic_init:
4416  Form = Init;
4417  break;
4418 
4419  case AtomicExpr::AO__c11_atomic_load:
4420  case AtomicExpr::AO__opencl_atomic_load:
4421  case AtomicExpr::AO__atomic_load_n:
4422  Form = Load;
4423  break;
4424 
4425  case AtomicExpr::AO__atomic_load:
4426  Form = LoadCopy;
4427  break;
4428 
4429  case AtomicExpr::AO__c11_atomic_store:
4430  case AtomicExpr::AO__opencl_atomic_store:
4431  case AtomicExpr::AO__atomic_store:
4432  case AtomicExpr::AO__atomic_store_n:
4433  Form = Copy;
4434  break;
4435 
4436  case AtomicExpr::AO__c11_atomic_fetch_add:
4437  case AtomicExpr::AO__c11_atomic_fetch_sub:
4438  case AtomicExpr::AO__opencl_atomic_fetch_add:
4439  case AtomicExpr::AO__opencl_atomic_fetch_sub:
4440  case AtomicExpr::AO__opencl_atomic_fetch_min:
4441  case AtomicExpr::AO__opencl_atomic_fetch_max:
4442  case AtomicExpr::AO__atomic_fetch_add:
4443  case AtomicExpr::AO__atomic_fetch_sub:
4444  case AtomicExpr::AO__atomic_add_fetch:
4445  case AtomicExpr::AO__atomic_sub_fetch:
4446  IsAddSub = true;
4447  LLVM_FALLTHROUGH;
4448  case AtomicExpr::AO__c11_atomic_fetch_and:
4449  case AtomicExpr::AO__c11_atomic_fetch_or:
4450  case AtomicExpr::AO__c11_atomic_fetch_xor:
4451  case AtomicExpr::AO__opencl_atomic_fetch_and:
4452  case AtomicExpr::AO__opencl_atomic_fetch_or:
4453  case AtomicExpr::AO__opencl_atomic_fetch_xor:
4454  case AtomicExpr::AO__atomic_fetch_and:
4455  case AtomicExpr::AO__atomic_fetch_or:
4456  case AtomicExpr::AO__atomic_fetch_xor:
4457  case AtomicExpr::AO__atomic_fetch_nand:
4458  case AtomicExpr::AO__atomic_and_fetch:
4459  case AtomicExpr::AO__atomic_or_fetch:
4460  case AtomicExpr::AO__atomic_xor_fetch:
4461  case AtomicExpr::AO__atomic_nand_fetch:
4462  Form = Arithmetic;
4463  break;
4464 
4465  case AtomicExpr::AO__atomic_fetch_min:
4466  case AtomicExpr::AO__atomic_fetch_max:
4467  IsMinMax = true;
4468  Form = Arithmetic;
4469  break;
4470 
4471  case AtomicExpr::AO__c11_atomic_exchange:
4472  case AtomicExpr::AO__opencl_atomic_exchange:
4473  case AtomicExpr::AO__atomic_exchange_n:
4474  Form = Xchg;
4475  break;
4476 
4477  case AtomicExpr::AO__atomic_exchange:
4478  Form = GNUXchg;
4479  break;
4480 
4481  case AtomicExpr::AO__c11_atomic_compare_exchange_strong:
4482  case AtomicExpr::AO__c11_atomic_compare_exchange_weak:
4483  case AtomicExpr::AO__opencl_atomic_compare_exchange_strong:
4484  case AtomicExpr::AO__opencl_atomic_compare_exchange_weak:
4485  Form = C11CmpXchg;
4486  break;
4487 
4488  case AtomicExpr::AO__atomic_compare_exchange:
4489  case AtomicExpr::AO__atomic_compare_exchange_n:
4490  Form = GNUCmpXchg;
4491  break;
4492  }
4493 
4494  unsigned AdjustedNumArgs = NumArgs[Form];
4495  if (IsOpenCL && Op != AtomicExpr::AO__opencl_atomic_init)
4496  ++AdjustedNumArgs;
4497  // Check we have the right number of arguments.
4498  if (TheCall->getNumArgs() < AdjustedNumArgs) {
4499  Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args)
4500  << 0 << AdjustedNumArgs << TheCall->getNumArgs()
4501  << TheCall->getCallee()->getSourceRange();
4502  return ExprError();
4503  } else if (TheCall->getNumArgs() > AdjustedNumArgs) {
4504  Diag(TheCall->getArg(AdjustedNumArgs)->getBeginLoc(),
4505  diag::err_typecheck_call_too_many_args)
4506  << 0 << AdjustedNumArgs << TheCall->getNumArgs()
4507  << TheCall->getCallee()->getSourceRange();
4508  return ExprError();
4509  }
4510 
4511  // Inspect the first argument of the atomic operation.
4512  Expr *Ptr = TheCall->getArg(0);
4513  ExprResult ConvertedPtr = DefaultFunctionArrayLvalueConversion(Ptr);
4514  if (ConvertedPtr.isInvalid())
4515  return ExprError();
4516 
4517  Ptr = ConvertedPtr.get();
4518  const PointerType *pointerType = Ptr->getType()->getAs<PointerType>();
4519  if (!pointerType) {
4520  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer)
4521  << Ptr->getType() << Ptr->getSourceRange();
4522  return ExprError();
4523  }
4524 
4525  // For a __c11 builtin, this should be a pointer to an _Atomic type.
4526  QualType AtomTy = pointerType->getPointeeType(); // 'A'
4527  QualType ValType = AtomTy; // 'C'
4528  if (IsC11) {
4529  if (!AtomTy->isAtomicType()) {
4530  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_atomic)
4531  << Ptr->getType() << Ptr->getSourceRange();
4532  return ExprError();
4533  }
4534  if ((Form != Load && Form != LoadCopy && AtomTy.isConstQualified()) ||
4535  AtomTy.getAddressSpace() == LangAS::opencl_constant) {
4536  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_non_const_atomic)
4537  << (AtomTy.isConstQualified() ? 0 : 1) << Ptr->getType()
4538  << Ptr->getSourceRange();
4539  return ExprError();
4540  }
4541  ValType = AtomTy->getAs<AtomicType>()->getValueType();
4542  } else if (Form != Load && Form != LoadCopy) {
4543  if (ValType.isConstQualified()) {
4544  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_non_const_pointer)
4545  << Ptr->getType() << Ptr->getSourceRange();
4546  return ExprError();
4547  }
4548  }
4549 
4550  // For an arithmetic operation, the implied arithmetic must be well-formed.
4551  if (Form == Arithmetic) {
4552  // gcc does not enforce these rules for GNU atomics, but we do so for sanity.
4553  if (IsAddSub && !ValType->isIntegerType()
4554  && !ValType->isPointerType()) {
4555  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_atomic_int_or_ptr)
4556  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4557  return ExprError();
4558  }
4559  if (IsMinMax) {
4560  const BuiltinType *BT = ValType->getAs<BuiltinType>();
4561  if (!BT || (BT->getKind() != BuiltinType::Int &&
4562  BT->getKind() != BuiltinType::UInt)) {
4563  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_int32_or_ptr);
4564  return ExprError();
4565  }
4566  }
4567  if (!IsAddSub && !IsMinMax && !ValType->isIntegerType()) {
4568  Diag(DRE->getBeginLoc(), diag::err_atomic_op_bitwise_needs_atomic_int)
4569  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4570  return ExprError();
4571  }
4572  if (IsC11 && ValType->isPointerType() &&
4573  RequireCompleteType(Ptr->getBeginLoc(), ValType->getPointeeType(),
4574  diag::err_incomplete_type)) {
4575  return ExprError();
4576  }
4577  } else if (IsN && !ValType->isIntegerType() && !ValType->isPointerType()) {
4578  // For __atomic_*_n operations, the value type must be a scalar integral or
4579  // pointer type which is 1, 2, 4, 8 or 16 bytes in length.
4580  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_atomic_int_or_ptr)
4581  << IsC11 << Ptr->getType() << Ptr->getSourceRange();
4582  return ExprError();
4583  }
4584 
4585  if (!IsC11 && !AtomTy.isTriviallyCopyableType(Context) &&
4586  !AtomTy->isScalarType()) {
4587  // For GNU atomics, require a trivially-copyable type. This is not part of
4588  // the GNU atomics specification, but we enforce it for sanity.
4589  Diag(DRE->getBeginLoc(), diag::err_atomic_op_needs_trivial_copy)
4590  << Ptr->getType() << Ptr->getSourceRange();
4591  return ExprError();
4592  }
4593 
4594  switch (ValType.getObjCLifetime()) {
4595  case Qualifiers::OCL_None:
4596  case Qualifiers::OCL_ExplicitNone:
4597  // okay
4598  break;
4599 
4600  case Qualifiers::OCL_Weak:
4601  case Qualifiers::OCL_Strong:
4602  case Qualifiers::OCL_Autoreleasing:
4603  // FIXME: Can this happen? By this point, ValType should be known
4604  // to be trivially copyable.
4605  Diag(DRE->getBeginLoc(), diag::err_arc_atomic_ownership)
4606  << ValType << Ptr->getSourceRange();
4607  return ExprError();
4608  }
4609 
4610  // All atomic operations have an overload which takes a pointer to a volatile
4611  // 'A'. We shouldn't let the volatile-ness of the pointee-type inject itself
4612  // into the result or the other operands. Similarly atomic_load takes a
4613  // pointer to a const 'A'.
4614  ValType.removeLocalVolatile();
4615  ValType.removeLocalConst();
4616  QualType ResultType = ValType;
4617  if (Form == Copy || Form == LoadCopy || Form == GNUXchg ||
4618  Form == Init)
4619  ResultType = Context.VoidTy;
4620  else if (Form == C11CmpXchg || Form == GNUCmpXchg)
4621  ResultType = Context.BoolTy;
4622 
4623  // The type of a parameter passed 'by value'. In the GNU atomics, such
4624  // arguments are actually passed as pointers.
4625  QualType ByValType = ValType; // 'CP'
4626  bool IsPassedByAddress = false;
4627  if (!IsC11 && !IsN) {
4628  ByValType = Ptr->getType();
4629  IsPassedByAddress = true;
4630  }
4631 
4632  // The first argument's non-CV pointer type is used to deduce the type of
4633  // subsequent arguments, except for:
4634  // - weak flag (always converted to bool)
4635  // - memory order (always converted to int)
4636  // - scope (always converted to int)
4637  for (unsigned i = 0; i != TheCall->getNumArgs(); ++i) {
4638  QualType Ty;
4639  if (i < NumVals[Form] + 1) {
4640  switch (i) {
4641  case 0:
4642  // The first argument is always a pointer. It has a fixed type.
4643  // It is always dereferenced; passing a null pointer is undefined.
4644  CheckNonNullArgument(*this, TheCall->getArg(i), DRE->getBeginLoc());
4645  // Nothing else to do: we already know all we want about this pointer.
4646  continue;
4647  case 1:
4648  // The second argument is the non-atomic operand. For arithmetic, this
4649  // is always passed by value, and for a compare_exchange it is always
4650  // passed by address. For the rest, GNU uses by-address and C11 uses
4651  // by-value.
4652  assert(Form != Load);
4653  if (Form == Init || (Form == Arithmetic && ValType->isIntegerType()))
4654  Ty = ValType;
4655  else if (Form == Copy || Form == Xchg) {
4656  if (IsPassedByAddress)
4657  // The value pointer is always dereferenced; passing a null pointer is undefined.
4658  CheckNonNullArgument(*this, TheCall->getArg(i), DRE->getBeginLoc());
4659  Ty = ByValType;
4660  } else if (Form == Arithmetic)
4661  Ty = Context.getPointerDiffType();
4662  else {
4663  Expr *ValArg = TheCall->getArg(i);
4664  // The value pointer is always dereferenced; passing a null pointer is undefined.
4665  CheckNonNullArgument(*this, ValArg, DRE->getBeginLoc());
4666  LangAS AS = LangAS::Default;
4667  // Keep address space of non-atomic pointer type.
4668  if (const PointerType *PtrTy =
4669  ValArg->getType()->getAs<PointerType>()) {
4670  AS = PtrTy->getPointeeType().getAddressSpace();
4671  }
4672  Ty = Context.getPointerType(
4673  Context.getAddrSpaceQualType(ValType.getUnqualifiedType(), AS));
4674  }
4675  break;
4676  case 2:
4677  // The third argument to compare_exchange / GNU exchange is the desired
4678  // value, either by-value (for the C11 and *_n variant) or as a pointer.
4679  if (IsPassedByAddress)
4680  CheckNonNullArgument(*this, TheCall->getArg(i), DRE->getBeginLoc());
4681  Ty = ByValType;
4682  break;
4683  case 3:
4684  // The fourth argument to GNU compare_exchange is a 'weak' flag.
4685  Ty = Context.BoolTy;
4686  break;
4687  }
4688  } else {
4689  // The order(s) and scope are always converted to int.
4690  Ty = Context.IntTy;
4691  }
4692 
4693  InitializedEntity Entity =
4694  InitializedEntity::InitializeParameter(Context, Ty, false);
4695  ExprResult Arg = TheCall->getArg(i);
4696  Arg = PerformCopyInitialization(Entity, SourceLocation(), Arg);
4697  if (Arg.isInvalid())
4698  return true;
4699  TheCall->setArg(i, Arg.get());
4700  }
4701 
4702  // Permute the arguments into a 'consistent' order.
4703  SmallVector<Expr*, 5> SubExprs;
4704  SubExprs.push_back(Ptr);
4705  switch (Form) {
4706  case Init:
4707  // Note, AtomicExpr::getVal1() has a special case for this atomic.
4708  SubExprs.push_back(TheCall->getArg(1)); // Val1
4709  break;
4710  case Load:
4711  SubExprs.push_back(TheCall->getArg(1)); // Order
4712  break;
4713  case LoadCopy:
4714  case Copy:
4715  case Arithmetic:
4716  case Xchg:
4717  SubExprs.push_back(TheCall->getArg(2)); // Order
4718  SubExprs.push_back(TheCall->getArg(1)); // Val1
4719  break;
4720  case GNUXchg:
4721  // Note, AtomicExpr::getVal2() has a special case for this atomic.
4722  SubExprs.push_back(TheCall->getArg(3)); // Order
4723  SubExprs.push_back(TheCall->getArg(1)); // Val1
4724  SubExprs.push_back(TheCall->getArg(2)); // Val2
4725  break;
4726  case C11CmpXchg:
4727  SubExprs.push_back(TheCall->getArg(3)); // Order
4728  SubExprs.push_back(TheCall->getArg(1)); // Val1
4729  SubExprs.push_back(TheCall->getArg(4)); // OrderFail
4730  SubExprs.push_back(TheCall->getArg(2)); // Val2
4731  break;
4732  case GNUCmpXchg:
4733  SubExprs.push_back(TheCall->getArg(4)); // Order
4734  SubExprs.push_back(TheCall->getArg(1)); // Val1
4735  SubExprs.push_back(TheCall->getArg(5)); // OrderFail
4736  SubExprs.push_back(TheCall->getArg(2)); // Val2
4737  SubExprs.push_back(TheCall->getArg(3)); // Weak
4738  break;
4739  }
4740 
4741  if (SubExprs.size() >= 2 && Form != Init) {
4742  llvm::APSInt Result(32);
4743  if (SubExprs[1]->isIntegerConstantExpr(Result, Context) &&
4744  !isValidOrderingForOp(Result.getSExtValue(), Op))
4745  Diag(SubExprs[1]->getBeginLoc(),
4746  diag::warn_atomic_op_has_invalid_memory_order)
4747  << SubExprs[1]->getSourceRange();
4748  }
4749 
4750  if (auto ScopeModel = AtomicExpr::getScopeModel(Op)) {
4751  auto *Scope = TheCall->getArg(TheCall->getNumArgs() - 1);
4752  llvm::APSInt Result(32);
4753  if (Scope->isIntegerConstantExpr(Result, Context) &&
4754  !ScopeModel->isValid(Result.getZExtValue())) {
4755  Diag(Scope->getBeginLoc(), diag::err_atomic_op_has_invalid_synch_scope)
4756  << Scope->getSourceRange();
4757  }
4758  SubExprs.push_back(Scope);
4759  }
4760 
4761  AtomicExpr *AE =
4762  new (Context) AtomicExpr(TheCall->getCallee()->getBeginLoc(), SubExprs,
4763  ResultType, Op, TheCall->getRParenLoc());
4764 
4765  if ((Op == AtomicExpr::AO__c11_atomic_load ||
4766  Op == AtomicExpr::AO__c11_atomic_store ||
4767  Op == AtomicExpr::AO__opencl_atomic_load ||
4768  Op == AtomicExpr::AO__opencl_atomic_store ) &&
4769  Context.AtomicUsesUnsupportedLibcall(AE))
4770  Diag(AE->getBeginLoc(), diag::err_atomic_load_store_uses_lib)
4771  << ((Op == AtomicExpr::AO__c11_atomic_load ||
4772  Op == AtomicExpr::AO__opencl_atomic_load)
4773  ? 0
4774  : 1);
4775 
4776  return AE;
4777 }
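
// Editorial note (not part of the upstream source): the permutation above
// places the ordering(s) ahead of the value operands, so for
//
//   __atomic_compare_exchange_n(&obj, &expected, desired, /*weak=*/0,
//                               __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
//
// the AtomicExpr sub-expressions end up in the order
// Ptr, Order, Val1 (&expected), OrderFail, Val2 (desired), Weak.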
4778 
4779 /// checkBuiltinArgument - Given a call to a builtin function, perform
4780 /// normal type-checking on the given argument, updating the call in
4781 /// place. This is useful when a builtin function requires custom
4782 /// type-checking for some of its arguments but not necessarily all of
4783 /// them.
4784 ///
4785 /// Returns true on error.
4786 static bool checkBuiltinArgument(Sema &S, CallExpr *E, unsigned ArgIndex) {
4787  FunctionDecl *Fn = E->getDirectCallee();
4788  assert(Fn && "builtin call without direct callee!");
4789 
4790  ParmVarDecl *Param = Fn->getParamDecl(ArgIndex);
4791  InitializedEntity Entity =
4792  InitializedEntity::InitializeParameter(S.Context, Param);
4793 
4794  ExprResult Arg = E->getArg(0);
4795  Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
4796  if (Arg.isInvalid())
4797  return true;
4798 
4799  E->setArg(ArgIndex, Arg.get());
4800  return false;
4801 }
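
// Editorial note (not part of the upstream source) for the function below:
// the pointee size selects the concrete __sync builtin, e.g. on a typical
// target
//
//   short s;      __sync_fetch_and_add(&s, 1);  // resolved to __sync_fetch_and_add_2
//   long long v;  __sync_fetch_and_add(&v, 1);  // resolved to __sync_fetch_and_add_8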
4802 
4803 /// We have a call to a function like __sync_fetch_and_add, which is an
4804 /// overloaded function based on the pointer type of its first argument.
4805 /// The main ActOnCallExpr routines have already promoted the types of
4806 /// arguments because all of these calls are prototyped as void(...).
4807 ///
4808 /// This function goes through and does final semantic checking for these
4809 /// builtins, as well as generating any warnings.
4810 ExprResult
4811 Sema::SemaBuiltinAtomicOverloaded(ExprResult TheCallResult) {
4812  CallExpr *TheCall = static_cast<CallExpr *>(TheCallResult.get());
4813  Expr *Callee = TheCall->getCallee();
4814  DeclRefExpr *DRE = cast<DeclRefExpr>(Callee->IgnoreParenCasts());
4815  FunctionDecl *FDecl = cast<FunctionDecl>(DRE->getDecl());
4816 
4817  // Ensure that we have at least one argument to do type inference from.
4818  if (TheCall->getNumArgs() < 1) {
4819  Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
4820  << 0 << 1 << TheCall->getNumArgs() << Callee->getSourceRange();
4821  return ExprError();
4822  }
4823 
4824  // Inspect the first argument of the atomic builtin. This should always be
4825  // a pointer type, whose element is an integral scalar or pointer type.
4826  // Because it is a pointer type, we don't have to worry about any implicit
4827  // casts here.
4828  // FIXME: We don't allow floating point scalars as input.
4829  Expr *FirstArg = TheCall->getArg(0);
4830  ExprResult FirstArgResult = DefaultFunctionArrayLvalueConversion(FirstArg);
4831  if (FirstArgResult.isInvalid())
4832  return ExprError();
4833  FirstArg = FirstArgResult.get();
4834  TheCall->setArg(0, FirstArg);
4835 
4836  const PointerType *pointerType = FirstArg->getType()->getAs<PointerType>();
4837  if (!pointerType) {
4838  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer)
4839  << FirstArg->getType() << FirstArg->getSourceRange();
4840  return ExprError();
4841  }
4842 
4843  QualType ValType = pointerType->getPointeeType();
4844  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
4845  !ValType->isBlockPointerType()) {
4846  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_must_be_pointer_intptr)
4847  << FirstArg->getType() << FirstArg->getSourceRange();
4848  return ExprError();
4849  }
4850 
4851  if (ValType.isConstQualified()) {
4852  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_cannot_be_const)
4853  << FirstArg->getType() << FirstArg->getSourceRange();
4854  return ExprError();
4855  }
4856 
4857  switch (ValType.getObjCLifetime()) {
4858  case Qualifiers::OCL_None:
4859  case Qualifiers::OCL_ExplicitNone:
4860  // okay
4861  break;
4862 
4863  case Qualifiers::OCL_Weak:
4864  case Qualifiers::OCL_Strong:
4865  case Qualifiers::OCL_Autoreleasing:
4866  Diag(DRE->getBeginLoc(), diag::err_arc_atomic_ownership)
4867  << ValType << FirstArg->getSourceRange();
4868  return ExprError();
4869  }
4870 
4871  // Strip any qualifiers off ValType.
4872  ValType = ValType.getUnqualifiedType();
4873 
4874  // The majority of builtins return a value, but a few have special return
4875  // types, so allow them to override appropriately below.
4876  QualType ResultType = ValType;
4877 
4878  // We need to figure out which concrete builtin this maps onto. For example,
4879  // __sync_fetch_and_add with a 2 byte object turns into
4880  // __sync_fetch_and_add_2.
4881 #define BUILTIN_ROW(x) \
4882  { Builtin::BI##x##_1, Builtin::BI##x##_2, Builtin::BI##x##_4, \
4883  Builtin::BI##x##_8, Builtin::BI##x##_16 }
4884 
4885  static const unsigned BuiltinIndices[][5] = {
4886  BUILTIN_ROW(__sync_fetch_and_add),
4887  BUILTIN_ROW(__sync_fetch_and_sub),
4888  BUILTIN_ROW(__sync_fetch_and_or),
4889  BUILTIN_ROW(__sync_fetch_and_and),
4890  BUILTIN_ROW(__sync_fetch_and_xor),
4891  BUILTIN_ROW(__sync_fetch_and_nand),
4892 
4893  BUILTIN_ROW(__sync_add_and_fetch),
4894  BUILTIN_ROW(__sync_sub_and_fetch),
4895  BUILTIN_ROW(__sync_and_and_fetch),
4896  BUILTIN_ROW(__sync_or_and_fetch),
4897  BUILTIN_ROW(__sync_xor_and_fetch),
4898  BUILTIN_ROW(__sync_nand_and_fetch),
4899 
4900  BUILTIN_ROW(__sync_val_compare_and_swap),
4901  BUILTIN_ROW(__sync_bool_compare_and_swap),
4902  BUILTIN_ROW(__sync_lock_test_and_set),
4903  BUILTIN_ROW(__sync_lock_release),
4904  BUILTIN_ROW(__sync_swap)
4905  };
4906 #undef BUILTIN_ROW
4907 
4908  // Determine the index of the size.
4909  unsigned SizeIndex;
4910  switch (Context.getTypeSizeInChars(ValType).getQuantity()) {
4911  case 1: SizeIndex = 0; break;
4912  case 2: SizeIndex = 1; break;
4913  case 4: SizeIndex = 2; break;
4914  case 8: SizeIndex = 3; break;
4915  case 16: SizeIndex = 4; break;
4916  default:
4917  Diag(DRE->getBeginLoc(), diag::err_atomic_builtin_pointer_size)
4918  << FirstArg->getType() << FirstArg->getSourceRange();
4919  return ExprError();
4920  }
4921 
4922  // Each of these builtins has one pointer argument, followed by some number of
4923  // values (0, 1 or 2) followed by a potentially empty varargs list of stuff
4924  // that we ignore. Find out which row of BuiltinIndices to read from as well
4925  // as the number of fixed args.
4926  unsigned BuiltinID = FDecl->getBuiltinID();
4927  unsigned BuiltinIndex, NumFixed = 1;
4928  bool WarnAboutSemanticsChange = false;
4929  switch (BuiltinID) {
4930  default: llvm_unreachable("Unknown overloaded atomic builtin!");
4931  case Builtin::BI__sync_fetch_and_add:
4932  case Builtin::BI__sync_fetch_and_add_1:
4933  case Builtin::BI__sync_fetch_and_add_2:
4934  case Builtin::BI__sync_fetch_and_add_4:
4935  case Builtin::BI__sync_fetch_and_add_8:
4936  case Builtin::BI__sync_fetch_and_add_16:
4937  BuiltinIndex = 0;
4938  break;
4939 
4940  case Builtin::BI__sync_fetch_and_sub:
4941  case Builtin::BI__sync_fetch_and_sub_1:
4942  case Builtin::BI__sync_fetch_and_sub_2:
4943  case Builtin::BI__sync_fetch_and_sub_4:
4944  case Builtin::BI__sync_fetch_and_sub_8:
4945  case Builtin::BI__sync_fetch_and_sub_16:
4946  BuiltinIndex = 1;
4947  break;
4948 
4949  case Builtin::BI__sync_fetch_and_or:
4950  case Builtin::BI__sync_fetch_and_or_1:
4951  case Builtin::BI__sync_fetch_and_or_2:
4952  case Builtin::BI__sync_fetch_and_or_4:
4953  case Builtin::BI__sync_fetch_and_or_8:
4954  case Builtin::BI__sync_fetch_and_or_16:
4955  BuiltinIndex = 2;
4956  break;
4957 
4958  case Builtin::BI__sync_fetch_and_and:
4959  case Builtin::BI__sync_fetch_and_and_1:
4960  case Builtin::BI__sync_fetch_and_and_2:
4961  case Builtin::BI__sync_fetch_and_and_4:
4962  case Builtin::BI__sync_fetch_and_and_8:
4963  case Builtin::BI__sync_fetch_and_and_16:
4964  BuiltinIndex = 3;
4965  break;
4966 
4967  case Builtin::BI__sync_fetch_and_xor:
4968  case Builtin::BI__sync_fetch_and_xor_1:
4969  case Builtin::BI__sync_fetch_and_xor_2:
4970  case Builtin::BI__sync_fetch_and_xor_4:
4971  case Builtin::BI__sync_fetch_and_xor_8:
4972  case Builtin::BI__sync_fetch_and_xor_16:
4973  BuiltinIndex = 4;
4974  break;
4975 
4976  case Builtin::BI__sync_fetch_and_nand:
4977  case Builtin::BI__sync_fetch_and_nand_1:
4978  case Builtin::BI__sync_fetch_and_nand_2:
4979  case Builtin::BI__sync_fetch_and_nand_4:
4980  case Builtin::BI__sync_fetch_and_nand_8:
4981  case Builtin::BI__sync_fetch_and_nand_16:
4982  BuiltinIndex = 5;
4983  WarnAboutSemanticsChange = true;
4984  break;
4985 
4986  case Builtin::BI__sync_add_and_fetch:
4987  case Builtin::BI__sync_add_and_fetch_1:
4988  case Builtin::BI__sync_add_and_fetch_2:
4989  case Builtin::BI__sync_add_and_fetch_4:
4990  case Builtin::BI__sync_add_and_fetch_8:
4991  case Builtin::BI__sync_add_and_fetch_16:
4992  BuiltinIndex = 6;
4993  break;
4994 
4995  case Builtin::BI__sync_sub_and_fetch:
4996  case Builtin::BI__sync_sub_and_fetch_1:
4997  case Builtin::BI__sync_sub_and_fetch_2:
4998  case Builtin::BI__sync_sub_and_fetch_4:
4999  case Builtin::BI__sync_sub_and_fetch_8:
5000  case Builtin::BI__sync_sub_and_fetch_16:
5001  BuiltinIndex = 7;
5002  break;
5003 
5004  case Builtin::BI__sync_and_and_fetch:
5005  case Builtin::BI__sync_and_and_fetch_1:
5006  case Builtin::BI__sync_and_and_fetch_2:
5007  case Builtin::BI__sync_and_and_fetch_4:
5008  case Builtin::BI__sync_and_and_fetch_8:
5009  case Builtin::BI__sync_and_and_fetch_16:
5010  BuiltinIndex = 8;
5011  break;
5012 
5013  case Builtin::BI__sync_or_and_fetch:
5014  case Builtin::BI__sync_or_and_fetch_1:
5015  case Builtin::BI__sync_or_and_fetch_2:
5016  case Builtin::BI__sync_or_and_fetch_4:
5017  case Builtin::BI__sync_or_and_fetch_8:
5018  case Builtin::BI__sync_or_and_fetch_16:
5019  BuiltinIndex = 9;
5020  break;
5021 
5022  case Builtin::BI__sync_xor_and_fetch:
5023  case Builtin::BI__sync_xor_and_fetch_1:
5024  case Builtin::BI__sync_xor_and_fetch_2:
5025  case Builtin::BI__sync_xor_and_fetch_4:
5026  case Builtin::BI__sync_xor_and_fetch_8:
5027  case Builtin::BI__sync_xor_and_fetch_16:
5028  BuiltinIndex = 10;
5029  break;
5030 
5031  case Builtin::BI__sync_nand_and_fetch:
5032  case Builtin::BI__sync_nand_and_fetch_1:
5033  case Builtin::BI__sync_nand_and_fetch_2:
5034  case Builtin::BI__sync_nand_and_fetch_4:
5035  case Builtin::BI__sync_nand_and_fetch_8:
5036  case Builtin::BI__sync_nand_and_fetch_16:
5037  BuiltinIndex = 11;
5038  WarnAboutSemanticsChange = true;
5039  break;
5040 
5041  case Builtin::BI__sync_val_compare_and_swap:
5042  case Builtin::BI__sync_val_compare_and_swap_1:
5043  case Builtin::BI__sync_val_compare_and_swap_2:
5044  case Builtin::BI__sync_val_compare_and_swap_4:
5045  case Builtin::BI__sync_val_compare_and_swap_8:
5046  case Builtin::BI__sync_val_compare_and_swap_16:
5047  BuiltinIndex = 12;
5048  NumFixed = 2;
5049  break;
5050 
5051  case Builtin::BI__sync_bool_compare_and_swap:
5052  case Builtin::BI__sync_bool_compare_and_swap_1:
5053  case Builtin::BI__sync_bool_compare_and_swap_2:
5054  case Builtin::BI__sync_bool_compare_and_swap_4:
5055  case Builtin::BI__sync_bool_compare_and_swap_8:
5056  case Builtin::BI__sync_bool_compare_and_swap_16:
5057  BuiltinIndex = 13;
5058  NumFixed = 2;
5059  ResultType = Context.BoolTy;
5060  break;
5061 
5062  case Builtin::BI__sync_lock_test_and_set:
5063  case Builtin::BI__sync_lock_test_and_set_1:
5064  case Builtin::BI__sync_lock_test_and_set_2:
5065  case Builtin::BI__sync_lock_test_and_set_4:
5066  case Builtin::BI__sync_lock_test_and_set_8:
5067  case Builtin::BI__sync_lock_test_and_set_16:
5068  BuiltinIndex = 14;
5069  break;
5070 
5071  case Builtin::BI__sync_lock_release:
5072  case Builtin::BI__sync_lock_release_1:
5073  case Builtin::BI__sync_lock_release_2:
5074  case Builtin::BI__sync_lock_release_4:
5075  case Builtin::BI__sync_lock_release_8:
5076  case Builtin::BI__sync_lock_release_16:
5077  BuiltinIndex = 15;
5078  NumFixed = 0;
5079  ResultType = Context.VoidTy;
5080  break;
5081 
5082  case Builtin::BI__sync_swap:
5083  case Builtin::BI__sync_swap_1:
5084  case Builtin::BI__sync_swap_2:
5085  case Builtin::BI__sync_swap_4:
5086  case Builtin::BI__sync_swap_8:
5087  case Builtin::BI__sync_swap_16:
5088  BuiltinIndex = 16;
5089  break;
5090  }
5091 
5092  // Now that we know how many fixed arguments we expect, first check that we
5093  // have at least that many.
5094  if (TheCall->getNumArgs() < 1+NumFixed) {
5095  Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args_at_least)
5096  << 0 << 1 + NumFixed << TheCall->getNumArgs()
5097  << Callee->getSourceRange();
5098  return ExprError();
5099  }
5100 
5101  Diag(TheCall->getEndLoc(), diag::warn_atomic_implicit_seq_cst)
5102  << Callee->getSourceRange();
5103 
5104  if (WarnAboutSemanticsChange) {
5105  Diag(TheCall->getEndLoc(), diag::warn_sync_fetch_and_nand_semantics_change)
5106  << Callee->getSourceRange();
5107  }
5108 
5109  // Get the decl for the concrete builtin; from this we can tell what
5110  // concrete integer type we should convert to.
5111  unsigned NewBuiltinID = BuiltinIndices[BuiltinIndex][SizeIndex];
5112  const char *NewBuiltinName = Context.BuiltinInfo.getName(NewBuiltinID);
5113  FunctionDecl *NewBuiltinDecl;
5114  if (NewBuiltinID == BuiltinID)
5115  NewBuiltinDecl = FDecl;
5116  else {
5117  // Perform builtin lookup to avoid redeclaring it.
5118  DeclarationName DN(&Context.Idents.get(NewBuiltinName));
5119  LookupResult Res(*this, DN, DRE->getBeginLoc(), LookupOrdinaryName);
5120  LookupName(Res, TUScope, /*AllowBuiltinCreation=*/true);
5121  assert(Res.getFoundDecl());
5122  NewBuiltinDecl = dyn_cast<FunctionDecl>(Res.getFoundDecl());
5123  if (!NewBuiltinDecl)
5124  return ExprError();
5125  }
5126 
5127  // The first argument --- the pointer --- has a fixed type; we
5128  // deduce the types of the rest of the arguments accordingly. Walk
5129  // the remaining arguments, converting them to the deduced value type.
5130  for (unsigned i = 0; i != NumFixed; ++i) {
5131  ExprResult Arg = TheCall->getArg(i+1);
5132 
5133  // GCC does an implicit conversion to the pointer or integer ValType. This
5134  // can fail in some cases (1i -> int**); check for this error case now.
5135  // Initialize the argument.
5136  InitializedEntity Entity = InitializedEntity::InitializeParameter(Context,
5137  ValType, /*consume*/ false);
5138  Arg = PerformCopyInitialization(Entity, SourceLocation(), Arg);
5139  if (Arg.isInvalid())
5140  return ExprError();
5141 
5142  // Okay, we have something that *can* be converted to the right type. Check
5143  // to see if there is a potentially weird extension going on here. This can
5144  // happen when you do an atomic operation on something like a char* and
5145  // pass in 42. The 42 gets converted to char. This is even more strange
5146  // for things like 45.123 -> char, etc.
5147  // FIXME: Do this check.
5148  TheCall->setArg(i+1, Arg.get());
5149  }
5150 
5151  ASTContext& Context = this->getASTContext();
5152 
5153  // Create a new DeclRefExpr to refer to the new decl.
5154  DeclRefExpr* NewDRE = DeclRefExpr::Create(
5155  Context,
5156  DRE->getQualifierLoc(),
5157  SourceLocation(),
5158  NewBuiltinDecl,
5159  /*enclosing*/ false,
5160  DRE->getLocation(),
5161  Context.BuiltinFnTy,
5162  DRE->getValueKind());
5163 
5164  // Set the callee in the CallExpr.
5165  // FIXME: This loses syntactic information.
5166  QualType CalleePtrTy = Context.getPointerType(NewBuiltinDecl->getType());
5167  ExprResult PromotedCall = ImpCastExprToType(NewDRE, CalleePtrTy,
5168  CK_BuiltinFnToFnPtr);
5169  TheCall->setCallee(PromotedCall.get());
5170 
5171  // Change the result type of the call to match the original value type. This
5172  // is arbitrary, but the codegen for these builtins is designed to handle it
5173  // gracefully.
5174  TheCall->setType(ResultType);
5175 
5176  return TheCallResult;
5177 }
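// A minimal usage sketch of the rewriting above (assuming a target where
// 'long' is 8 bytes; the variable names are only illustrative):
//
//   long Counter = 0;
//   long Old = __sync_fetch_and_add(&Counter, 1);
//
// The generic call is retargeted at the size-suffixed builtin matching the
// pointee type (__sync_fetch_and_add_8 here), and the call's result type is
// reset to that pointee type, 'long'.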
5178 
5179 /// SemaBuiltinNontemporalOverloaded - We have a call to
5180 /// __builtin_nontemporal_store or __builtin_nontemporal_load, which is an
5181 /// overloaded function based on the pointer type of its last argument.
5182 ///
5183 /// This function goes through and does final semantic checking for these
5184 /// builtins.
5185 ExprResult Sema::SemaBuiltinNontemporalOverloaded(ExprResult TheCallResult) {
5186  CallExpr *TheCall = (CallExpr *)TheCallResult.get();
5187  DeclRefExpr *DRE =
5188  cast<DeclRefExpr>(TheCall->getCallee()->IgnoreParenCasts());
5189  FunctionDecl *FDecl = cast<FunctionDecl>(DRE->getDecl());
5190  unsigned BuiltinID = FDecl->getBuiltinID();
5191  assert((BuiltinID == Builtin::BI__builtin_nontemporal_store ||
5192  BuiltinID == Builtin::BI__builtin_nontemporal_load) &&
5193  "Unexpected nontemporal load/store builtin!");
5194  bool isStore = BuiltinID == Builtin::BI__builtin_nontemporal_store;
5195  unsigned numArgs = isStore ? 2 : 1;
5196 
5197  // Ensure that we have the proper number of arguments.
5198  if (checkArgCount(*this, TheCall, numArgs))
5199  return ExprError();
5200 
5201  // Inspect the last argument of the nontemporal builtin. This should always
5202  // be a pointer type, from which we infer the type of the memory access.
5203  // Because it is a pointer type, we don't have to worry about any implicit
5204  // casts here.
5205  Expr *PointerArg = TheCall->getArg(numArgs - 1);
5206  ExprResult PointerArgResult =
5207  DefaultFunctionArrayLvalueConversion(PointerArg);
5208 
5209  if (PointerArgResult.isInvalid())
5210  return ExprError();
5211  PointerArg = PointerArgResult.get();
5212  TheCall->setArg(numArgs - 1, PointerArg);
5213 
5214  const PointerType *pointerType = PointerArg->getType()->getAs<PointerType>();
5215  if (!pointerType) {
5216  Diag(DRE->getBeginLoc(), diag::err_nontemporal_builtin_must_be_pointer)
5217  << PointerArg->getType() << PointerArg->getSourceRange();
5218  return ExprError();
5219  }
5220 
5221  QualType ValType = pointerType->getPointeeType();
5222 
5223  // Strip any qualifiers off ValType.
5224  ValType = ValType.getUnqualifiedType();
5225  if (!ValType->isIntegerType() && !ValType->isAnyPointerType() &&
5226  !ValType->isBlockPointerType() && !ValType->isFloatingType() &&
5227  !ValType->isVectorType()) {
5228  Diag(DRE->getBeginLoc(),
5229  diag::err_nontemporal_builtin_must_be_pointer_intfltptr_or_vector)
5230  << PointerArg->getType() << PointerArg->getSourceRange();
5231  return ExprError();
5232  }
5233 
5234  if (!isStore) {
5235  TheCall->setType(ValType);
5236  return TheCallResult;
5237  }
5238 
5239  ExprResult ValArg = TheCall->getArg(0);
5240  InitializedEntity Entity = InitializedEntity::InitializeParameter(
5241  Context, ValType, /*consume*/ false);
5242  ValArg = PerformCopyInitialization(Entity, SourceLocation(), ValArg);
5243  if (ValArg.isInvalid())
5244  return ExprError();
5245 
5246  TheCall->setArg(0, ValArg.get());
5247  TheCall->setType(Context.VoidTy);
5248  return TheCallResult;
5249 }
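// A minimal usage sketch of the two overloads checked above (buffer and index
// names are only illustrative):
//
//   double Buf[64];
//   __builtin_nontemporal_store(1.0, &Buf[0]);       // store form: 2 arguments
//   double V = __builtin_nontemporal_load(&Buf[0]);  // load form: result 'double'
//
// The load's result type and the store's value type are both deduced from the
// pointee type of the last (pointer) argument.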
5250 
5251 /// CheckObjCString - Checks that the argument to the builtin
5252 /// CFString constructor is correct.
5253 /// Note: It might also make sense to do the UTF-16 conversion here (would
5254 /// simplify the backend).
5255 bool Sema::CheckObjCString(Expr *Arg) {
5256  Arg = Arg->IgnoreParenCasts();
5257  StringLiteral *Literal = dyn_cast<StringLiteral>(Arg);
5258 
5259  if (!Literal || !Literal->isAscii()) {
5260  Diag(Arg->getBeginLoc(), diag::err_cfstring_literal_not_string_constant)
5261  << Arg->getSourceRange();
5262  return true;
5263  }
5264 
5265  if (Literal->containsNonAsciiOrNull()) {
5266  StringRef String = Literal->getString();
5267  unsigned NumBytes = String.size();
5268  SmallVector<llvm::UTF16, 128> ToBuf(NumBytes);
5269  const llvm::UTF8 *FromPtr = (const llvm::UTF8 *)String.data();
5270  llvm::UTF16 *ToPtr = &ToBuf[0];
5271 
5272  llvm::ConversionResult Result =
5273  llvm::ConvertUTF8toUTF16(&FromPtr, FromPtr + NumBytes, &ToPtr,
5274  ToPtr + NumBytes, llvm::strictConversion);
5275  // Check for conversion failure.
5276  if (Result != llvm::conversionOK)
5277  Diag(Arg->getBeginLoc(), diag::warn_cfstring_truncated)
5278  << Arg->getSourceRange();
5279  }
5280  return false;
5281 }
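// A sketch of what this check accepts and rejects for the CFString constructor
// builtin ('SomePtr' is an illustrative non-literal argument):
//
//   __builtin___CFStringMakeConstantString("plain ASCII");  // OK: string literal
//   __builtin___CFStringMakeConstantString(SomePtr);        // error: not a string
//                                                           // constant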
5282 
5283 /// CheckOSLogFormatStringArg - Checks that the format string argument to the
5284 /// and os_trace() functions is correct, and converts it to const char *.
5285 ExprResult Sema::CheckOSLogFormatStringArg(Expr *Arg) {
5286  Arg = Arg->IgnoreParenCasts();
5287  auto *Literal = dyn_cast<StringLiteral>(Arg);
5288  if (!Literal) {
5289  if (auto *ObjcLiteral = dyn_cast<ObjCStringLiteral>(Arg)) {
5290  Literal = ObjcLiteral->getString();
5291  }
5292  }
5293 
5294  if (!Literal || (!Literal->isAscii() && !Literal->isUTF8())) {
5295  return ExprError(
5296  Diag(Arg->getBeginLoc(), diag::err_os_log_format_not_string_constant)
5297  << Arg->getSourceRange());
5298  }
5299 
5300  ExprResult Result(Literal);
5301  QualType ResultTy = Context.getPointerType(Context.CharTy.withConst());
5302  InitializedEntity Entity =
5303  InitializedEntity::InitializeParameter(Context, ResultTy, false);
5304  Result = PerformCopyInitialization(Entity, SourceLocation(), Result);
5305  return Result;
5306 }
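// A sketch of the distinction this enforces ('buf' and 'fmt' are illustrative
// names; the builtin shown is one caller of this check):
//
//   char buf[64];
//   __builtin_os_log_format(buf, "value: %d", 42);  // literal format: accepted,
//                                                   // converted to const char *
//   __builtin_os_log_format(buf, fmt, 42);          // non-literal 'fmt': rejected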
5307 
5308 /// Check that the user is calling the appropriate va_start builtin for the
5309 /// target and calling convention.
5310 static bool checkVAStartABI(Sema &S, unsigned BuiltinID, Expr *Fn) {
5311  const llvm::Triple &TT = S.Context.getTargetInfo().getTriple();
5312  bool IsX64 = TT.getArch() == llvm::Triple::x86_64;
5313  bool IsAArch64 = TT.getArch() == llvm::Triple::aarch64;
5314  bool IsWindows = TT.isOSWindows();
5315  bool IsMSVAStart = BuiltinID == Builtin::BI__builtin_ms_va_start;
5316  if (IsX64 || IsAArch64) {
5317  CallingConv CC = CC_C;
5318  if (const FunctionDecl *FD = S.getCurFunctionDecl())
5319  CC = FD->getType()->getAs<FunctionType>()->getCallConv();
5320  if (IsMSVAStart) {
5321  // Don't allow this in System V ABI functions.
5322  if (CC == CC_X86_64SysV || (!IsWindows && CC != CC_Win64))
5323  return S.Diag(Fn->getBeginLoc(),
5324  diag::err_ms_va_start_used_in_sysv_function);
5325  } else {
5326  // On x86-64/AArch64 Unix, don't allow this in Win64 ABI functions.
5327  // On x64 Windows, don't allow this in System V ABI functions.
5328  // (Yes, that means there's no corresponding way to support variadic
5329  // System V ABI functions on Windows.)
5330  if ((IsWindows && CC == CC_X86_64SysV) ||
5331  (!IsWindows && CC == CC_Win64))
5332  return S.Diag(Fn->getBeginLoc(),
5333  diag::err_va_start_used_in_wrong_abi_function)
5334  << !IsWindows;
5335  }
5336  return false;
5337  }
5338 
5339  if (IsMSVAStart)
5340  return S.Diag(Fn->getBeginLoc(), diag::err_builtin_x64_aarch64_only);
5341  return false;
5342 }
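// A sketch of the ABI rule enforced above, assuming an x86-64 Linux target:
//
//   void f(int x, ...) {
//     __builtin_ms_va_list ap;
//     __builtin_ms_va_start(ap, x);   // error: caller uses the System V ABI
//   }
//
//   void __attribute__((ms_abi)) g(int x, ...) {
//     __builtin_ms_va_list ap;
//     __builtin_ms_va_start(ap, x);   // OK: caller uses the Win64 convention
//   }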
5343 
5344 static bool checkVAStartIsInVariadicFunction(Sema &S, Expr *Fn,
5345  ParmVarDecl **LastParam = nullptr) {
5346  // Determine whether the current function, block, or obj-c method is variadic
5347  // and get its parameter list.
5348  bool IsVariadic = false;
5349  ArrayRef<ParmVarDecl *> Params;
5350  DeclContext *Caller = S.CurContext;
5351  if (auto *Block = dyn_cast<BlockDecl>(Caller)) {
5352  IsVariadic = Block->isVariadic();
5353  Params = Block->parameters();
5354  } else if (auto *FD = dyn_cast<FunctionDecl>(Caller)) {
5355  IsVariadic = FD->isVariadic();
5356  Params = FD->parameters();
5357  } else if (auto *MD = dyn_cast<ObjCMethodDecl>(Caller)) {
5358  IsVariadic = MD->isVariadic();
5359  // FIXME: This isn't correct for methods (results in bogus warning).
5360  Params = MD->parameters();
5361  } else if (isa<CapturedDecl>(Caller)) {
5362  // We don't support va_start in a CapturedDecl.
5363  S.Diag(Fn->getBeginLoc(), diag::err_va_start_captured_stmt);
5364  return true;
5365  } else {
5366  // This must be some other declcontext that parses exprs.
5367  S.Diag(Fn->getBeginLoc(), diag::err_va_start_outside_function);
5368  return true;
5369  }
5370 
5371  if (!IsVariadic) {
5372  S.Diag(Fn->getBeginLoc(), diag::err_va_start_fixed_function);
5373  return true;
5374  }
5375 
5376  if (LastParam)
5377  *LastParam = Params.empty() ? nullptr : Params.back();
5378 
5379  return false;
5380 }
5381 
5382 /// Check the arguments to '__builtin_va_start' or '__builtin_ms_va_start'
5383 /// for validity. Emit an error and return true on failure; return false
5384 /// on success.
5385 bool Sema::SemaBuiltinVAStart(unsigned BuiltinID, CallExpr *TheCall) {
5386  Expr *Fn = TheCall->getCallee();
5387 
5388  if (checkVAStartABI(*this, BuiltinID, Fn))
5389  return true;
5390 
5391  if (TheCall->getNumArgs() > 2) {
5392  Diag(TheCall->getArg(2)->getBeginLoc(),
5393  diag::err_typecheck_call_too_many_args)
5394  << 0 /*function call*/ << 2 << TheCall->getNumArgs()
5395  << Fn->getSourceRange()
5396  << SourceRange(TheCall->getArg(2)->getBeginLoc(),
5397  (*(TheCall->arg_end() - 1))->getEndLoc());
5398  return true;
5399  }
5400 
5401  if (TheCall->getNumArgs() < 2) {
5402  return Diag(TheCall->getEndLoc(),
5403  diag::err_typecheck_call_too_few_args_at_least)
5404  << 0 /*function call*/ << 2 << TheCall->getNumArgs();
5405  }
5406 
5407  // Type-check the first argument normally.
5408  if (checkBuiltinArgument(*this, TheCall, 0))
5409  return true;
5410 
5411  // Check that the current function is variadic, and get its last parameter.
5412  ParmVarDecl *LastParam;
5413  if (checkVAStartIsInVariadicFunction(*this, Fn, &LastParam))
5414  return true;
5415 
5416  // Verify that the second argument to the builtin is the last argument of the
5417  // current function or method.
5418  bool SecondArgIsLastNamedArgument = false;
5419  const Expr *Arg = TheCall->getArg(1)->IgnoreParenCasts();
5420 
5421  // These are valid if SecondArgIsLastNamedArgument is false after the next
5422  // block.
5423  QualType Type;
5424  SourceLocation ParamLoc;
5425  bool IsCRegister = false;
5426 
5427  if (const DeclRefExpr *DR = dyn_cast<DeclRefExpr>(Arg)) {
5428  if (const ParmVarDecl *PV = dyn_cast<ParmVarDecl>(DR->getDecl())) {
5429  SecondArgIsLastNamedArgument = PV == LastParam;
5430 
5431  Type = PV->getType();
5432  ParamLoc = PV->getLocation();
5433  IsCRegister =
5434  PV->getStorageClass() == SC_Register && !getLangOpts().CPlusPlus;
5435  }
5436  }
5437 
5438  if (!SecondArgIsLastNamedArgument)
5439  Diag(TheCall->getArg(1)->getBeginLoc(),
5440  diag::warn_second_arg_of_va_start_not_last_named_param);
5441  else if (IsCRegister || Type->isReferenceType() ||
5442  Type->isSpecificBuiltinType(BuiltinType::Float) || [=] {
5443  // Promotable integers are UB, but enumerations need a bit of
5444  // extra checking to see what their promotable type actually is.
5445  if (!Type->isPromotableIntegerType())
5446  return false;
5447  if (!Type->isEnumeralType())
5448  return true;
5449  const EnumDecl *ED = Type->getAs<EnumType>()->getDecl();
5450  return !(ED &&
5451  Context.typesAreCompatible(ED->getPromotionType(), Type));
5452  }()) {
5453  unsigned Reason = 0;
5454  if (Type->isReferenceType()) Reason = 1;
5455  else if (IsCRegister) Reason = 2;
5456  Diag(Arg->getBeginLoc(), diag::warn_va_start_type_is_undefined) << Reason;
5457  Diag(ParamLoc, diag::note_parameter_type) << Type;
5458  }
5459 
5460  TheCall->setType(Context.VoidTy);
5461  return false;
5462 }
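// A sketch of the checks above, written against the underlying builtins:
//
//   void g(int first, int last, ...) {
//     __builtin_va_list ap;
//     __builtin_va_start(ap, last);    // OK: 'last' is the last named parameter
//     __builtin_va_start(ap, first);   // warning: second argument is not the
//                                      // last named parameter
//   }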
5463 
5464 bool Sema::SemaBuiltinVAStartARMMicrosoft(CallExpr *Call) {
5465  // void __va_start(va_list *ap, const char *named_addr, size_t slot_size,
5466  // const char *named_addr);
5467 
5468  Expr *Func = Call->getCallee();
5469 
5470  if (Call->getNumArgs() < 3)
5471  return Diag(Call->getEndLoc(),
5472  diag::err_typecheck_call_too_few_args_at_least)
5473  << 0 /*function call*/ << 3 << Call->getNumArgs();
5474 
5475  // Type-check the first argument normally.
5476  if (checkBuiltinArgument(*this, Call, 0))
5477  return true;
5478 
5479  // Check that the current function is variadic.
5480  if (checkVAStartIsInVariadicFunction(*this, Func))
5481  return true;
5482 
5483  // __va_start on Windows does not validate the parameter qualifiers
5484 
5485  const Expr *Arg1 = Call->getArg(1)->IgnoreParens();
5486  const Type *Arg1Ty = Arg1->getType().getCanonicalType().getTypePtr();
5487 
5488  const Expr *Arg2 = Call->getArg(2)->IgnoreParens();
5489  const Type *Arg2Ty = Arg2->getType().getCanonicalType().getTypePtr();
5490 
5491  const QualType &ConstCharPtrTy =
5492  Context.getPointerType(Context.CharTy.withConst());
5493  if (!Arg1Ty->isPointerType() ||
5494  Arg1Ty->getPointeeType().withoutLocalFastQualifiers() != Context.CharTy)
5495  Diag(Arg1->getBeginLoc(), diag::err_typecheck_convert_incompatible)
5496  << Arg1->getType() << ConstCharPtrTy << 1 /* different class */
5497  << 0 /* qualifier difference */
5498  << 3 /* parameter mismatch */
5499  << 2 << Arg1->getType() << ConstCharPtrTy;
5500 
5501  const QualType SizeTy = Context.getSizeType();
5502  if (Arg2Ty->getCanonicalTypeInternal().withoutLocalFastQualifiers() != SizeTy)
5503  Diag(Arg2->getBeginLoc(), diag::err_typecheck_convert_incompatible)
5504  << Arg2->getType() << SizeTy << 1 /* different class */
5505  << 0 /* qualifier difference */
5506  << 3 /* parameter mismatch */
5507  << 3 << Arg2->getType() << SizeTy;
5508 
5509  return false;
5510 }
5511 
5512 /// SemaBuiltinUnorderedCompare - Handle functions like __builtin_isgreater and
5513 /// friends. This is declared to take (...), so we have to check everything.
5514 bool Sema::SemaBuiltinUnorderedCompare(CallExpr *TheCall) {
5515  if (TheCall->getNumArgs() < 2)
5516  return Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args)
5517  << 0 << 2 << TheCall->getNumArgs() /*function call*/;
5518  if (TheCall->getNumArgs() > 2)
5519  return Diag(TheCall->getArg(2)->getBeginLoc(),
5520  diag::err_typecheck_call_too_many_args)
5521  << 0 /*function call*/ << 2 << TheCall->getNumArgs()
5522  << SourceRange(TheCall->getArg(2)->getBeginLoc(),
5523  (*(TheCall->arg_end() - 1))->getEndLoc());
5524 
5525  ExprResult OrigArg0 = TheCall->getArg(0);
5526  ExprResult OrigArg1 = TheCall->getArg(1);
5527 
5528  // Do standard promotions between the two arguments, returning their common
5529  // type.
5530  QualType Res = UsualArithmeticConversions(OrigArg0, OrigArg1, false);
5531  if (OrigArg0.isInvalid() || OrigArg1.isInvalid())
5532  return true;
5533 
5534  // Make sure any conversions are pushed back into the call; this is
5535  // type safe since unordered compare builtins are declared as "_Bool
5536  // foo(...)".
5537  TheCall->setArg(0, OrigArg0.get());
5538  TheCall->setArg(1, OrigArg1.get());
5539 
5540  if (OrigArg0.get()->isTypeDependent() || OrigArg1.get()->isTypeDependent())
5541  return false;
5542 
5543  // If the common type isn't a real floating type, then the arguments were
5544  // invalid for this operation.
5545  if (Res.isNull() || !Res->isRealFloatingType())
5546  return Diag(OrigArg0.get()->getBeginLoc(),
5547  diag::err_typecheck_call_invalid_ordered_compare)
5548  << OrigArg0.get()->getType() << OrigArg1.get()->getType()
5549  << SourceRange(OrigArg0.get()->getBeginLoc(),
5550  OrigArg1.get()->getEndLoc());
5551 
5552  return false;
5553 }
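// A sketch of what the unordered-compare checking accepts and rejects:
//
//   __builtin_isgreater(1.0f, 2.0);   // OK: common type after the usual
//                                     // arithmetic conversions is 'double'
//   __builtin_isgreater(1, 2);        // error: common type 'int' is not a real
//                                     // floating type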
5554 
5555 /// SemaBuiltinFPClassification - Handle functions like
5556 /// __builtin_isnan and friends. This is declared to take (...), so we have
5557 /// to check everything. We expect the last argument to be a floating point
5558 /// value.
5559 bool Sema::SemaBuiltinFPClassification(CallExpr *TheCall, unsigned NumArgs) {
5560  if (TheCall->getNumArgs() < NumArgs)
5561  return Diag(TheCall->getEndLoc(), diag::err_typecheck_call_too_few_args)
5562  << 0 << NumArgs << TheCall->getNumArgs() /*function call*/;
5563  if (TheCall->getNumArgs() > NumArgs)
5564  return Diag(TheCall->getArg(NumArgs)->getBeginLoc(),
5565  diag::err_typecheck_call_too_many_args)
5566  << 0 /*function call*/ << NumArgs << TheCall->getNumArgs()
5567  << SourceRange(TheCall->getArg(NumArgs)->getBeginLoc(),
5568  (*(TheCall->arg_end() - 1))->getEndLoc());
5569 
5570  Expr *OrigArg = TheCall->getArg(NumArgs-1);
5571 
5572  if (OrigArg->isTypeDependent())
5573  return false;
5574 
5575  // This operation requires a non-_Complex floating-point number.
5576  if (!OrigArg->getType()->isRealFloatingType())
5577  return Diag(OrigArg->getBeginLoc(),
5578  diag::err_typecheck_call_invalid_unary_fp)
5579  << OrigArg->getType() << OrigArg->getSourceRange();
5580 
5581  // If this is an implicit conversion from float -> float, double, or
5582  // long double, remove it.
5583  if (ImplicitCastExpr *Cast = dyn_cast<ImplicitCastExpr>(OrigArg)) {
5584  // Only remove standard FloatCasts, leaving other casts in place
5585  if (Cast->getCastKind() == CK_FloatingCast) {
5586  Expr *CastArg = Cast->getSubExpr();
5587  if (CastArg->getType()->isSpecificBuiltinType(BuiltinType::Float)) {
5588  assert(
5589  (Cast->getType()->isSpecificBuiltinType(BuiltinType::Double) ||
5590  Cast->getType()->isSpecificBuiltinType(BuiltinType::Float) ||
5591  Cast->getType()->isSpecificBuiltinType(BuiltinType::LongDouble)) &&
5592  "promotion from float to either float, double, or long double is "
5593  "the only expected cast here");
5594  Cast->setSubExpr(nullptr);
5595  TheCall->setArg(NumArgs-1, CastArg);
5596  }
5597  }
5598  }
5599 
5600  return false;
5601 }
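// A sketch of the classification checks above:
//
//   float F = 0.0f;
//   __builtin_isnan(F);    // OK: the implicit float->double promotion is
//                          // stripped, so codegen can classify at 'float'
//   __builtin_isnan(42);   // error: the operand is not a floating-point value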
5602 
5603 // Customized Sema Checking for VSX builtins that have the following signature:
5604 // vector [...] builtinName(vector [...], vector [...], const int);
5605 // which take the same vector type (any legal vector type) for the first
5606 // two arguments and a compile-time constant for the third argument.
5607 // Example builtins are:
5608 // vector double vec_xxpermdi(vector double, vector double, int);
5609 // vector short vec_xxsldwi(vector short, vector short, int);
5610 bool Sema::SemaBuiltinVSX(CallExpr *TheCall) {
5611  unsigned ExpectedNumArgs = 3;
5612  if (TheCall->getNumArgs() < ExpectedNumArgs)
5613  return Diag(TheCall->getEndLoc(),
5614  diag::err_typecheck_call_too_few_args_at_least)
5615  << 0 /*function call*/ << ExpectedNumArgs << TheCall->getNumArgs()
5616  << TheCall->getSourceRange();
5617 
5618  if (TheCall->getNumArgs() > ExpectedNumArgs)
5619  return Diag(TheCall->getEndLoc(),
5620  diag::err_typecheck_call_too_many_args_at_most)
5621  << 0 /*function call*/ << ExpectedNumArgs << TheCall->getNumArgs()
5622  << TheCall->getSourceRange();
5623 
5624  // Check that the third argument is a compile-time constant.
5625  llvm::APSInt Value;
5626  if (!TheCall->getArg(2)->isIntegerConstantExpr(Value, Context))
5627  return Diag(TheCall->getBeginLoc(),
5628  diag::err_vsx_builtin_nonconstant_argument)
5629  << 3 /* argument index */ << TheCall->getDirectCallee()
5630  << SourceRange(TheCall->getArg(2)->getBeginLoc(),
5631  TheCall->getArg(2)->getEndLoc());
5632 
5633  QualType Arg1Ty = TheCall->getArg(0)->getType();
5634  QualType Arg2Ty = TheCall->getArg(1)->getType();
5635 
5636  // Check that the types of arguments 1 and 2 are vectors.
5637  SourceLocation BuiltinLoc = TheCall->getBeginLoc();
5638  if ((!Arg1Ty->isVectorType() && !Arg1Ty->isDependentType()) ||
5639  (!Arg2Ty->isVectorType() && !Arg2Ty->isDependentType())) {
5640  return Diag(BuiltinLoc, diag::err_vec_builtin_non_vector)
5641  << TheCall->getDirectCallee()
5642  << SourceRange(TheCall->getArg(0)->getBeginLoc(),
5643  TheCall->getArg(1)->getEndLoc());
5644  }
5645 
5646  // Check that the first two arguments have the same type.
5647  if (!Context.hasSameUnqualifiedType(Arg1Ty, Arg2Ty)) {
5648  return Diag(BuiltinLoc, diag::err_vec_builtin_incompatible_vector)
5649  << TheCall->getDirectCallee()
5650  << SourceRange(TheCall->getArg(0)->getBeginLoc(),
5651  TheCall->getArg(1)->getEndLoc());
5652  }
5653 
5654  // When default clang type checking is turned off and this customized type
5655  // checking is used, the return type of the call must be set explicitly;
5656  // otherwise it is _Bool by default.
5657  TheCall->setType(Arg1Ty);
5658 
5659  return false;
5660 }
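// A sketch of the VSX constraint enforced above (assumes a PowerPC target with
// VSX/Altivec enabled, using the vec_xxpermdi example from the comment):
//
//   vector double A, B;
//   vector double R = vec_xxpermdi(A, B, 0);   // OK: constant third argument
//   int N = 1;
//   vector double S = vec_xxpermdi(A, B, N);   // error: third argument must be
//                                              // a compile-time constant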
5661 
5662 /// SemaBuiltinShuffleVector - Handle __builtin_shufflevector.
5663 // This is declared to take (...), so we have to check everything.
5664 ExprResult Sema::SemaBuiltinShuffleVector(CallExpr *TheCall) {
5665  if (TheCall->getNumArgs() < 2)
5666  return ExprError(Diag(TheCall->getEndLoc(),
5667  diag::err_typecheck_call_too_few_args_at_least)
5668  << 0 /*function call*/ << 2 << TheCall->getNumArgs()
5669  << TheCall->getSourceRange());
5670 
5671  // Determine which of the following types of shufflevector we're checking:
5672  // 1) unary, vector mask: (lhs, mask)
5673  // 2) binary, scalar mask: (lhs, rhs, index, ..., index)
5674  QualType resType = TheCall->getArg(0)->getType();
5675  unsigned numElements = 0;
5676 
5677  if (!TheCall->getArg(0)->isTypeDependent() &&
5678  !TheCall->getArg(1)->isTypeDependent()) {
5679  QualType LHSType = TheCall->getArg(0)->getType();
5680  QualType RHSType = TheCall->getArg(1)->getType();
5681 
5682  if (!LHSType->isVectorType() || !RHSType->isVectorType())
5683  return ExprError(
5684  Diag(TheCall->getBeginLoc(), diag::err_vec_builtin_non_vector)
5685  << TheCall->getDirectCallee()
5686  << SourceRange(TheCall->getArg(0)->getBeginLoc(),
5687  TheCall->getArg(1)->getEndLoc()));
5688 
5689  numElements = LHSType->getAs<VectorType>()->getNumElements();
5690  unsigned numResElements = TheCall->getNumArgs() - 2;
5691 
5692  // Check to see if we have a call with 2 vector arguments, the unary shuffle
5693  // with mask. If so, verify that RHS is an integer vector type with the
5694  // same number of elts as lhs.
5695  if (TheCall->getNumArgs() == 2) {
5696  if (!RHSType->hasIntegerRepresentation() ||
5697  RHSType->getAs<VectorType>()->getNumElements() != numElements)
5698  return ExprError(Diag(TheCall->getBeginLoc(),
5699  diag::err_vec_builtin_incompatible_vector)
5700  << TheCall->getDirectCallee()
5701  << SourceRange(TheCall->getArg(1)->getBeginLoc(),
5702  TheCall->getArg(1)->getEndLoc()));
5703  } else if (!Context.hasSameUnqualifiedType(LHSType, RHSType)) {
5704  return ExprError(Diag(TheCall->getBeginLoc(),