40#include "llvm/ADT/IndexedMap.h"
41#include "llvm/ADT/PointerEmbeddedInt.h"
42#include "llvm/ADT/STLExtras.h"
43#include "llvm/ADT/Sequence.h"
44#include "llvm/ADT/SetVector.h"
45#include "llvm/ADT/SmallSet.h"
46#include "llvm/ADT/StringExtras.h"
47#include "llvm/Frontend/OpenMP/OMPAssume.h"
48#include "llvm/Frontend/OpenMP/OMPConstants.h"
49#include "llvm/IR/Assumptions.h"
53using namespace llvm::omp;
68enum DefaultDataSharingAttributes {
73 DSA_firstprivate = 1 << 3,
80enum DefaultDataSharingVCAttributes {
94 unsigned Modifier = 0;
95 const Expr *RefExpr =
nullptr;
96 DeclRefExpr *PrivateCopy =
nullptr;
97 SourceLocation ImplicitDSALoc;
98 bool AppliedToPointee =
false;
99 DSAVarData() =
default;
101 const Expr *RefExpr, DeclRefExpr *PrivateCopy,
102 SourceLocation ImplicitDSALoc,
unsigned Modifier,
103 bool AppliedToPointee)
104 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
105 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
106 AppliedToPointee(AppliedToPointee) {}
108 using OperatorOffsetTy =
109 llvm::SmallVector<std::pair<Expr *, OverloadedOperatorKind>, 4>;
110 using DoacrossClauseMapTy = llvm::DenseMap<OMPClause *, OperatorOffsetTy>;
112 enum class UsesAllocatorsDeclKind {
116 UserDefinedAllocator,
124 unsigned Modifier = 0;
127 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
128 DeclRefExpr *PrivateCopy =
nullptr;
131 bool AppliedToPointee =
false;
133 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
134 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
135 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
136 using LoopControlVariablesMapTy =
137 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
140 struct MappedExprComponentTy {
144 using MappedExprComponentsTy =
145 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
146 using CriticalsWithHintsTy =
147 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
148 struct ReductionData {
149 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
150 SourceRange ReductionRange;
151 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
152 ReductionData() =
default;
157 void set(
const Expr *RefExpr, SourceRange RR) {
159 ReductionOp = RefExpr;
162 using DeclReductionMapTy =
163 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
164 struct DefaultmapInfo {
168 DefaultmapInfo() =
default;
170 : ImplicitBehavior(M), SLoc(Loc) {}
173 struct SharingMapTy {
174 DeclSAMapTy SharingMap;
175 DeclReductionMapTy ReductionMap;
176 UsedRefMapTy AlignedMap;
177 UsedRefMapTy NontemporalMap;
178 MappedExprComponentsTy MappedExprComponents;
179 LoopControlVariablesMapTy LCVMap;
180 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
181 SourceLocation DefaultAttrLoc;
182 DefaultDataSharingVCAttributes DefaultVCAttr = DSA_VC_all;
183 SourceLocation DefaultAttrVCLoc;
186 DeclarationNameInfo DirectiveName;
187 Scope *CurScope =
nullptr;
188 DeclContext *Context =
nullptr;
189 SourceLocation ConstructLoc;
193 DoacrossClauseMapTy DoacrossDepends;
197 std::optional<std::pair<const Expr *, OMPOrderedClause *>> OrderedRegion;
198 bool RegionHasOrderConcurrent =
false;
199 unsigned AssociatedLoops = 1;
200 bool HasMutipleLoops =
false;
201 const Decl *PossiblyLoopCounter =
nullptr;
202 bool NowaitRegion =
false;
203 bool UntiedRegion =
false;
204 bool CancelRegion =
false;
205 bool LoopStart =
false;
206 bool BodyComplete =
false;
207 SourceLocation PrevScanLocation;
208 SourceLocation PrevOrderedLocation;
209 SourceLocation InnerTeamsRegionLoc;
211 Expr *TaskgroupReductionRef =
nullptr;
212 llvm::DenseSet<QualType> MappedClassesQualTypes;
213 SmallVector<Expr *, 4> InnerUsedAllocators;
214 llvm::DenseSet<CanonicalDeclPtr<Decl>> ImplicitTaskFirstprivates;
217 llvm::SmallVector<DeclRefExpr *, 4> DeclareTargetLinkVarDecls;
219 llvm::DenseSet<CanonicalDeclPtr<Decl>> UsedInScanDirective;
220 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
224 struct ImplicitDefaultFDInfoTy {
226 const FieldDecl *FD =
nullptr;
228 size_t StackLevel = 0;
230 VarDecl *VD =
nullptr;
231 ImplicitDefaultFDInfoTy(
const FieldDecl *FD,
size_t StackLevel,
233 : FD(FD), StackLevel(StackLevel), VD(VD) {}
236 llvm::SmallVector<ImplicitDefaultFDInfoTy, 8>
237 ImplicitDefaultFirstprivateFDs;
238 Expr *DeclareMapperVar =
nullptr;
239 SmallVector<VarDecl *, 16> IteratorVarDecls;
241 Scope *CurScope, SourceLocation Loc)
242 : Directive(DKind), DirectiveName(Name), CurScope(CurScope),
244 SharingMapTy() =
default;
247 using StackTy = SmallVector<SharingMapTy, 4>;
250 DeclSAMapTy Threadprivates;
251 DeclSAMapTy Groupprivates;
252 const FunctionScopeInfo *CurrentNonCapturingFunctionScope =
nullptr;
253 SmallVector<std::pair<StackTy, const FunctionScopeInfo *>, 4> Stack;
258 bool ForceCapturing =
false;
261 bool ForceCaptureByReferenceInTargetExecutable =
false;
262 CriticalsWithHintsTy Criticals;
263 unsigned IgnoredStackElements = 0;
267 using const_iterator = StackTy::const_reverse_iterator;
268 const_iterator begin()
const {
269 return Stack.empty() ? const_iterator()
270 : Stack.back().first.rbegin() + IgnoredStackElements;
272 const_iterator end()
const {
273 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
275 using iterator = StackTy::reverse_iterator;
277 return Stack.empty() ? iterator()
278 : Stack.back().first.rbegin() + IgnoredStackElements;
281 return Stack.empty() ? iterator() : Stack.back().first.rend();
286 bool isStackEmpty()
const {
287 return Stack.empty() ||
288 Stack.back().second != CurrentNonCapturingFunctionScope ||
289 Stack.back().first.size() <= IgnoredStackElements;
291 size_t getStackSize()
const {
292 return isStackEmpty() ? 0
293 : Stack.back().first.size() - IgnoredStackElements;
296 SharingMapTy *getTopOfStackOrNull() {
297 size_t Size = getStackSize();
300 return &Stack.back().first[
Size - 1];
302 const SharingMapTy *getTopOfStackOrNull()
const {
303 return const_cast<DSAStackTy &
>(*this).getTopOfStackOrNull();
305 SharingMapTy &getTopOfStack() {
306 assert(!isStackEmpty() &&
"no current directive");
307 return *getTopOfStackOrNull();
309 const SharingMapTy &getTopOfStack()
const {
310 return const_cast<DSAStackTy &
>(*this).getTopOfStack();
313 SharingMapTy *getSecondOnStackOrNull() {
314 size_t Size = getStackSize();
317 return &Stack.back().first[
Size - 2];
319 const SharingMapTy *getSecondOnStackOrNull()
const {
320 return const_cast<DSAStackTy &
>(*this).getSecondOnStackOrNull();
329 SharingMapTy &getStackElemAtLevel(
unsigned Level) {
330 assert(Level < getStackSize() &&
"no such stack element");
331 return Stack.back().first[
Level];
333 const SharingMapTy &getStackElemAtLevel(
unsigned Level)
const {
334 return const_cast<DSAStackTy &
>(*this).getStackElemAtLevel(Level);
337 DSAVarData getDSA(const_iterator &Iter, ValueDecl *D)
const;
340 bool isOpenMPLocal(VarDecl *D, const_iterator Iter)
const;
343 SmallVector<const OMPRequiresDecl *, 2> RequiresDecls;
345 QualType OMPAllocatorHandleT;
349 QualType OMPEventHandleT;
351 QualType OMPAlloctraitT;
353 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
356 SmallVector<SourceLocation, 2> TargetLocations;
357 SourceLocation AtomicLocation;
359 SmallVector<llvm::omp::TraitProperty, 8> ConstructTraits;
362 explicit DSAStackTy(Sema &S) : SemaRef(S) {}
365 void setOMPAllocatorHandleT(QualType Ty) { OMPAllocatorHandleT = Ty; }
/// Returns the cached omp_allocator_handle_t type; a default-constructed
/// (null) QualType if setOMPAllocatorHandleT has not been called yet.
367 QualType getOMPAllocatorHandleT()
const {
return OMPAllocatorHandleT; }
369 void setOMPAlloctraitT(QualType Ty) { OMPAlloctraitT = Ty; }
/// Returns the cached omp_alloctrait_t type; null QualType if unset.
371 QualType getOMPAlloctraitT()
const {
return OMPAlloctraitT; }
373 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
375 OMPPredefinedAllocators[AllocatorKind] = Allocator;
378 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind)
const {
379 return OMPPredefinedAllocators[AllocatorKind];
/// Cache the resolved omp_depend_t type for reuse by later clauses.
382 void setOMPDependT(QualType Ty) { OMPDependT = Ty; }
/// Returns the cached omp_depend_t type; null QualType if unset.
384 QualType getOMPDependT()
const {
return OMPDependT; }
/// Cache the resolved omp_event_handle_t type for reuse by later clauses.
387 void setOMPEventHandleT(QualType Ty) { OMPEventHandleT = Ty; }
/// Returns the cached omp_event_handle_t type; null QualType if unset.
389 QualType getOMPEventHandleT()
const {
return OMPEventHandleT; }
/// True while the clauses of a directive are being analyzed, i.e. while
/// ClauseKindMode holds a real clause kind rather than OMPC_unknown.
391 bool isClauseParsingMode()
const {
return ClauseKindMode != OMPC_unknown; }
393 assert(isClauseParsingMode() &&
"Must be in clause parsing mode.");
394 return ClauseKindMode;
/// True if the body of the innermost directive on the stack has already been
/// fully processed. Safe on an empty stack: returns false when there is no
/// current directive (getTopOfStackOrNull() yields nullptr).
398 bool isBodyComplete()
const {
399 const SharingMapTy *Top = getTopOfStackOrNull();
400 return Top && Top->BodyComplete;
/// Marks the body of the current (top-of-stack) directive as fully
/// processed. Requires a non-empty stack (getTopOfStack asserts).
402 void setBodyComplete() { getTopOfStack().BodyComplete =
true; }
/// True if variable capturing is being forced regardless of the normal
/// data-sharing rules (see setForceVarCapturing).
404 bool isForceVarCapturing()
const {
return ForceCapturing; }
/// Enables/disables forced capturing of variables (stack-global flag, not
/// tied to any particular directive on the stack).
405 void setForceVarCapturing(
bool V) { ForceCapturing =
V; }
/// Enables/disables forcing by-reference capture inside target executable
/// regions (stack-global flag).
407 void setForceCaptureByReferenceInTargetExecutable(
bool V) {
408 ForceCaptureByReferenceInTargetExecutable =
V;
/// True if by-reference capture is currently being forced for target
/// executable regions.
410 bool isForceCaptureByReferenceInTargetExecutable()
const {
411 return ForceCaptureByReferenceInTargetExecutable;
415 Scope *CurScope, SourceLocation Loc) {
416 assert(!IgnoredStackElements &&
417 "cannot change stack while ignoring elements");
419 Stack.back().second != CurrentNonCapturingFunctionScope)
420 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
421 Stack.back().first.emplace_back(DKind, DirName, CurScope, Loc);
422 Stack.back().first.back().DefaultAttrLoc = Loc;
426 assert(!IgnoredStackElements &&
427 "cannot change stack while ignoring elements");
428 assert(!Stack.back().first.empty() &&
429 "Data-sharing attributes stack is empty!");
430 Stack.back().first.pop_back();
435 class ParentDirectiveScope {
440 ParentDirectiveScope(DSAStackTy &Self,
bool Activate)
441 : Self(Self), Active(
false) {
445 ~ParentDirectiveScope() { disable(); }
448 --Self.IgnoredStackElements;
454 ++Self.IgnoredStackElements;
463 "Expected loop-based directive.");
464 getTopOfStack().LoopStart =
true;
469 "Expected loop-based directive.");
470 getTopOfStack().LoopStart =
false;
473 bool isLoopStarted()
const {
475 "Expected loop-based directive.");
476 return !getTopOfStack().LoopStart;
479 void resetPossibleLoopCounter(
const Decl *D =
nullptr) {
480 getTopOfStack().PossiblyLoopCounter = D ? D->getCanonicalDecl() : D;
483 const Decl *getPossiblyLoopCounter()
const {
484 return getTopOfStack().PossiblyLoopCounter;
487 void pushFunction() {
488 assert(!IgnoredStackElements &&
489 "cannot change stack while ignoring elements");
490 const FunctionScopeInfo *CurFnScope = SemaRef.getCurFunction();
492 CurrentNonCapturingFunctionScope = CurFnScope;
495 void popFunction(
const FunctionScopeInfo *OldFSI) {
496 assert(!IgnoredStackElements &&
497 "cannot change stack while ignoring elements");
498 if (!Stack.empty() && Stack.back().second == OldFSI) {
499 assert(Stack.back().first.empty());
502 CurrentNonCapturingFunctionScope =
nullptr;
503 for (
const FunctionScopeInfo *FSI : llvm::reverse(SemaRef.FunctionScopes)) {
505 CurrentNonCapturingFunctionScope = FSI;
511 void addCriticalWithHint(
const OMPCriticalDirective *D, llvm::APSInt Hint) {
512 Criticals.try_emplace(D->getDirectiveName().getAsString(), D, Hint);
514 std::pair<const OMPCriticalDirective *, llvm::APSInt>
515 getCriticalWithHint(
const DeclarationNameInfo &Name)
const {
517 if (I != Criticals.end())
519 return std::make_pair(
nullptr, llvm::APSInt());
524 const Expr *addUniqueAligned(
const ValueDecl *D,
const Expr *NewDE);
528 const Expr *addUniqueNontemporal(
const ValueDecl *D,
const Expr *NewDE);
531 void addLoopControlVariable(
const ValueDecl *D, VarDecl *
Capture);
536 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D)
const;
541 const LCDeclInfo isParentLoopControlVariable(
const ValueDecl *D)
const;
546 const LCDeclInfo isLoopControlVariable(
const ValueDecl *D,
547 unsigned Level)
const;
550 const ValueDecl *getParentLoopControlVariable(
unsigned I)
const;
553 void markDeclAsUsedInScanDirective(ValueDecl *D) {
554 if (SharingMapTy *Stack = getSecondOnStackOrNull())
555 Stack->UsedInScanDirective.insert(D);
559 bool isUsedInScanDirective(ValueDecl *D)
const {
560 if (
const SharingMapTy *Stack = getTopOfStackOrNull())
561 return Stack->UsedInScanDirective.contains(D);
567 DeclRefExpr *PrivateCopy =
nullptr,
unsigned Modifier = 0,
568 bool AppliedToPointee =
false);
572 void addTaskgroupReductionData(
const ValueDecl *D, SourceRange SR,
576 void addTaskgroupReductionData(
const ValueDecl *D, SourceRange SR,
577 const Expr *ReductionRef);
581 getTopMostTaskgroupReductionData(
const ValueDecl *D, SourceRange &SR,
583 Expr *&TaskgroupDescriptor)
const;
587 getTopMostTaskgroupReductionData(
const ValueDecl *D, SourceRange &SR,
588 const Expr *&ReductionRef,
589 Expr *&TaskgroupDescriptor)
const;
592 Expr *getTaskgroupReductionRef()
const {
593 assert((getTopOfStack().
Directive == OMPD_taskgroup ||
597 "taskgroup reference expression requested for non taskgroup or "
598 "parallel/worksharing directive.");
599 return getTopOfStack().TaskgroupReductionRef;
603 bool isTaskgroupReductionRef(
const ValueDecl *VD,
unsigned Level)
const {
604 return getStackElemAtLevel(Level).TaskgroupReductionRef &&
611 const DSAVarData getTopDSA(ValueDecl *D,
bool FromParent);
613 const DSAVarData getImplicitDSA(ValueDecl *D,
bool FromParent)
const;
615 const DSAVarData getImplicitDSA(ValueDecl *D,
unsigned Level)
const;
622 DefaultDataSharingAttributes)>
625 bool FromParent)
const;
630 hasInnermostDSA(ValueDecl *D,
633 bool FromParent)
const;
638 hasExplicitDSA(
const ValueDecl *D,
640 unsigned Level,
bool NotLastprivate =
false)
const;
644 bool hasExplicitDirective(
646 unsigned Level)
const;
650 const llvm::function_ref<
bool(
653 bool FromParent)
const;
657 const SharingMapTy *Top = getTopOfStackOrNull();
658 return Top ? Top->Directive : OMPD_unknown;
662 assert(!isStackEmpty() &&
"No directive at specified level.");
663 return getStackElemAtLevel(Level).Directive;
667 unsigned OpenMPCaptureLevel)
const {
668 SmallVector<OpenMPDirectiveKind, 4> CaptureRegions;
670 return CaptureRegions[OpenMPCaptureLevel];
674 const SharingMapTy *Parent = getSecondOnStackOrNull();
675 return Parent ? Parent->Directive : OMPD_unknown;
679 void addRequiresDecl(OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
682 template <
typename ClauseType>
bool hasRequiresDeclWithClause()
const {
683 return llvm::any_of(RequiresDecls, [](
const OMPRequiresDecl *D) {
684 return llvm::any_of(D->
clauselists(), [](
const OMPClause *
C) {
685 return isa<ClauseType>(C);
692 bool hasDuplicateRequiresClause(ArrayRef<OMPClause *> ClauseList)
const {
693 bool IsDuplicate =
false;
694 for (OMPClause *CNew : ClauseList) {
695 for (
const OMPRequiresDecl *D : RequiresDecls) {
696 for (
const OMPClause *CPrev : D->clauselists()) {
697 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
698 SemaRef.Diag(CNew->getBeginLoc(),
699 diag::err_omp_requires_clause_redeclaration)
701 SemaRef.Diag(CPrev->getBeginLoc(),
702 diag::note_omp_requires_previous_clause)
/// Records the start location of an encountered target directive; the full
/// list is later retrievable via getEncounteredTargetLocs().
713 void addTargetDirLocation(SourceLocation LocStart) {
714 TargetLocations.push_back(LocStart);
/// Records the location of an atomic directive. Only the FIRST one is kept:
/// once AtomicLocation is valid, subsequent calls are no-ops.
718 void addAtomicDirectiveLoc(SourceLocation Loc) {
719 if (AtomicLocation.isInvalid())
720 AtomicLocation = Loc;
/// Returns the location of the first encountered atomic directive, or an
/// invalid SourceLocation if none was recorded.
725 SourceLocation getAtomicDirectiveLoc()
const {
return AtomicLocation; }
/// Returns the locations of all target directives recorded so far via
/// addTargetDirLocation(). The returned view borrows from TargetLocations.
728 ArrayRef<SourceLocation> getEncounteredTargetLocs()
const {
729 return TargetLocations;
/// Sets default data-sharing to 'none' for the current directive and records
/// the clause location for diagnostics.
733 void setDefaultDSANone(SourceLocation Loc) {
734 getTopOfStack().DefaultAttr = DSA_none;
735 getTopOfStack().DefaultAttrLoc = Loc;
/// Sets default data-sharing to 'shared' for the current directive and
/// records the clause location for diagnostics.
738 void setDefaultDSAShared(SourceLocation Loc) {
739 getTopOfStack().DefaultAttr = DSA_shared;
740 getTopOfStack().DefaultAttrLoc = Loc;
/// Sets default data-sharing to 'private' for the current directive and
/// records the clause location for diagnostics.
743 void setDefaultDSAPrivate(SourceLocation Loc) {
744 getTopOfStack().DefaultAttr = DSA_private;
745 getTopOfStack().DefaultAttrLoc = Loc;
/// Sets default data-sharing to 'firstprivate' for the current directive and
/// records the clause location for diagnostics.
748 void setDefaultDSAFirstPrivate(SourceLocation Loc) {
749 getTopOfStack().DefaultAttr = DSA_firstprivate;
750 getTopOfStack().DefaultAttrLoc = Loc;
/// Restricts the default clause's variable category to 'aggregate' for the
/// current directive and records the category location for diagnostics.
753 void setDefaultDSAVCAggregate(SourceLocation VCLoc) {
754 getTopOfStack().DefaultVCAttr = DSA_VC_aggregate;
755 getTopOfStack().DefaultAttrVCLoc = VCLoc;
/// Sets the default clause's variable category to 'all' (the default per
/// SharingMapTy::DefaultVCAttr) for the current directive.
758 void setDefaultDSAVCAll(SourceLocation VCLoc) {
759 getTopOfStack().DefaultVCAttr = DSA_VC_all;
760 getTopOfStack().DefaultAttrVCLoc = VCLoc;
/// Restricts the default clause's variable category to 'pointer' for the
/// current directive and records the category location for diagnostics.
763 void setDefaultDSAVCPointer(SourceLocation VCLoc) {
764 getTopOfStack().DefaultVCAttr = DSA_VC_pointer;
765 getTopOfStack().DefaultAttrVCLoc = VCLoc;
/// Restricts the default clause's variable category to 'scalar' for the
/// current directive and records the category location for diagnostics.
768 void setDefaultDSAVCScalar(SourceLocation VCLoc) {
769 getTopOfStack().DefaultVCAttr = DSA_VC_scalar;
770 getTopOfStack().DefaultAttrVCLoc = VCLoc;
775 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[
Kind];
776 DMI.ImplicitBehavior = M;
782 return getTopOfStack()
783 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
786 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
789 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
791 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
795 ArrayRef<llvm::omp::TraitProperty> getConstructTraits() {
796 return ConstructTraits;
798 void handleConstructTrait(ArrayRef<llvm::omp::TraitProperty> Traits,
801 ConstructTraits.append(Traits.begin(), Traits.end());
803 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
804 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
805 assert(Top == Trait &&
"Something left a trait on the stack!");
811 DefaultDataSharingAttributes getDefaultDSA(
unsigned Level)
const {
812 return getStackSize() <=
Level ? DSA_unspecified
813 : getStackElemAtLevel(Level).DefaultAttr;
815 DefaultDataSharingAttributes getDefaultDSA()
const {
816 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
818 SourceLocation getDefaultDSALocation()
const {
819 return isStackEmpty() ? SourceLocation() : getTopOfStack().DefaultAttrLoc;
823 return isStackEmpty()
825 : getTopOfStack().DefaultmapMap[
Kind].ImplicitBehavior;
828 getDefaultmapModifierAtLevel(
unsigned Level,
830 return getStackElemAtLevel(Level).DefaultmapMap[
Kind].ImplicitBehavior;
832 bool isDefaultmapCapturedByRef(
unsigned Level,
835 getDefaultmapModifierAtLevel(Level, Kind);
836 if (Kind == OMPC_DEFAULTMAP_scalar || Kind == OMPC_DEFAULTMAP_pointer) {
837 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
838 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
839 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
840 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom) ||
841 (M == OMPC_DEFAULTMAP_MODIFIER_present) ||
842 (M == OMPC_DEFAULTMAP_MODIFIER_storage);
849 case OMPC_DEFAULTMAP_scalar:
850 case OMPC_DEFAULTMAP_pointer:
852 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
853 (M == OMPC_DEFAULTMAP_MODIFIER_default);
854 case OMPC_DEFAULTMAP_aggregate:
855 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
859 llvm_unreachable(
"Unexpected OpenMPDefaultmapClauseKind enum");
861 bool mustBeFirstprivateAtLevel(
unsigned Level,
864 getDefaultmapModifierAtLevel(Level, Kind);
865 return mustBeFirstprivateBase(M, Kind);
869 return mustBeFirstprivateBase(M, Kind);
873 bool isThreadPrivate(VarDecl *D) {
874 const DSAVarData DVar = getTopDSA(D,
false);
879 void setOrderedRegion(
bool IsOrdered,
const Expr *Param,
880 OMPOrderedClause *Clause) {
882 getTopOfStack().OrderedRegion.emplace(Param, Clause);
884 getTopOfStack().OrderedRegion.reset();
888 bool isOrderedRegion()
const {
889 if (
const SharingMapTy *Top = getTopOfStackOrNull())
890 return Top->OrderedRegion.has_value();
894 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam()
const {
895 if (
const SharingMapTy *Top = getTopOfStackOrNull())
896 if (Top->OrderedRegion)
897 return *Top->OrderedRegion;
898 return std::make_pair(
nullptr,
nullptr);
902 bool isParentOrderedRegion()
const {
903 if (
const SharingMapTy *Parent = getSecondOnStackOrNull())
904 return Parent->OrderedRegion.has_value();
908 std::pair<const Expr *, OMPOrderedClause *>
909 getParentOrderedRegionParam()
const {
910 if (
const SharingMapTy *Parent = getSecondOnStackOrNull())
911 if (Parent->OrderedRegion)
912 return *Parent->OrderedRegion;
913 return std::make_pair(
nullptr,
nullptr);
916 void setRegionHasOrderConcurrent(
bool HasOrderConcurrent) {
917 getTopOfStack().RegionHasOrderConcurrent = HasOrderConcurrent;
921 bool isParentOrderConcurrent()
const {
922 if (
const SharingMapTy *Parent = getSecondOnStackOrNull())
923 return Parent->RegionHasOrderConcurrent;
927 void setNowaitRegion(
bool IsNowait =
true) {
928 getTopOfStack().NowaitRegion = IsNowait;
932 bool isParentNowaitRegion()
const {
933 if (
const SharingMapTy *Parent = getSecondOnStackOrNull())
934 return Parent->NowaitRegion;
938 void setUntiedRegion(
bool IsUntied =
true) {
939 getTopOfStack().UntiedRegion = IsUntied;
/// True if the current (top-of-stack) region carries an untied clause.
/// Returns false on an empty stack.
942 bool isUntiedRegion()
const {
943 const SharingMapTy *Top = getTopOfStackOrNull();
944 return Top ? Top->UntiedRegion :
false;
947 void setParentCancelRegion(
bool Cancel =
true) {
948 if (SharingMapTy *Parent = getSecondOnStackOrNull())
949 Parent->CancelRegion |= Cancel;
/// True if cancellation was flagged for the current region (set on the
/// parent by setParentCancelRegion). Returns false on an empty stack.
952 bool isCancelRegion()
const {
953 const SharingMapTy *Top = getTopOfStackOrNull();
954 return Top ? Top->CancelRegion :
false;
958 void setParentHasScanDirective(SourceLocation Loc) {
959 if (SharingMapTy *Parent = getSecondOnStackOrNull())
960 Parent->PrevScanLocation = Loc;
963 bool doesParentHasScanDirective()
const {
964 const SharingMapTy *Top = getSecondOnStackOrNull();
965 return Top ? Top->PrevScanLocation.isValid() :
false;
968 SourceLocation getParentScanDirectiveLoc()
const {
969 const SharingMapTy *Top = getSecondOnStackOrNull();
970 return Top ? Top->PrevScanLocation : SourceLocation();
973 void setParentHasOrderedDirective(SourceLocation Loc) {
974 if (SharingMapTy *Parent = getSecondOnStackOrNull())
975 Parent->PrevOrderedLocation = Loc;
978 bool doesParentHasOrderedDirective()
const {
979 const SharingMapTy *Top = getSecondOnStackOrNull();
980 return Top ? Top->PrevOrderedLocation.isValid() :
false;
983 SourceLocation getParentOrderedDirectiveLoc()
const {
984 const SharingMapTy *Top = getSecondOnStackOrNull();
985 return Top ? Top->PrevOrderedLocation : SourceLocation();
989 void setAssociatedLoops(
unsigned Val) {
990 getTopOfStack().AssociatedLoops = Val;
992 getTopOfStack().HasMutipleLoops =
true;
/// Returns the number of loops associated with the current directive
/// (SharingMapTy::AssociatedLoops, default 1); 0 on an empty stack.
995 unsigned getAssociatedLoops()
const {
996 const SharingMapTy *Top = getTopOfStackOrNull();
997 return Top ? Top->AssociatedLoops : 0;
/// True if the current directive has more than one associated loop.
/// NOTE(review): "Mutiple" is a long-standing typo for "Multiple"; kept
/// because the member and callers elsewhere use the same spelling.
1000 bool hasMutipleLoops()
const {
1001 const SharingMapTy *Top = getTopOfStackOrNull();
1002 return Top ? Top->HasMutipleLoops :
false;
1007 void setParentTeamsRegionLoc(SourceLocation TeamsRegionLoc) {
1008 if (SharingMapTy *Parent = getSecondOnStackOrNull())
1009 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
1012 bool hasInnerTeamsRegion()
const {
1013 return getInnerTeamsRegionLoc().
isValid();
1016 SourceLocation getInnerTeamsRegionLoc()
const {
1017 const SharingMapTy *Top = getTopOfStackOrNull();
1018 return Top ? Top->InnerTeamsRegionLoc : SourceLocation();
1021 Scope *getCurScope()
const {
1022 const SharingMapTy *Top = getTopOfStackOrNull();
1023 return Top ? Top->CurScope :
nullptr;
/// Associates a DeclContext with the current directive's stack entry.
1025 void setContext(DeclContext *DC) { getTopOfStack().Context = DC; }
1026 SourceLocation getConstructLoc()
const {
1027 const SharingMapTy *Top = getTopOfStackOrNull();
1028 return Top ? Top->ConstructLoc : SourceLocation();
1033 bool checkMappableExprComponentListsForDecl(
1034 const ValueDecl *VD,
bool CurrentRegionOnly,
1035 const llvm::function_ref<
1047 if (CurrentRegionOnly)
1050 std::advance(SI, 1);
1052 for (; SI != SE; ++SI) {
1053 auto MI = SI->MappedExprComponents.find(VD);
1054 if (MI != SI->MappedExprComponents.end())
1056 MI->second.Components)
1057 if (Check(L, MI->second.Kind))
1065 bool checkMappableExprComponentListsForDeclAtLevel(
1066 const ValueDecl *VD,
unsigned Level,
1067 const llvm::function_ref<
1071 if (getStackSize() <= Level)
1074 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1075 auto MI = StackElem.MappedExprComponents.find(VD);
1076 if (MI != StackElem.MappedExprComponents.end())
1078 MI->second.Components)
1079 if (Check(L, MI->second.Kind))
1086 void addMappableExpressionComponents(
1087 const ValueDecl *VD,
1090 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1092 MEC.Components.resize(MEC.Components.size() + 1);
1093 MEC.Components.back().append(Components.begin(), Components.end());
1094 MEC.Kind = WhereFoundClauseKind;
/// Returns the zero-based nesting level of the current directive
/// (stack size minus one). Asserts on an empty stack.
1097 unsigned getNestingLevel()
const {
1098 assert(!isStackEmpty());
1099 return getStackSize() - 1;
1101 void addDoacrossDependClause(OMPClause *
C,
const OperatorOffsetTy &OpsOffs) {
1102 SharingMapTy *Parent = getSecondOnStackOrNull();
1104 Parent->DoacrossDepends.try_emplace(
C, OpsOffs);
1106 llvm::iterator_range<DoacrossClauseMapTy::const_iterator>
1107 getDoacrossDependClauses()
const {
1108 const SharingMapTy &StackElem = getTopOfStack();
1110 const DoacrossClauseMapTy &Ref = StackElem.DoacrossDepends;
1111 return llvm::make_range(Ref.begin(), Ref.end());
1113 return llvm::make_range(StackElem.DoacrossDepends.end(),
1114 StackElem.DoacrossDepends.end());
1118 void addMappedClassesQualTypes(QualType QT) {
1119 SharingMapTy &StackElem = getTopOfStack();
1120 StackElem.MappedClassesQualTypes.insert(QT);
1124 bool isClassPreviouslyMapped(QualType QT)
const {
1125 const SharingMapTy &StackElem = getTopOfStack();
1126 return StackElem.MappedClassesQualTypes.contains(QT);
1130 void addToParentTargetRegionLinkGlobals(DeclRefExpr *E) {
1131 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1132 E->
getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1133 "Expected declare target link global.");
1134 for (
auto &Elem : *
this) {
1136 Elem.DeclareTargetLinkVarDecls.push_back(E);
1144 ArrayRef<DeclRefExpr *> getLinkGlobals()
const {
1146 "Expected target executable directive.");
1147 return getTopOfStack().DeclareTargetLinkVarDecls;
1151 void addInnerAllocatorExpr(Expr *E) {
1152 getTopOfStack().InnerUsedAllocators.push_back(E);
1155 ArrayRef<Expr *> getInnerAllocators()
const {
1156 return getTopOfStack().InnerUsedAllocators;
1160 void addImplicitTaskFirstprivate(
unsigned Level, Decl *D) {
1161 getStackElemAtLevel(Level).ImplicitTaskFirstprivates.insert(D);
1164 bool isImplicitTaskFirstprivate(Decl *D)
const {
1165 return getTopOfStack().ImplicitTaskFirstprivates.contains(D);
1169 void addUsesAllocatorsDecl(
const Decl *D, UsesAllocatorsDeclKind Kind) {
1170 getTopOfStack().UsesAllocatorsDecls.try_emplace(D, Kind);
1174 std::optional<UsesAllocatorsDeclKind>
1175 isUsesAllocatorsDecl(
unsigned Level,
const Decl *D)
const {
1176 const SharingMapTy &StackElem = getTopOfStack();
1177 auto I = StackElem.UsesAllocatorsDecls.find(D);
1178 if (I == StackElem.UsesAllocatorsDecls.end())
1179 return std::nullopt;
1180 return I->getSecond();
/// Checks whether \p D was registered via a uses_allocators clause on the
/// current (top-of-stack) region and, if so, returns how it is used there.
/// Returns std::nullopt when the declaration is not recorded on the top
/// region. Requires a non-empty stack (getTopOfStack asserts).
1182 std::optional<UsesAllocatorsDeclKind>
1183 isUsesAllocatorsDecl(
const Decl *D)
const {
1184 const SharingMapTy &StackElem = getTopOfStack();
1185 auto I = StackElem.UsesAllocatorsDecls.find(D);
1186 if (I == StackElem.UsesAllocatorsDecls.end())
1187 return std::nullopt;
1188 return I->getSecond();
1191 void addDeclareMapperVarRef(Expr *Ref) {
1192 SharingMapTy &StackElem = getTopOfStack();
1193 StackElem.DeclareMapperVar = Ref;
1195 const Expr *getDeclareMapperVarRef()
const {
1196 const SharingMapTy *Top = getTopOfStackOrNull();
1197 return Top ? Top->DeclareMapperVar :
nullptr;
1201 void addIteratorVarDecl(VarDecl *VD) {
1202 SharingMapTy &StackElem = getTopOfStack();
1206 bool isIteratorVarDecl(
const VarDecl *VD)
const {
1207 const SharingMapTy *Top = getTopOfStackOrNull();
1214 VarDecl *getImplicitFDCapExprDecl(
const FieldDecl *FD)
const {
1215 const_iterator I = begin();
1216 const_iterator EndI = end();
1217 size_t StackLevel = getStackSize();
1218 for (; I != EndI; ++I) {
1219 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1223 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1226 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1227 if (IFD.FD == FD && IFD.StackLevel == StackLevel)
1232 bool isImplicitDefaultFirstprivateFD(VarDecl *VD)
const {
1233 const_iterator I = begin();
1234 const_iterator EndI = end();
1235 for (; I != EndI; ++I)
1236 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1240 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1246 void addImplicitDefaultFirstprivateFD(
const FieldDecl *FD, VarDecl *VD) {
1247 iterator I = begin();
1248 const_iterator EndI = end();
1249 size_t StackLevel = getStackSize();
1250 for (; I != EndI; ++I) {
1251 if (I->DefaultAttr == DSA_private || I->DefaultAttr == DSA_firstprivate) {
1252 I->ImplicitDefaultFirstprivateFDs.emplace_back(FD, StackLevel, VD);
1257 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1267 DKind == OMPD_unknown;
1273 if (
const auto *FE = dyn_cast<FullExpr>(E))
1274 E = FE->getSubExpr();
1276 if (
const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1277 E = MTE->getSubExpr();
1279 while (
const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(E))
1280 E = Binder->getSubExpr();
1282 if (
const auto *ICE = dyn_cast<ImplicitCastExpr>(E))
1283 E = ICE->getSubExprAsWritten();
1292 if (
const auto *CED = dyn_cast<OMPCapturedExprDecl>(D))
1293 if (
const auto *ME = dyn_cast<MemberExpr>(
getExprAsWritten(CED->getInit())))
1294 D = ME->getMemberDecl();
1306 if (
C == OMPC_threadprivate)
1307 return getOpenMPClauseName(
C).str() +
" or thread local";
1308 return getOpenMPClauseName(
C).str();
1314 auto *VD = dyn_cast<VarDecl>(D);
1315 const auto *FD = dyn_cast<FieldDecl>(D);
1317 if (Iter == end()) {
1324 DVar.CKind = OMPC_shared;
1331 DVar.CKind = OMPC_shared;
1335 DVar.CKind = OMPC_shared;
1346 DVar.CKind = OMPC_private;
1350 DVar.DKind = Iter->Directive;
1353 if (Iter->SharingMap.count(D)) {
1354 const DSAInfo &
Data = Iter->SharingMap.lookup(D);
1355 DVar.RefExpr =
Data.RefExpr.getPointer();
1356 DVar.PrivateCopy =
Data.PrivateCopy;
1357 DVar.CKind =
Data.Attributes;
1358 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1359 DVar.Modifier =
Data.Modifier;
1360 DVar.AppliedToPointee =
Data.AppliedToPointee;
1364 DefaultDataSharingAttributes IterDA = Iter->DefaultAttr;
1365 switch (Iter->DefaultVCAttr) {
1366 case DSA_VC_aggregate:
1370 case DSA_VC_pointer:
1388 DVar.CKind = OMPC_shared;
1389 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1393 case DSA_firstprivate:
1396 DVar.CKind = OMPC_unknown;
1398 DVar.CKind = OMPC_firstprivate;
1400 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1408 DVar.CKind = OMPC_unknown;
1410 DVar.CKind = OMPC_private;
1412 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1414 case DSA_unspecified:
1419 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1423 DVar.CKind = OMPC_shared;
1433 DSAVarData DVarTemp;
1434 const_iterator I = Iter, E = end();
1442 DVarTemp = getDSA(I, D);
1443 if (DVarTemp.CKind != OMPC_shared) {
1444 DVar.RefExpr =
nullptr;
1445 DVar.CKind = OMPC_firstprivate;
1448 }
while (I != E && !isImplicitTaskingRegion(I->Directive));
1450 (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
1459 return getDSA(++Iter, D);
1462const Expr *DSAStackTy::addUniqueAligned(
const ValueDecl *D,
1463 const Expr *NewDE) {
1464 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1466 SharingMapTy &StackElem = getTopOfStack();
1467 auto [It, Inserted] = StackElem.AlignedMap.try_emplace(D, NewDE);
1469 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1472 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1476const Expr *DSAStackTy::addUniqueNontemporal(
const ValueDecl *D,
1477 const Expr *NewDE) {
1478 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1480 SharingMapTy &StackElem = getTopOfStack();
1481 auto [It, Inserted] = StackElem.NontemporalMap.try_emplace(D, NewDE);
1483 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1486 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1490void DSAStackTy::addLoopControlVariable(
const ValueDecl *D, VarDecl *
Capture) {
1491 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1493 SharingMapTy &StackElem = getTopOfStack();
1494 StackElem.LCVMap.try_emplace(
1495 D, LCDeclInfo(StackElem.LCVMap.size() + 1,
Capture));
1498const DSAStackTy::LCDeclInfo
1499DSAStackTy::isLoopControlVariable(
const ValueDecl *D)
const {
1500 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1502 const SharingMapTy &StackElem = getTopOfStack();
1503 auto It = StackElem.LCVMap.find(D);
1504 if (It != StackElem.LCVMap.end())
1506 return {0,
nullptr};
1509const DSAStackTy::LCDeclInfo
1510DSAStackTy::isLoopControlVariable(
const ValueDecl *D,
unsigned Level)
const {
1511 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1513 for (
unsigned I = Level + 1; I > 0; --I) {
1514 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1515 auto It = StackElem.LCVMap.find(D);
1516 if (It != StackElem.LCVMap.end())
1519 return {0,
nullptr};
1522const DSAStackTy::LCDeclInfo
1523DSAStackTy::isParentLoopControlVariable(
const ValueDecl *D)
const {
1524 const SharingMapTy *Parent = getSecondOnStackOrNull();
1525 assert(Parent &&
"Data-sharing attributes stack is empty");
1527 auto It = Parent->LCVMap.find(D);
1528 if (It != Parent->LCVMap.end())
1530 return {0,
nullptr};
1533const ValueDecl *DSAStackTy::getParentLoopControlVariable(
unsigned I)
const {
1534 const SharingMapTy *Parent = getSecondOnStackOrNull();
1535 assert(Parent &&
"Data-sharing attributes stack is empty");
1536 if (Parent->LCVMap.size() < I)
1538 for (
const auto &Pair : Parent->LCVMap)
1539 if (Pair.second.first == I)
1544void DSAStackTy::addDSA(
const ValueDecl *D,
const Expr *E,
OpenMPClauseKind A,
1545 DeclRefExpr *PrivateCopy,
unsigned Modifier,
1546 bool AppliedToPointee) {
1548 if (A == OMPC_threadprivate) {
1549 DSAInfo &
Data = Threadprivates[D];
1550 Data.Attributes = A;
1551 Data.RefExpr.setPointer(E);
1552 Data.PrivateCopy =
nullptr;
1553 Data.Modifier = Modifier;
1554 }
else if (A == OMPC_groupprivate) {
1555 DSAInfo &
Data = Groupprivates[D];
1556 Data.Attributes = A;
1557 Data.RefExpr.setPointer(E);
1558 Data.PrivateCopy =
nullptr;
1559 Data.Modifier = Modifier;
1561 DSAInfo &
Data = getTopOfStack().SharingMap[D];
1562 assert(
Data.Attributes == OMPC_unknown || (A ==
Data.Attributes) ||
1563 (A == OMPC_firstprivate &&
Data.Attributes == OMPC_lastprivate) ||
1564 (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) ||
1565 (isLoopControlVariable(D).first && A == OMPC_private));
1566 Data.Modifier = Modifier;
1567 if (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) {
1568 Data.RefExpr.setInt(
true);
1571 const bool IsLastprivate =
1572 A == OMPC_lastprivate ||
Data.Attributes == OMPC_lastprivate;
1573 Data.Attributes = A;
1574 Data.RefExpr.setPointerAndInt(E, IsLastprivate);
1575 Data.PrivateCopy = PrivateCopy;
1576 Data.AppliedToPointee = AppliedToPointee;
1578 DSAInfo &
Data = getTopOfStack().SharingMap[PrivateCopy->
getDecl()];
1579 Data.Modifier = Modifier;
1580 Data.Attributes = A;
1581 Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
1582 Data.PrivateCopy =
nullptr;
1583 Data.AppliedToPointee = AppliedToPointee;
1590 StringRef Name,
const AttrVec *Attrs =
nullptr,
1605 OMPReferencedVarAttr::CreateImplicit(SemaRef.
Context, OrigRef));
1612 bool RefersToCapture =
false) {
1620void DSAStackTy::addTaskgroupReductionData(
const ValueDecl *D, SourceRange SR,
1623 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1625 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1626 "Additional reduction info may be specified only for reduction items.");
1627 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1628 assert(ReductionData.ReductionRange.isInvalid() &&
1629 (getTopOfStack().
Directive == OMPD_taskgroup ||
1633 "Additional reduction info may be specified only once for reduction "
1635 ReductionData.set(BOK, SR);
1636 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1637 if (!TaskgroupReductionRef) {
1640 TaskgroupReductionRef =
1645void DSAStackTy::addTaskgroupReductionData(
const ValueDecl *D, SourceRange SR,
1646 const Expr *ReductionRef) {
1648 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1650 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1651 "Additional reduction info may be specified only for reduction items.");
1652 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1653 assert(ReductionData.ReductionRange.isInvalid() &&
1654 (getTopOfStack().
Directive == OMPD_taskgroup ||
1658 "Additional reduction info may be specified only once for reduction "
1660 ReductionData.set(ReductionRef, SR);
1661 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1662 if (!TaskgroupReductionRef) {
1665 TaskgroupReductionRef =
1670const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1672 Expr *&TaskgroupDescriptor)
const {
1674 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1675 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1676 const DSAInfo &
Data = I->SharingMap.lookup(D);
1677 if (
Data.Attributes != OMPC_reduction ||
1678 Data.Modifier != OMPC_REDUCTION_task)
1680 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1681 if (!ReductionData.ReductionOp ||
1683 return DSAVarData();
1684 SR = ReductionData.ReductionRange;
1686 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1687 "expression for the descriptor is not "
1689 TaskgroupDescriptor = I->TaskgroupReductionRef;
1690 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1691 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1694 return DSAVarData();
1697const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1698 const ValueDecl *D, SourceRange &SR,
const Expr *&ReductionRef,
1699 Expr *&TaskgroupDescriptor)
const {
1701 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1702 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1703 const DSAInfo &
Data = I->SharingMap.lookup(D);
1704 if (
Data.Attributes != OMPC_reduction ||
1705 Data.Modifier != OMPC_REDUCTION_task)
1707 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1708 if (!ReductionData.ReductionOp ||
1710 return DSAVarData();
1711 SR = ReductionData.ReductionRange;
1713 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1714 "expression for the descriptor is not "
1716 TaskgroupDescriptor = I->TaskgroupReductionRef;
1717 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1718 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1721 return DSAVarData();
1724bool DSAStackTy::isOpenMPLocal(VarDecl *D, const_iterator I)
const {
1726 for (const_iterator E = end(); I != E; ++I) {
1727 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
1730 Scope *TopScope = I->CurScope->getParent();
1731 Scope *CurScope = getCurScope();
1732 while (CurScope && CurScope != TopScope && !CurScope->
isDeclScope(D))
1734 return CurScope != TopScope;
1737 if (I->Context == DC)
1746 bool AcceptIfMutable =
true,
1747 bool *IsClassType =
nullptr) {
1749 Type =
Type.getNonReferenceType().getCanonicalType();
1750 bool IsConstant =
Type.isConstant(Context);
1751 Type = Context.getBaseElementType(
Type);
1755 if (
const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1757 RD = CTD->getTemplatedDecl();
1767 bool AcceptIfMutable =
true,
1768 bool ListItemNotVar =
false) {
1772 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1773 : IsClassType ? diag::err_omp_const_not_mutable_variable
1774 : diag::err_omp_const_variable;
1776 if (!ListItemNotVar && D) {
1777 const VarDecl *VD = dyn_cast<VarDecl>(D);
1781 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
1789const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(ValueDecl *D,
1794 auto *VD = dyn_cast<VarDecl>(D);
1795 auto TI = Threadprivates.find(D);
1796 if (TI != Threadprivates.end()) {
1797 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1798 DVar.CKind = OMPC_threadprivate;
1799 DVar.Modifier = TI->getSecond().Modifier;
1802 if (VD && VD->
hasAttr<OMPThreadPrivateDeclAttr>()) {
1805 VD->
getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1806 DVar.CKind = OMPC_threadprivate;
1807 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1814 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
1821 DVar.CKind = OMPC_threadprivate;
1822 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1827 !isLoopControlVariable(D).first) {
1828 const_iterator IterTarget =
1829 std::find_if(begin(), end(), [](
const SharingMapTy &
Data) {
1832 if (IterTarget != end()) {
1833 const_iterator ParentIterTarget = IterTarget + 1;
1834 for (const_iterator Iter = begin(); Iter != ParentIterTarget; ++Iter) {
1835 if (isOpenMPLocal(VD, Iter)) {
1839 DVar.CKind = OMPC_threadprivate;
1843 if (!isClauseParsingMode() || IterTarget != begin()) {
1844 auto DSAIter = IterTarget->SharingMap.find(D);
1845 if (DSAIter != IterTarget->SharingMap.end() &&
1847 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1848 DVar.CKind = OMPC_threadprivate;
1851 const_iterator End = end();
1853 D, std::distance(ParentIterTarget, End),
1857 IterTarget->ConstructLoc);
1858 DVar.CKind = OMPC_threadprivate;
1878 const_iterator I = begin();
1879 const_iterator EndI = end();
1880 if (FromParent && I != EndI)
1883 auto It = I->SharingMap.find(D);
1884 if (It != I->SharingMap.end()) {
1885 const DSAInfo &
Data = It->getSecond();
1886 DVar.RefExpr =
Data.RefExpr.getPointer();
1887 DVar.PrivateCopy =
Data.PrivateCopy;
1888 DVar.CKind =
Data.Attributes;
1889 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1890 DVar.DKind = I->Directive;
1891 DVar.Modifier =
Data.Modifier;
1892 DVar.AppliedToPointee =
Data.AppliedToPointee;
1897 DVar.CKind = OMPC_shared;
1904 if (SemaRef.
LangOpts.OpenMP <= 31) {
1912 DSAVarData DVarTemp = hasInnermostDSA(
1915 return C == OMPC_firstprivate ||
C == OMPC_shared;
1917 MatchesAlways, FromParent);
1918 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1921 DVar.CKind = OMPC_shared;
1928 const_iterator I = begin();
1929 const_iterator EndI = end();
1930 if (FromParent && I != EndI)
1934 auto It = I->SharingMap.find(D);
1935 if (It != I->SharingMap.end()) {
1936 const DSAInfo &
Data = It->getSecond();
1937 DVar.RefExpr =
Data.RefExpr.getPointer();
1938 DVar.PrivateCopy =
Data.PrivateCopy;
1939 DVar.CKind =
Data.Attributes;
1940 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1941 DVar.DKind = I->Directive;
1942 DVar.Modifier =
Data.Modifier;
1943 DVar.AppliedToPointee =
Data.AppliedToPointee;
1949const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1950 bool FromParent)
const {
1951 if (isStackEmpty()) {
1953 return getDSA(I, D);
1956 const_iterator StartI = begin();
1957 const_iterator EndI = end();
1958 if (FromParent && StartI != EndI)
1960 return getDSA(StartI, D);
1963const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1964 unsigned Level)
const {
1965 if (getStackSize() <= Level)
1966 return DSAVarData();
1968 const_iterator StartI = std::next(begin(), getStackSize() - 1 - Level);
1969 return getDSA(StartI, D);
1972const DSAStackTy::DSAVarData
1973DSAStackTy::hasDSA(ValueDecl *D,
1975 DefaultDataSharingAttributes)>
1978 bool FromParent)
const {
1982 const_iterator I = begin();
1983 const_iterator EndI = end();
1984 if (FromParent && I != EndI)
1986 for (; I != EndI; ++I) {
1987 if (!DPred(I->Directive) &&
1988 !isImplicitOrExplicitTaskingRegion(I->Directive))
1990 const_iterator NewI = I;
1991 DSAVarData DVar = getDSA(NewI, D);
1992 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee, I->DefaultAttr))
1998const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1999 ValueDecl *D,
const llvm::function_ref<
bool(
OpenMPClauseKind,
bool)> CPred,
2001 bool FromParent)
const {
2005 const_iterator StartI = begin();
2006 const_iterator EndI = end();
2007 if (FromParent && StartI != EndI)
2009 if (StartI == EndI || !DPred(StartI->Directive))
2011 const_iterator NewI = StartI;
2012 DSAVarData DVar = getDSA(NewI, D);
2013 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
2018bool DSAStackTy::hasExplicitDSA(
2021 unsigned Level,
bool NotLastprivate)
const {
2022 if (getStackSize() <= Level)
2025 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
2026 auto I = StackElem.SharingMap.find(D);
2027 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
2028 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
2029 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
2032 auto LI = StackElem.LCVMap.find(D);
2033 if (LI != StackElem.LCVMap.end())
2034 return CPred(OMPC_private,
false);
2038bool DSAStackTy::hasExplicitDirective(
2040 unsigned Level)
const {
2041 if (getStackSize() <= Level)
2043 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
2044 return DPred(StackElem.Directive);
2047bool DSAStackTy::hasDirective(
2049 const DeclarationNameInfo &, SourceLocation)>
2051 bool FromParent)
const {
2053 size_t Skip = FromParent ? 2 : 1;
2054 for (const_iterator I = begin() + std::min(
Skip, getStackSize()), E = end();
2056 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
2062void SemaOpenMP::InitDataSharingAttributesStack() {
2063 VarDataSharingAttributesStack =
new DSAStackTy(SemaRef);
2066#define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
2068void SemaOpenMP::pushOpenMPFunctionRegion() {
DSAStack->pushFunction(); }
2070void SemaOpenMP::popOpenMPFunctionRegion(
const FunctionScopeInfo *OldFSI) {
2076 "Expected OpenMP device compilation.");
2082enum class FunctionEmissionStatus {
2089SemaBase::SemaDiagnosticBuilder
2093 "Expected OpenMP device compilation.");
2095 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2100 Kind = SemaDiagnosticBuilder::K_Immediate;
2111 ? SemaDiagnosticBuilder::K_Deferred
2112 : SemaDiagnosticBuilder::K_Immediate;
2116 Kind = SemaDiagnosticBuilder::K_Nop;
2119 llvm_unreachable(
"CUDADiscarded unexpected in OpenMP device compilation");
2131 "Expected OpenMP host compilation.");
2133 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2138 Kind = SemaDiagnosticBuilder::K_Immediate;
2141 Kind = SemaDiagnosticBuilder::K_Deferred;
2146 Kind = SemaDiagnosticBuilder::K_Nop;
2156 if (LO.OpenMP <= 45) {
2158 return OMPC_DEFAULTMAP_scalar;
2159 return OMPC_DEFAULTMAP_aggregate;
2162 return OMPC_DEFAULTMAP_pointer;
2164 return OMPC_DEFAULTMAP_scalar;
2165 return OMPC_DEFAULTMAP_aggregate;
2169 unsigned OpenMPCaptureLevel)
const {
2170 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2173 bool IsByRef =
true;
2179 bool IsVariableUsedInMapClause =
false;
2241 bool IsVariableAssociatedWithSection =
false;
2243 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2245 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2252 if (WhereFoundClauseKind != OMPC_map &&
2253 WhereFoundClauseKind != OMPC_has_device_addr)
2256 auto EI = MapExprComponents.rbegin();
2257 auto EE = MapExprComponents.rend();
2259 assert(EI != EE &&
"Invalid map expression!");
2262 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() == D;
2267 auto Last = std::prev(EE);
2269 dyn_cast<UnaryOperator>(
Last->getAssociatedExpression());
2270 if ((UO && UO->getOpcode() == UO_Deref) ||
2275 IsVariableAssociatedWithSection =
true;
2284 if (IsVariableUsedInMapClause) {
2287 IsByRef = !(Ty->
isPointerType() && IsVariableAssociatedWithSection);
2292 IsByRef = (
DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2295 DSAStack->isDefaultmapCapturedByRef(
2300 return K == OMPC_reduction && !AppliedToPointee;
2308 ((IsVariableUsedInMapClause &&
2309 DSAStack->getCaptureRegion(Level, OpenMPCaptureLevel) ==
2314 return K == OMPC_firstprivate ||
2315 (K == OMPC_reduction && AppliedToPointee);
2318 DSAStack->isUsesAllocatorsDecl(Level, D))) &&
2325 !((
DSAStack->getDefaultDSA() == DSA_firstprivate ||
2326 DSAStack->getDefaultDSA() == DSA_private) &&
2330 !
DSAStack->isLoopControlVariable(D, Level).first);
2347unsigned SemaOpenMP::getOpenMPNestingLevel()
const {
2348 assert(getLangOpts().OpenMP);
2349 return DSAStack->getNestingLevel();
2359 !
DSAStack->isClauseParsingMode()) ||
2372 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2375 DefaultDataSharingAttributes DefaultAttr) {
2377 (DefaultAttr == DSA_firstprivate || DefaultAttr == DSA_private);
2381 if (DVarPrivate.CKind != OMPC_unknown)
2387 Expr *CaptureExpr,
bool WithInit,
2393 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2396 auto *VD = dyn_cast<VarDecl>(D);
2405 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2415 DSAStackTy::DSAVarData DVarTop =
2417 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2422 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2426 llvm::reverse(
SemaRef.FunctionScopes),
2427 CheckScopeInfo ? (
SemaRef.FunctionScopes.size() - (StopAt + 1))
2431 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2437 assert(CSI &&
"Failed to find CapturedRegionScopeInfo");
2448 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2454 if (CheckScopeInfo) {
2455 bool OpenMPFound =
false;
2456 for (
unsigned I = StopAt + 1; I > 0; --I) {
2460 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2470 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2471 (!
DSAStack->isClauseParsingMode() ||
2472 DSAStack->getParentDirective() != OMPD_unknown)) {
2473 auto &&Info =
DSAStack->isLoopControlVariable(D);
2476 isImplicitOrExplicitTaskingRegion(
DSAStack->getCurrentDirective())) ||
2477 (VD &&
DSAStack->isForceVarCapturing()))
2478 return VD ? VD : Info.second;
2479 DSAStackTy::DSAVarData DVarTop =
2481 if (DVarTop.CKind != OMPC_unknown &&
isOpenMPPrivate(DVarTop.CKind) &&
2483 !(DVarTop.AppliedToPointee && DVarTop.CKind != OMPC_reduction)))
2490 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2498 if (VD && !VD->
hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2499 ((
DSAStack->getDefaultDSA() != DSA_none &&
2500 DSAStack->getDefaultDSA() != DSA_private &&
2501 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2502 DVarTop.CKind == OMPC_shared))
2504 auto *FD = dyn_cast<FieldDecl>(D);
2505 if (DVarPrivate.CKind != OMPC_unknown && !VD && FD &&
2506 !DVarPrivate.PrivateCopy) {
2507 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2510 DefaultDataSharingAttributes DefaultAttr) {
2512 (DefaultAttr == DSA_firstprivate ||
2513 DefaultAttr == DSA_private);
2517 if (DVarPrivate.CKind == OMPC_unknown)
2523 if (
SemaRef.getCurrentThisType().isNull())
2537 SemaRef.CurContext->getParent(),
false);
2541 DSAStack->addImplicitDefaultFirstprivateFD(FD, VD);
2544 if (DVarPrivate.CKind != OMPC_unknown ||
2545 (VD && (
DSAStack->getDefaultDSA() == DSA_none ||
2546 DSAStack->getDefaultDSA() == DSA_private ||
2547 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2553void SemaOpenMP::adjustOpenMPTargetScopeIndex(
unsigned &FunctionScopesIndex,
2554 unsigned Level)
const {
2555 FunctionScopesIndex -= getOpenMPCaptureLevels(
DSAStack->getDirective(Level));
2559 assert(
getLangOpts().OpenMP &&
"OpenMP must be enabled.");
2565 assert(
getLangOpts().OpenMP &&
"OpenMP must be enabled.");
2567 DSAStack->resetPossibleLoopCounter();
2573 unsigned CapLevel)
const {
2574 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2575 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2576 (!
DSAStack->isClauseParsingMode() ||
2577 DSAStack->getParentDirective() != OMPD_unknown)) {
2578 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2581 DefaultDataSharingAttributes DefaultAttr) {
2583 DefaultAttr == DSA_private;
2589 !
DSAStack->isLoopControlVariable(D).first)
2590 return OMPC_private;
2593 bool IsTriviallyCopyable =
2604 (IsTriviallyCopyable ||
2610 return OMPC_firstprivate;
2611 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D, Level);
2612 if (DVar.CKind != OMPC_shared &&
2613 !
DSAStack->isLoopControlVariable(D, Level).first && !DVar.RefExpr) {
2614 DSAStack->addImplicitTaskFirstprivate(Level, D);
2615 return OMPC_firstprivate;
2622 DSAStack->resetPossibleLoopCounter(D);
2624 return OMPC_private;
2627 DSAStack->isLoopControlVariable(D).first) &&
2632 return OMPC_private;
2634 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2640 return OMPC_private;
2645 DSAStack->isUsesAllocatorsDecl(Level, D).value_or(
2646 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2647 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2648 return OMPC_private;
2652 (
DSAStack->isClauseParsingMode() &&
2653 DSAStack->getClauseParsingMode() == OMPC_private) ||
2658 return K == OMPD_taskgroup ||
2659 ((isOpenMPParallelDirective(K) ||
2660 isOpenMPWorksharingDirective(K)) &&
2661 !isOpenMPSimdDirective(K));
2664 DSAStack->isTaskgroupReductionRef(D, Level)))
2671 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2674 for (
unsigned I =
DSAStack->getNestingLevel() + 1; I > Level; --I) {
2675 const unsigned NewLevel = I - 1;
2679 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2687 if (
DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2697 if (
DSAStack->mustBeFirstprivateAtLevel(
2699 OMPC = OMPC_firstprivate;
2703 if (OMPC != OMPC_unknown)
2705 OMPCaptureKindAttr::CreateImplicit(getASTContext(),
unsigned(OMPC)));
2709 unsigned CaptureLevel)
const {
2710 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2715 const auto *VD = dyn_cast<VarDecl>(D);
2719 Regions[CaptureLevel] != OMPD_task;
2723 unsigned CaptureLevel)
const {
2724 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2727 if (
const auto *VD = dyn_cast<VarDecl>(D)) {
2731 DSAStackTy::DSAVarData TopDVar =
2733 unsigned NumLevels =
2738 return (NumLevels == CaptureLevel + 1 &&
2739 (TopDVar.CKind != OMPC_shared ||
2740 DSAStack->getDefaultDSA() == DSA_firstprivate));
2743 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(D, Level);
2744 if (DVar.CKind != OMPC_shared)
2746 }
while (Level > 0);
2752void SemaOpenMP::DestroyDataSharingAttributesStack() {
delete DSAStack; }
2756 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2761 "Not in OpenMP declare variant scope!");
2763 OMPDeclareVariantScopes.pop_back();
2769 assert(
getLangOpts().OpenMP &&
"Expected OpenMP compilation mode.");
2770 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2774 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2777 if (!
getLangOpts().OpenMPIsTargetDevice && DevTy &&
2778 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2781 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2782 if (
getLangOpts().OpenMPIsTargetDevice && DevTy &&
2783 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2785 StringRef HostDevTy =
2787 Diag(Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2788 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2789 diag::note_omp_marked_device_type_here)
2795 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2799 for (OMPDeclareVariantAttr *A :
2800 Callee->specific_attrs<OMPDeclareVariantAttr>()) {
2803 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2804 OMPDeclareTargetDeclAttr::getDeviceType(
2805 VariantFD->getMostRecentDecl());
2806 if (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host)
2812 Callee->hasAttr<OMPDeclareVariantAttr>() && HasHostAttr(Callee))
2816 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2817 Diag(Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2818 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2819 diag::note_omp_marked_device_type_here)
2827 DSAStack->push(DKind, DirName, CurScope, Loc);
2828 SemaRef.PushExpressionEvaluationContext(
2837 DSAStack->setClauseParsingMode(OMPC_unknown);
2838 SemaRef.CleanupVarDeclMarking();
2841static std::pair<ValueDecl *, bool>
2843 SourceRange &ERange,
bool AllowArraySection =
false,
2844 bool AllowAssumedSizeArray =
false, StringRef DiagType =
"");
2849 bool InscanFound =
false;
2856 if (
C->getClauseKind() != OMPC_reduction)
2859 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2861 InscanLoc = RC->getModifierLoc();
2864 if (RC->getModifier() == OMPC_REDUCTION_task) {
2874 S.
Diag(RC->getModifierLoc(),
2875 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2881 if (
C->getClauseKind() != OMPC_reduction)
2884 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2887 : RC->getModifierLoc(),
2888 diag::err_omp_inscan_reduction_expected);
2889 S.
Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2892 for (
Expr *Ref : RC->varlist()) {
2893 assert(Ref &&
"NULL expr in OpenMP reduction clause.");
2896 Expr *SimpleRefExpr = Ref;
2903 S.
Diag(Ref->getExprLoc(),
2904 diag::err_omp_reduction_not_inclusive_exclusive)
2905 << Ref->getSourceRange();
2913 ArrayRef<OMPClause *> Clauses);
2914static DeclRefExpr *
buildCapture(Sema &S, ValueDecl *D, Expr *CaptureExpr,
2919 const DSAStackTy::DSAVarData &DVar,
2920 bool IsLoopIterVar =
false);
2931 for (
Expr *DE : Clause->varlist()) {
2932 if (DE->isValueDependent() || DE->isTypeDependent()) {
2933 PrivateCopies.push_back(
nullptr);
2939 const DSAStackTy::DSAVarData DVar =
2941 if (DVar.CKind != OMPC_lastprivate) {
2944 PrivateCopies.push_back(
nullptr);
2955 SemaRef.ActOnUninitializedDecl(VDPrivate);
2957 PrivateCopies.push_back(
nullptr);
2963 Clause->setPrivateCopies(PrivateCopies);
2969 for (
Expr *RefExpr : Clause->varlist()) {
2970 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
2973 Expr *SimpleRefExpr = RefExpr;
2977 PrivateRefs.push_back(RefExpr);
2982 const DSAStackTy::DSAVarData DVar =
2984 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2987 Clause->setPrivateRefs(PrivateRefs);
2991 for (
unsigned I = 0, E = Clause->getNumberOfAllocators(); I < E; ++I) {
2999 DSAStackTy::DSAVarData DVar =
3005 Expr *MapExpr =
nullptr;
3007 DSAStack->checkMappableExprComponentListsForDecl(
3013 auto MI = MapExprComponents.rbegin();
3014 auto ME = MapExprComponents.rend();
3016 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
3017 VD->getCanonicalDecl()) {
3018 MapExpr = MI->getAssociatedExpression();
3023 Diag(D.Allocator->getExprLoc(), diag::err_omp_allocator_used_in_clauses)
3028 Diag(MapExpr->getExprLoc(), diag::note_used_here)
3029 << MapExpr->getSourceRange();
3034 if (
const auto *D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
3036 if (
auto *Clause = dyn_cast<OMPLastprivateClause>(
C)) {
3037 FinalizeLastprivate(Clause);
3038 }
else if (
auto *Clause = dyn_cast<OMPNontemporalClause>(
C)) {
3039 FinalizeNontemporal(Clause);
3040 }
else if (
auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(
C)) {
3041 FinalizeAllocators(Clause);
3056 Expr *NumIterations, Sema &SemaRef,
3057 Scope *S, DSAStackTy *Stack);
3060 OMPLoopBasedDirective::HelperExprs &B,
3061 DSAStackTy *Stack) {
3063 "loop exprs were not built");
3070 auto *LC = dyn_cast<OMPLinearClause>(
C);
3074 B.NumIterations, SemaRef,
3084class VarDeclFilterCCC final :
public CorrectionCandidateCallback {
3089 explicit VarDeclFilterCCC(Sema &S) : SemaRef(S) {}
3090 bool ValidateCandidate(
const TypoCorrection &Candidate)
override {
3092 if (
const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
3100 std::unique_ptr<CorrectionCandidateCallback> clone()
override {
3101 return std::make_unique<VarDeclFilterCCC>(*
this);
3105class VarOrFuncDeclFilterCCC final :
public CorrectionCandidateCallback {
3110 explicit VarOrFuncDeclFilterCCC(Sema &S) : SemaRef(S) {}
3111 bool ValidateCandidate(
const TypoCorrection &Candidate)
override {
3121 std::unique_ptr<CorrectionCandidateCallback> clone()
override {
3122 return std::make_unique<VarOrFuncDeclFilterCCC>(*
this);
3135 SemaRef.LookupParsedName(Lookup, CurScope, &ScopeSpec,
3144 VarDeclFilterCCC CCC(
SemaRef);
3150 SemaRef.PDiag(Lookup.
empty() ? diag::err_undeclared_var_use_suggest
3151 : diag::err_omp_expected_var_arg_suggest)
3153 VD = Corrected.getCorrectionDeclAs<
VarDecl>();
3156 : diag::err_omp_expected_var_arg)
3169 if ((Kind == OMPD_threadprivate || Kind == OMPD_groupprivate) &&
3171 Diag(Id.
getLoc(), diag::err_omp_global_var_arg)
3172 << getOpenMPDirectiveName(Kind, OMPVersion) << !VD->
isStaticLocal();
3176 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3187 !
SemaRef.getCurLexicalContext()->isTranslationUnit()) {
3189 << getOpenMPDirectiveName(Kind, OMPVersion) << VD;
3193 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3204 << getOpenMPDirectiveName(Kind, OMPVersion) << VD;
3208 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3217 (!
SemaRef.getCurLexicalContext()->isFileContext() ||
3218 !
SemaRef.getCurLexicalContext()->Encloses(
3221 << getOpenMPDirectiveName(Kind, OMPVersion) << VD;
3225 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3234 !
SemaRef.isDeclInScope(ND,
SemaRef.getCurLexicalContext(), CurScope)) {
3236 << getOpenMPDirectiveName(Kind, OMPVersion) << VD;
3240 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3248 if ((Kind == OMPD_threadprivate && VD->
isUsed() &&
3250 (Kind == OMPD_groupprivate && VD->
isUsed())) {
3252 << getOpenMPDirectiveName(Kind, OMPVersion) << VD;
3267 SemaRef.CurContext->addDecl(D);
3277 Diag(Loc, diag::err_omp_unexpected_directive)
3278 << getOpenMPDirectiveName(OMPD_groupprivate,
getLangOpts().OpenMP);
3282 SemaRef.CurContext->addDecl(D);
3289class LocalVarRefChecker final
3295 if (
const auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
3298 diag::err_omp_local_var_in_threadprivate_init)
3300 SemaRef.Diag(VD->
getLocation(), diag::note_defined_here)
3307 bool VisitStmt(
const Stmt *S) {
3308 for (
const Stmt *Child : S->
children()) {
3309 if (Child && Visit(Child))
3314 explicit LocalVarRefChecker(Sema &SemaRef) : SemaRef(SemaRef) {}
3318OMPThreadPrivateDecl *
3323 for (
Expr *RefExpr : VarList) {
3341 if (
SemaRef.RequireCompleteType(
3342 ILoc, VD->
getType(), diag::err_omp_threadprivate_incomplete_type)) {
3350 Diag(ILoc, diag::err_omp_ref_type_arg)
3351 << getOpenMPDirectiveName(OMPD_threadprivate, OMPVersion)
3356 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3364 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
3369 Diag(ILoc, diag::err_omp_var_thread_local)
3374 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3382 LocalVarRefChecker Checker(
SemaRef);
3383 if (Checker.Visit(
Init))
3387 Vars.push_back(RefExpr);
3388 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3389 VD->
addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3392 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3395 if (!Vars.empty()) {
3408 for (
Expr *RefExpr : VarList) {
3426 if (
SemaRef.RequireCompleteType(
3427 ILoc, VD->
getType(), diag::err_omp_groupprivate_incomplete_type)) {
3433 Diag(ILoc, diag::err_omp_ref_type_arg)
3434 << getOpenMPDirectiveName(OMPD_groupprivate) << VD->
getType();
3438 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3446 Diag(ILoc, diag::err_omp_groupprivate_with_initializer)
3451 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3456 Vars.push_back(RefExpr);
3457 DSAStack->addDSA(VD, DE, OMPC_groupprivate);
3458 VD->
addAttr(OMPGroupPrivateDeclAttr::CreateImplicit(Context,
3461 ML->DeclarationMarkedOpenMPGroupPrivate(VD);
3464 if (!Vars.empty()) {
3472static OMPAllocateDeclAttr::AllocatorTypeTy
3475 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3479 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3480 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3481 llvm::FoldingSetNodeID AEId;
3484 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3485 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
3486 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3487 llvm::FoldingSetNodeID DAEId;
3490 if (AEId == DAEId) {
3491 AllocatorKindRes = AllocatorKind;
3495 return AllocatorKindRes;
3500 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
Expr *Allocator) {
3501 if (!VD->
hasAttr<OMPAllocateDeclAttr>())
3503 const auto *A = VD->
getAttr<OMPAllocateDeclAttr>();
3504 Expr *PrevAllocator = A->getAllocator();
3505 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3507 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
3508 if (AllocatorsMatch &&
3509 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3510 Allocator && PrevAllocator) {
3513 llvm::FoldingSetNodeID AEId, PAEId;
3516 AllocatorsMatch = AEId == PAEId;
3518 if (!AllocatorsMatch) {
3520 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3524 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3526 PrevAllocator->printPretty(PrevAllocatorStream,
nullptr,
3534 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3536 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3537 S.
Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3538 << (Allocator ? 1 : 0) << AllocatorStream.str()
3539 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3541 S.
Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3542 << PrevAllocatorRange;
3550 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3552 if (VD->
hasAttr<OMPAllocateDeclAttr>())
3565 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.
Context, AllocatorKind,
3566 Allocator, Alignment, SR);
3569 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3575 assert(Clauses.size() <= 2 &&
"Expected at most two clauses.");
3576 Expr *Alignment =
nullptr;
3577 Expr *Allocator =
nullptr;
3578 if (Clauses.empty()) {
3585 SemaRef.targetDiag(Loc, diag::err_expected_allocator_clause);
3588 if (
const auto *AC = dyn_cast<OMPAllocatorClause>(
C))
3589 Allocator = AC->getAllocator();
3590 else if (
const auto *AC = dyn_cast<OMPAlignClause>(
C))
3591 Alignment = AC->getAlignment();
3593 llvm_unreachable(
"Unexpected clause on allocate directive");
3595 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3598 for (
Expr *RefExpr : VarList) {
3604 VD->
hasAttr<OMPThreadPrivateDeclAttr>() ||
3612 AllocatorKind, Allocator))
3620 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3622 diag::err_omp_expected_predefined_allocator)
3627 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3633 Vars.push_back(RefExpr);
3640 Owner =
SemaRef.getCurLexicalContext();
3651 if (!
SemaRef.CurContext->isFileContext()) {
3652 Diag(Loc, diag::err_omp_invalid_scope) <<
"requires";
3656 SemaRef.CurContext->addDecl(D);
3666 bool SkippedClauses) {
3667 if (!SkippedClauses && Assumptions.empty()) {
3669 Diag(Loc, diag::err_omp_no_clause_for_directive)
3670 << llvm::omp::getAllAssumeClauseOptions()
3671 << llvm::omp::getOpenMPDirectiveName(DKind, OMPVersion);
3675 OMPAssumeAttr::Create(
getASTContext(), llvm::join(Assumptions,
","), Loc);
3676 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3677 OMPAssumeScoped.push_back(AA);
3682 if (Assumptions.empty())
3685 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3686 "Unexpected omp assumption directive!");
3687 OMPAssumeGlobal.push_back(AA);
3694 auto *Ctx =
SemaRef.CurContext;
3695 while (Ctx->getLexicalParent())
3696 Ctx = Ctx->getLexicalParent();
3697 DeclContexts.push_back(Ctx);
3698 while (!DeclContexts.empty()) {
3700 for (
auto *SubDC : DC->
decls()) {
3701 if (SubDC->isInvalidDecl())
3703 if (
auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3704 DeclContexts.push_back(CTD->getTemplatedDecl());
3705 llvm::append_range(DeclContexts, CTD->specializations());
3708 if (
auto *DC = dyn_cast<DeclContext>(SubDC))
3709 DeclContexts.push_back(DC);
3710 if (
auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3720 OMPAssumeScoped.pop_back();
3741 DSAStack->getEncounteredTargetLocs();
3743 if (!TargetLocations.empty() || !AtomicLoc.
isInvalid()) {
3744 for (
const OMPClause *CNew : ClauseList) {
3750 Diag(Loc, diag::err_omp_directive_before_requires)
3753 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3758 Diag(Loc, diag::err_omp_directive_before_requires)
3760 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3766 if (!
DSAStack->hasDuplicateRequiresClause(ClauseList))
3774 const DSAStackTy::DSAVarData &DVar,
3775 bool IsLoopIterVar) {
3777 SemaRef.
Diag(DVar.RefExpr->
getExprLoc(), diag::note_omp_explicit_dsa)
3782 PDSA_StaticMemberShared,
3783 PDSA_StaticLocalVarShared,
3784 PDSA_LoopIterVarPrivate,
3785 PDSA_LoopIterVarLinear,
3786 PDSA_LoopIterVarLastprivate,
3787 PDSA_ConstVarShared,
3788 PDSA_GlobalVarShared,
3789 PDSA_TaskVarFirstprivate,
3790 PDSA_LocalVarPrivate,
3792 } Reason = PDSA_Implicit;
3793 bool ReportHint =
false;
3795 auto *VD = dyn_cast<VarDecl>(D);
3796 if (IsLoopIterVar) {
3797 if (DVar.CKind == OMPC_private)
3798 Reason = PDSA_LoopIterVarPrivate;
3799 else if (DVar.CKind == OMPC_lastprivate)
3800 Reason = PDSA_LoopIterVarLastprivate;
3802 Reason = PDSA_LoopIterVarLinear;
3804 DVar.CKind == OMPC_firstprivate) {
3805 Reason = PDSA_TaskVarFirstprivate;
3806 ReportLoc = DVar.ImplicitDSALoc;
3808 Reason = PDSA_StaticLocalVarShared;
3810 Reason = PDSA_StaticMemberShared;
3812 Reason = PDSA_GlobalVarShared;
3814 Reason = PDSA_ConstVarShared;
3815 else if (VD && VD->
isLocalVarDecl() && DVar.CKind == OMPC_private) {
3817 Reason = PDSA_LocalVarPrivate;
3819 if (Reason != PDSA_Implicit) {
3820 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
3821 SemaRef.
Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3822 << Reason << ReportHint
3823 << getOpenMPDirectiveName(Stack->getCurrentDirective(), OMPVersion);
3824 }
else if (DVar.ImplicitDSALoc.
isValid()) {
3825 SemaRef.
Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3832 bool IsAggregateOrDeclareTarget) {
3835 case OMPC_DEFAULTMAP_MODIFIER_alloc:
3836 case OMPC_DEFAULTMAP_MODIFIER_storage:
3837 Kind = OMPC_MAP_alloc;
3839 case OMPC_DEFAULTMAP_MODIFIER_to:
3842 case OMPC_DEFAULTMAP_MODIFIER_from:
3843 Kind = OMPC_MAP_from;
3845 case OMPC_DEFAULTMAP_MODIFIER_tofrom:
3846 Kind = OMPC_MAP_tofrom;
3848 case OMPC_DEFAULTMAP_MODIFIER_present:
3854 Kind = OMPC_MAP_alloc;
3856 case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
3857 case OMPC_DEFAULTMAP_MODIFIER_private:
3859 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3860 case OMPC_DEFAULTMAP_MODIFIER_none:
3861 case OMPC_DEFAULTMAP_MODIFIER_default:
3866 if (IsAggregateOrDeclareTarget) {
3867 Kind = OMPC_MAP_tofrom;
3870 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3877struct VariableImplicitInfo {
3881 llvm::SetVector<Expr *> Privates;
3882 llvm::SetVector<Expr *> Firstprivates;
3883 llvm::SetVector<Expr *> Mappings[DefaultmapKindNum][MapKindNum];
3884 llvm::SmallVector<OpenMPMapModifierKind, NumberOfOMPMapClauseModifiers>
3885 MapModifiers[DefaultmapKindNum];
3888class DSAAttrChecker final :
public StmtVisitor<DSAAttrChecker, void> {
3892 bool ErrorFound =
false;
3893 bool TryCaptureCXXThisMembers =
false;
3894 CapturedStmt *CS =
nullptr;
3896 VariableImplicitInfo ImpInfo;
3898 llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;
3900 void VisitSubCaptures(OMPExecutableDirective *S) {
3902 if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
3904 if (S->getDirectiveKind() == OMPD_atomic ||
3905 S->getDirectiveKind() == OMPD_critical ||
3906 S->getDirectiveKind() == OMPD_section ||
3907 S->getDirectiveKind() == OMPD_master ||
3908 S->getDirectiveKind() == OMPD_masked ||
3909 S->getDirectiveKind() == OMPD_scope ||
3910 S->getDirectiveKind() == OMPD_assume ||
3912 Visit(S->getAssociatedStmt());
3915 visitSubCaptures(S->getInnermostCapturedStmt());
3918 if (TryCaptureCXXThisMembers ||
3920 llvm::any_of(S->getInnermostCapturedStmt()->captures(),
3921 [](
const CapturedStmt::Capture &
C) {
3922 return C.capturesThis();
3924 bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
3925 TryCaptureCXXThisMembers =
true;
3926 Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
3927 TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
3933 for (OMPClause *
C : S->clauses())
3934 if (
auto *FC = dyn_cast<OMPFirstprivateClause>(
C)) {
3935 for (Expr *Ref : FC->varlist())
3942 void VisitDeclRefExpr(DeclRefExpr *E) {
3948 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
3951 !Stack->getTopDSA(VD,
false).RefExpr &&
3952 !Stack->isImplicitDefaultFirstprivateFD(VD))) {
3953 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
3954 if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
3955 Visit(CED->getInit());
3961 if (!Stack->isImplicitDefaultFirstprivateFD(VD))
3966 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3967 !Stack->isImplicitTaskFirstprivate(VD))
3970 if (Stack->isUsesAllocatorsDecl(VD))
3973 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
3975 if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
3979 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
3980 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
3982 (Stack->hasRequiresDeclWithClause<OMPUnifiedSharedMemoryClause>() ||
3983 !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
3984 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3985 !Stack->isImplicitTaskFirstprivate(VD))
3993 if (DVar.CKind == OMPC_unknown &&
3994 (Stack->getDefaultDSA() == DSA_none ||
3995 Stack->getDefaultDSA() == DSA_private ||
3996 Stack->getDefaultDSA() == DSA_firstprivate) &&
3997 isImplicitOrExplicitTaskingRegion(DKind) &&
3998 VarsWithInheritedDSA.count(VD) == 0) {
3999 bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
4000 if (!InheritedDSA && (Stack->getDefaultDSA() == DSA_firstprivate ||
4001 Stack->getDefaultDSA() == DSA_private)) {
4002 DSAStackTy::DSAVarData DVar =
4003 Stack->getImplicitDSA(VD,
false);
4004 InheritedDSA = DVar.CKind == OMPC_unknown;
4007 VarsWithInheritedDSA[VD] = E;
4008 if (Stack->getDefaultDSA() == DSA_none)
4023 bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
4024 OMPC_DEFAULTMAP_MODIFIER_none;
4025 if (DVar.CKind == OMPC_unknown && IsModifierNone &&
4026 VarsWithInheritedDSA.count(VD) == 0 && !Res) {
4030 if (!Stack->checkMappableExprComponentListsForDecl(
4035 auto MI = MapExprComponents.rbegin();
4036 auto ME = MapExprComponents.rend();
4037 return MI != ME && MI->getAssociatedDeclaration() == VD;
4039 VarsWithInheritedDSA[VD] = E;
4045 bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
4046 OMPC_DEFAULTMAP_MODIFIER_present;
4047 if (IsModifierPresent) {
4048 if (!llvm::is_contained(ImpInfo.MapModifiers[ClauseKind],
4049 OMPC_MAP_MODIFIER_present)) {
4050 ImpInfo.MapModifiers[ClauseKind].push_back(
4051 OMPC_MAP_MODIFIER_present);
4057 !Stack->isLoopControlVariable(VD).first) {
4058 if (!Stack->checkMappableExprComponentListsForDecl(
4063 if (SemaRef.LangOpts.OpenMP >= 50)
4064 return !StackComponents.empty();
4067 return StackComponents.size() == 1 ||
4069 llvm::drop_begin(llvm::reverse(StackComponents)),
4070 [](const OMPClauseMappableExprCommon::
4071 MappableComponent &MC) {
4072 return MC.getAssociatedDeclaration() ==
4074 (isa<ArraySectionExpr>(
4075 MC.getAssociatedExpression()) ||
4076 isa<OMPArrayShapingExpr>(
4077 MC.getAssociatedExpression()) ||
4078 isa<ArraySubscriptExpr>(
4079 MC.getAssociatedExpression()));
4082 bool IsFirstprivate =
false;
4084 if (
const auto *RD =
4086 IsFirstprivate = RD->isLambda();
4088 IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
4089 if (IsFirstprivate) {
4090 ImpInfo.Firstprivates.insert(E);
4093 Stack->getDefaultmapModifier(ClauseKind);
4094 if (M == OMPC_DEFAULTMAP_MODIFIER_private) {
4095 ImpInfo.Privates.insert(E);
4098 M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
4099 ImpInfo.Mappings[ClauseKind][
Kind].insert(E);
4110 DVar = Stack->hasInnermostDSA(
4113 return C == OMPC_reduction && !AppliedToPointee;
4122 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
4128 DVar = Stack->getImplicitDSA(VD,
false);
4130 (((Stack->getDefaultDSA() == DSA_firstprivate &&
4131 DVar.CKind == OMPC_firstprivate) ||
4132 (Stack->getDefaultDSA() == DSA_private &&
4133 DVar.CKind == OMPC_private)) &&
4135 !Stack->isLoopControlVariable(VD).first) {
4136 if (Stack->getDefaultDSA() == DSA_private)
4137 ImpInfo.Privates.insert(E);
4139 ImpInfo.Firstprivates.insert(E);
4146 *Res == OMPDeclareTargetDeclAttr::MT_Link) {
4147 Stack->addToParentTargetRegionLinkGlobals(E);
4152 void VisitMemberExpr(MemberExpr *E) {
4160 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD,
false);
4163 if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
4167 !Stack->isLoopControlVariable(FD).first &&
4168 !Stack->checkMappableExprComponentListsForDecl(
4173 return isa<CXXThisExpr>(
4175 StackComponents.back().getAssociatedExpression())
4187 if (Stack->isClassPreviouslyMapped(TE->getType()))
4191 Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
4196 ImpInfo.Mappings[ClauseKind][
Kind].insert(E);
4205 DVar = Stack->hasInnermostDSA(
4208 return C == OMPC_reduction && !AppliedToPointee;
4217 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
4223 DVar = Stack->getImplicitDSA(FD,
false);
4225 !Stack->isLoopControlVariable(FD).first) {
4230 if (DVar.CKind != OMPC_unknown)
4231 ImpInfo.Firstprivates.insert(E);
4241 CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
4242 if (!Stack->checkMappableExprComponentListsForDecl(
4248 auto CCI = CurComponents.rbegin();
4249 auto CCE = CurComponents.rend();
4250 for (const auto &SC : llvm::reverse(StackComponents)) {
4252 if (CCI->getAssociatedExpression()->getStmtClass() !=
4253 SC.getAssociatedExpression()->getStmtClass())
4254 if (!((isa<ArraySectionExpr>(
4255 SC.getAssociatedExpression()) ||
4256 isa<OMPArrayShapingExpr>(
4257 SC.getAssociatedExpression())) &&
4258 isa<ArraySubscriptExpr>(
4259 CCI->getAssociatedExpression())))
4262 const Decl *CCD = CCI->getAssociatedDeclaration();
4263 const Decl *SCD = SC.getAssociatedDeclaration();
4264 CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
4265 SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
4268 std::advance(CCI, 1);
4276 }
else if (!TryCaptureCXXThisMembers) {
4280 void VisitOMPExecutableDirective(OMPExecutableDirective *S) {
4281 for (OMPClause *
C : S->clauses()) {
4284 if (isa_and_nonnull<OMPPrivateClause>(
C))
4292 for (Stmt *CC :
C->children()) {
4299 VisitSubCaptures(S);
4302 void VisitOMPCanonicalLoopNestTransformationDirective(
4303 OMPCanonicalLoopNestTransformationDirective *S) {
4308 void VisitCallExpr(CallExpr *S) {
4317 auto *CI =
Callee->IgnoreParenImpCasts();
4318 if (
auto *CE = dyn_cast<MemberExpr>(CI))
4319 Visit(CE->getBase());
4320 else if (
auto *CE = dyn_cast<DeclRefExpr>(CI))
4324 void VisitStmt(Stmt *S) {
4334 void visitSubCaptures(CapturedStmt *S) {
4335 for (
const CapturedStmt::Capture &Cap : S->
captures()) {
4336 if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
4338 VarDecl *VD = Cap.getCapturedVar();
4342 Stack->checkMappableExprComponentListsForDecl(
4349 Cap.getLocation(),
true);
4353 bool isErrorFound()
const {
return ErrorFound; }
4354 const VariableImplicitInfo &getImplicitInfo()
const {
return ImpInfo; }
4356 return VarsWithInheritedDSA;
4359 DSAAttrChecker(DSAStackTy *S, Sema &SemaRef, CapturedStmt *CS)
4360 : Stack(S), SemaRef(SemaRef), ErrorFound(
false), CS(CS) {
4361 DKind = S->getCurrentDirective();
4364 for (DeclRefExpr *E : Stack->getLinkGlobals())
4376 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
4378 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
4380 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
4382 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
4384 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
4385 Stack->handleConstructTrait(Traits, ScopeEntry);
4388static SmallVector<SemaOpenMP::CapturedParamNameType>
4392 Context.getIntTypeForBitwidth(32, 1).
withConst();
4396 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4397 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4399 if (LoopBoundSharing) {
4401 Params.push_back(std::make_pair(
".previous.lb.", KmpSizeTy));
4402 Params.push_back(std::make_pair(
".previous.ub.", KmpSizeTy));
4406 Params.push_back(std::make_pair(StringRef(),
QualType()));
4410static SmallVector<SemaOpenMP::CapturedParamNameType>
4415static SmallVector<SemaOpenMP::CapturedParamNameType>
4425 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4427 std::make_pair(
".global_tid.", KmpInt32Ty),
4428 std::make_pair(
".part_id.", KmpInt32PtrTy),
4429 std::make_pair(
".privates.", VoidPtrTy),
4432 Context.getPointerType(CopyFnType).withConst().withRestrict()),
4433 std::make_pair(
".task_t.", Context.VoidPtrTy.withConst()),
4434 std::make_pair(StringRef(),
QualType())
4439static SmallVector<SemaOpenMP::CapturedParamNameType>
4445 Params.push_back(std::make_pair(StringRef(
"dyn_ptr"), VoidPtrTy));
4448 Params.push_back(std::make_pair(StringRef(),
QualType()));
4452static SmallVector<SemaOpenMP::CapturedParamNameType>
4455 std::make_pair(StringRef(),
QualType())
4460static SmallVector<SemaOpenMP::CapturedParamNameType>
4464 Context.getIntTypeForBitwidth(32, 1).
withConst();
4466 Context.getIntTypeForBitwidth(64, 0).
withConst();
4468 Context.getIntTypeForBitwidth(64, 1).
withConst();
4475 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4477 std::make_pair(
".global_tid.", KmpInt32Ty),
4478 std::make_pair(
".part_id.", KmpInt32PtrTy),
4479 std::make_pair(
".privates.", VoidPtrTy),
4482 Context.getPointerType(CopyFnType).withConst().withRestrict()),
4483 std::make_pair(
".task_t.", Context.VoidPtrTy.withConst()),
4484 std::make_pair(
".lb.", KmpUInt64Ty),
4485 std::make_pair(
".ub.", KmpUInt64Ty),
4486 std::make_pair(
".st.", KmpInt64Ty),
4487 std::make_pair(
".liter.", KmpInt32Ty),
4488 std::make_pair(
".reductions.", VoidPtrTy),
4489 std::make_pair(StringRef(),
QualType())
4502 CSI->TheCapturedDecl->addAttr(AlwaysInlineAttr::CreateImplicit(
4503 SemaRef.
getASTContext(), {}, AlwaysInlineAttr::Keyword_forceinline));
4506 for (
auto [Level, RKind] : llvm::enumerate(Regions)) {
4541 case OMPD_metadirective:
4544 llvm_unreachable(
"Unexpected capture region");
4561 case OMPD_interchange:
4575int SemaOpenMP::getNumberOfConstructScopes(
unsigned Level)
const {
4576 return getOpenMPCaptureLevels(
DSAStack->getDirective(Level));
4582 return CaptureRegions.size();
4586 Expr *CaptureExpr,
bool WithInit,
4588 bool AsExpression) {
4589 assert(CaptureExpr);
4595 Ty =
C.getLValueReferenceType(Ty);
4597 Ty =
C.getPointerType(Ty);
4609 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(
C));
4659class CaptureRegionUnwinderRAII {
4666 CaptureRegionUnwinderRAII(Sema &S,
bool &ErrorFound,
4668 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
4669 ~CaptureRegionUnwinderRAII() {
4672 while (--ThisCaptureLevel >= 0)
4682 if (!
SemaRef.CurContext->isDependentContext() &&
4685 DSAStack->getCurrentDirective()))) {
4687 if (
const auto *RD =
Type.getCanonicalType()
4688 .getNonReferenceType()
4690 bool SavedForceCaptureByReferenceInTargetExecutable =
4691 DSAStack->isForceCaptureByReferenceInTargetExecutable();
4692 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4694 if (RD->isLambda()) {
4695 llvm::DenseMap<const ValueDecl *, FieldDecl *> Captures;
4697 RD->getCaptureFields(Captures, ThisCapture);
4704 SemaRef.MarkVariableReferenced(LC.getLocation(), VD);
4705 }
else if (LC.getCaptureKind() ==
LCK_This) {
4708 ThisTy, ThisCapture->
getType()))
4709 SemaRef.CheckCXXThisCapture(LC.getLocation());
4713 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4714 SavedForceCaptureByReferenceInTargetExecutable);
4724 for (
const OMPClause *Clause : Clauses) {
4725 if (Clause->getClauseKind() == OMPC_ordered)
4727 else if (Clause->getClauseKind() == OMPC_order) {
4729 if (Order->
getKind() != OMPC_ORDER_concurrent)
4732 if (Ordered && Order)
4736 if (Ordered && Order) {
4738 diag::err_omp_simple_clause_incompatible_with_ordered)
4756 bool ErrorFound =
false;
4757 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
4774 DSAStack->getCurrentDirective() == OMPD_target) &&
4775 Clause->getClauseKind() == OMPC_in_reduction) {
4779 for (
Expr *E : IRC->taskgroup_descriptors())
4781 SemaRef.MarkDeclarationsReferencedInExpr(E);
4784 Clause->getClauseKind() == OMPC_copyprivate ||
4787 Clause->getClauseKind() == OMPC_copyin)) {
4788 DSAStack->setForceVarCapturing(Clause->getClauseKind() == OMPC_copyin);
4791 if (
auto *E = cast_or_null<Expr>(VarRef)) {
4792 SemaRef.MarkDeclarationsReferencedInExpr(E);
4795 DSAStack->setForceVarCapturing(
false);
4796 }
else if (CaptureRegions.size() > 1 ||
4797 CaptureRegions.back() != OMPD_unknown) {
4801 if (
Expr *E =
C->getPostUpdateExpr())
4802 SemaRef.MarkDeclarationsReferencedInExpr(E);
4805 if (Clause->getClauseKind() == OMPC_schedule)
4807 else if (Clause->getClauseKind() == OMPC_ordered)
4809 else if (Clause->getClauseKind() == OMPC_linear)
4814 SemaRef.MarkDeclarationsReferencedInExpr(E);
4821 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
4826 diag::err_omp_simple_clause_incompatible_with_ordered)
4829 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
4840 Diag(
C->getBeginLoc(), diag::err_omp_linear_ordered)
4850 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(), OMPVersion);
4857 unsigned CompletedRegions = 0;
4862 if (ThisCaptureRegion != OMPD_unknown) {
4870 if (CaptureRegion == ThisCaptureRegion ||
4871 CaptureRegion == OMPD_unknown) {
4872 if (
auto *DS = cast_or_null<DeclStmt>(
C->getPreInitStmt())) {
4873 for (
Decl *D : DS->decls())
4880 if (ThisCaptureRegion == OMPD_target) {
4884 if (
const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(
C)) {
4885 for (
unsigned I = 0, End = UAC->getNumberOfAllocators(); I < End;
4889 SemaRef.MarkDeclarationsReferencedInExpr(E);
4895 if (ThisCaptureRegion == OMPD_parallel) {
4899 if (
auto *RC = dyn_cast<OMPReductionClause>(
C)) {
4900 if (RC->getModifier() != OMPC_REDUCTION_inscan)
4902 for (
Expr *E : RC->copy_array_temps())
4904 SemaRef.MarkDeclarationsReferencedInExpr(E);
4906 if (
auto *AC = dyn_cast<OMPAlignedClause>(
C)) {
4907 for (
Expr *E : AC->varlist())
4908 SemaRef.MarkDeclarationsReferencedInExpr(E);
4912 if (++CompletedRegions == CaptureRegions.size())
4914 SR =
SemaRef.ActOnCapturedRegionEnd(SR.
get());
4923 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4926 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4927 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4930 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
4931 SemaRef.
Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4932 << getOpenMPDirectiveName(CancelRegion, OMPVersion);
4942 if (!Stack->getCurScope())
4947 bool NestingProhibited =
false;
4948 bool CloseNesting =
true;
4949 bool OrphanSeen =
false;
4952 ShouldBeInParallelRegion,
4953 ShouldBeInOrderedRegion,
4954 ShouldBeInTargetRegion,
4955 ShouldBeInTeamsRegion,
4956 ShouldBeInLoopSimdRegion,
4957 } Recommend = NoRecommend;
4961 getLeafOrCompositeConstructs(ParentRegion, LeafOrComposite);
4963 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
4965 if (OMPVersion >= 50 && Stack->isParentOrderConcurrent() &&
4968 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_order)
4969 << getOpenMPDirectiveName(CurrentRegion, OMPVersion);
4973 ((OMPVersion <= 45 && CurrentRegion != OMPD_ordered) ||
4974 (OMPVersion >= 50 && CurrentRegion != OMPD_ordered &&
4975 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
4976 CurrentRegion != OMPD_scan))) {
4989 SemaRef.
Diag(StartLoc, (CurrentRegion != OMPD_simd)
4990 ? diag::err_omp_prohibited_region_simd
4991 : diag::warn_omp_nesting_simd)
4992 << (OMPVersion >= 50 ? 1 : 0);
4993 return CurrentRegion != OMPD_simd;
4995 if (EnclosingConstruct == OMPD_atomic) {
4998 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
5001 if (CurrentRegion == OMPD_section) {
5006 if (EnclosingConstruct != OMPD_sections) {
5007 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_section_directive)
5008 << (ParentRegion != OMPD_unknown)
5009 << getOpenMPDirectiveName(ParentRegion, OMPVersion);
5017 if (ParentRegion == OMPD_unknown &&
5019 CurrentRegion != OMPD_cancellation_point &&
5020 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
5024 if (OMPVersion >= 50 && CurrentRegion == OMPD_loop &&
5025 (BindKind == OMPC_BIND_parallel || BindKind == OMPC_BIND_teams) &&
5027 EnclosingConstruct == OMPD_loop)) {
5028 int ErrorMsgNumber = (BindKind == OMPC_BIND_parallel) ? 1 : 4;
5029 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
5030 <<
true << getOpenMPDirectiveName(ParentRegion, OMPVersion)
5031 << ErrorMsgNumber << getOpenMPDirectiveName(CurrentRegion, OMPVersion);
5034 if (CurrentRegion == OMPD_cancellation_point ||
5035 CurrentRegion == OMPD_cancel) {
5048 if (CancelRegion == OMPD_taskgroup) {
5050 EnclosingConstruct != OMPD_task &&
5051 (OMPVersion < 50 || EnclosingConstruct != OMPD_taskloop);
5052 }
else if (CancelRegion == OMPD_sections) {
5053 NestingProhibited = EnclosingConstruct != OMPD_section &&
5054 EnclosingConstruct != OMPD_sections;
5056 NestingProhibited = CancelRegion != Leafs.back();
5058 OrphanSeen = ParentRegion == OMPD_unknown;
5059 }
else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
5066 }
else if (CurrentRegion == OMPD_critical && CurrentName.
getName()) {
5072 bool DeadLock = Stack->hasDirective(
5076 if (K == OMPD_critical && DNI.
getName() == CurrentName.
getName()) {
5077 PreviousCriticalLoc = Loc;
5084 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_critical_same_name)
5086 if (PreviousCriticalLoc.
isValid())
5087 SemaRef.
Diag(PreviousCriticalLoc,
5088 diag::note_omp_previous_critical_region);
5091 }
else if (CurrentRegion == OMPD_barrier || CurrentRegion == OMPD_scope) {
5101 llvm::is_contained({OMPD_masked, OMPD_master,
5102 OMPD_critical, OMPD_ordered},
5103 EnclosingConstruct);
5114 llvm::is_contained({OMPD_masked, OMPD_master,
5115 OMPD_critical, OMPD_ordered},
5116 EnclosingConstruct);
5117 Recommend = ShouldBeInParallelRegion;
5118 }
else if (CurrentRegion == OMPD_ordered) {
5127 NestingProhibited = EnclosingConstruct == OMPD_critical ||
5130 Stack->isParentOrderedRegion());
5131 Recommend = ShouldBeInOrderedRegion;
5137 (OMPVersion <= 45 && EnclosingConstruct != OMPD_target) ||
5138 (OMPVersion >= 50 && EnclosingConstruct != OMPD_unknown &&
5139 EnclosingConstruct != OMPD_target);
5140 OrphanSeen = ParentRegion == OMPD_unknown;
5141 Recommend = ShouldBeInTargetRegion;
5142 }
else if (CurrentRegion == OMPD_scan) {
5143 if (OMPVersion >= 50) {
5148 NestingProhibited = !llvm::is_contained(
5149 {OMPD_for, OMPD_simd, OMPD_for_simd}, EnclosingConstruct);
5151 NestingProhibited =
true;
5153 OrphanSeen = ParentRegion == OMPD_unknown;
5154 Recommend = ShouldBeInLoopSimdRegion;
5158 EnclosingConstruct == OMPD_teams) {
5170 CurrentRegion != OMPD_loop &&
5172 CurrentRegion == OMPD_atomic);
5173 Recommend = ShouldBeInParallelRegion;
5175 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
5181 BindKind == OMPC_BIND_teams && EnclosingConstruct != OMPD_teams;
5182 Recommend = ShouldBeInTeamsRegion;
5188 NestingProhibited = EnclosingConstruct != OMPD_teams;
5189 Recommend = ShouldBeInTeamsRegion;
5191 if (!NestingProhibited &&
5198 NestingProhibited = Stack->hasDirective(
5202 OffendingRegion = K;
5208 CloseNesting =
false;
5210 if (NestingProhibited) {
5212 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
5213 << getOpenMPDirectiveName(CurrentRegion, OMPVersion) << Recommend;
5215 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
5216 << CloseNesting << getOpenMPDirectiveName(OffendingRegion, OMPVersion)
5217 << Recommend << getOpenMPDirectiveName(CurrentRegion, OMPVersion);
5231 bool ErrorFound =
false;
5232 unsigned NamedModifiersNumber = 0;
5233 llvm::IndexedMap<const OMPIfClause *, Kind2Unsigned> FoundNameModifiers;
5234 FoundNameModifiers.resize(llvm::omp::Directive_enumSize + 1);
5238 if (
const auto *IC = dyn_cast_or_null<OMPIfClause>(
C)) {
5242 auto &FNM = FoundNameModifiers[CurNM];
5244 S.
Diag(
C->getBeginLoc(), diag::err_omp_more_one_clause)
5245 << getOpenMPDirectiveName(Kind, OMPVersion)
5247 << getOpenMPDirectiveName(CurNM, OMPVersion);
5249 }
else if (CurNM != OMPD_unknown) {
5250 NameModifierLoc.push_back(IC->getNameModifierLoc());
5251 ++NamedModifiersNumber;
5254 if (CurNM == OMPD_unknown)
5260 if (!llvm::is_contained(AllowedNameModifiers, CurNM)) {
5261 S.
Diag(IC->getNameModifierLoc(),
5262 diag::err_omp_wrong_if_directive_name_modifier)
5263 << getOpenMPDirectiveName(CurNM, OMPVersion)
5264 << getOpenMPDirectiveName(Kind, OMPVersion);
5271 if (FoundNameModifiers[OMPD_unknown] && NamedModifiersNumber > 0) {
5272 if (NamedModifiersNumber == AllowedNameModifiers.size()) {
5273 S.
Diag(FoundNameModifiers[OMPD_unknown]->getBeginLoc(),
5274 diag::err_omp_no_more_if_clause);
5277 std::string Sep(
", ");
5278 unsigned AllowedCnt = 0;
5279 unsigned TotalAllowedNum =
5280 AllowedNameModifiers.size() - NamedModifiersNumber;
5281 for (
unsigned Cnt = 0, End = AllowedNameModifiers.size(); Cnt < End;
5284 if (!FoundNameModifiers[NM]) {
5286 Values += getOpenMPDirectiveName(NM, OMPVersion);
5288 if (AllowedCnt + 2 == TotalAllowedNum)
5290 else if (AllowedCnt + 1 != TotalAllowedNum)
5295 S.
Diag(FoundNameModifiers[OMPD_unknown]->getCondition()->getBeginLoc(),
5296 diag::err_omp_unnamed_if_clause)
5297 << (TotalAllowedNum > 1) << Values;
5300 S.
Diag(Loc, diag::note_omp_previous_named_if_clause);
5307static std::pair<ValueDecl *, bool>
5310 bool AllowAssumedSizeArray, StringRef DiagType) {
5313 return std::make_pair(
nullptr,
true);
5339 } IsArrayExpr = NoArrayExpr;
5340 if (AllowArraySection) {
5341 if (
auto *ASE = dyn_cast_or_null<ArraySubscriptExpr>(RefExpr)) {
5342 Expr *
Base = ASE->getBase()->IgnoreParenImpCasts();
5343 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5344 Base = TempASE->getBase()->IgnoreParenImpCasts();
5347 }
else if (
auto *OASE = dyn_cast_or_null<ArraySectionExpr>(RefExpr)) {
5348 Expr *
Base = OASE->getBase()->IgnoreParenImpCasts();
5349 if (S.
getLangOpts().OpenMP >= 60 && !AllowAssumedSizeArray &&
5350 OASE->getColonLocFirst().isValid() && !OASE->getLength()) {
5352 if (BaseType.isNull() || (!BaseType->isConstantArrayType() &&
5353 !BaseType->isVariableArrayType())) {
5354 S.
Diag(OASE->getColonLocFirst(),
5355 diag::err_omp_section_length_undefined)
5356 << (!BaseType.isNull() && BaseType->isArrayType());
5357 return std::make_pair(
nullptr,
false);
5360 while (
auto *TempOASE = dyn_cast<ArraySectionExpr>(
Base))
5361 Base = TempOASE->getBase()->IgnoreParenImpCasts();
5362 while (
auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base))
5363 Base = TempASE->getBase()->IgnoreParenImpCasts();
5365 IsArrayExpr = OMPArraySection;
5371 auto *DE = dyn_cast_or_null<DeclRefExpr>(RefExpr);
5372 auto *ME = dyn_cast_or_null<MemberExpr>(RefExpr);
5377 if (IsArrayExpr != NoArrayExpr) {
5378 S.
Diag(ELoc, diag::err_omp_expected_base_var_name)
5379 << IsArrayExpr << ERange;
5380 }
else if (!DiagType.empty()) {
5384 S.
Diag(ELoc, diag::err_omp_expected_var_name_member_expr_with_type)
5385 << DiagSelect << DiagType << ERange;
5389 ? diag::err_omp_expected_var_name_member_expr_or_array_item
5390 : diag::err_omp_expected_var_name_member_expr)
5393 return std::make_pair(
nullptr,
false);
5395 return std::make_pair(
5402class AllocatorChecker final :
public ConstStmtVisitor<AllocatorChecker, bool> {
5403 DSAStackTy *S =
nullptr;
5406 bool VisitDeclRefExpr(
const DeclRefExpr *E) {
5407 return S->isUsesAllocatorsDecl(E->
getDecl())
5408 .value_or(DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
5409 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait;
5411 bool VisitStmt(
const Stmt *S) {
5412 for (
const Stmt *Child : S->
children()) {
5413 if (Child && Visit(Child))
5418 explicit AllocatorChecker(DSAStackTy *S) : S(S) {}
5425 "Expected non-dependent context.");
5426 auto AllocateRange =
5429 auto PrivateRange = llvm::make_filter_range(Clauses, [](
const OMPClause *
C) {
5434 if (
Cl->getClauseKind() == OMPC_private) {
5436 I = PC->private_copies().begin();
5437 It = PC->varlist_begin();
5438 Et = PC->varlist_end();
5439 }
else if (
Cl->getClauseKind() == OMPC_firstprivate) {
5441 I = PC->private_copies().begin();
5442 It = PC->varlist_begin();
5443 Et = PC->varlist_end();
5444 }
else if (
Cl->getClauseKind() == OMPC_lastprivate) {
5446 I = PC->private_copies().begin();
5447 It = PC->varlist_begin();
5448 Et = PC->varlist_end();
5449 }
else if (
Cl->getClauseKind() == OMPC_linear) {
5451 I = PC->privates().begin();
5452 It = PC->varlist_begin();
5453 Et = PC->varlist_end();
5454 }
else if (
Cl->getClauseKind() == OMPC_reduction) {
5456 I = PC->privates().begin();
5457 It = PC->varlist_begin();
5458 Et = PC->varlist_end();
5459 }
else if (
Cl->getClauseKind() == OMPC_task_reduction) {
5461 I = PC->privates().begin();
5462 It = PC->varlist_begin();
5463 Et = PC->varlist_end();
5464 }
else if (
Cl->getClauseKind() == OMPC_in_reduction) {
5466 I = PC->privates().begin();
5467 It = PC->varlist_begin();
5468 Et = PC->varlist_end();
5470 llvm_unreachable(
"Expected private clause.");
5472 for (
Expr *E : llvm::make_range(It, Et)) {
5479 Expr *SimpleRefExpr = E;
5482 DeclToCopy.try_emplace(Res.first,
5492 AC->getAllocator()) {
5493 Expr *Allocator = AC->getAllocator();
5499 AllocatorChecker Checker(Stack);
5500 if (Checker.Visit(Allocator))
5502 diag::err_omp_allocator_not_in_uses_allocators)
5505 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
5511 if (AllocatorKind == OMPAllocateDeclAttr::OMPThreadMemAlloc &&
5515 S.
Diag(AC->getAllocator()->getExprLoc(),
5516 diag::warn_omp_allocate_thread_on_task_target_directive)
5517 << getOpenMPDirectiveName(Stack->getCurrentDirective(), OMPVersion);
5519 for (
Expr *E : AC->varlist()) {
5522 Expr *SimpleRefExpr = E;
5527 DSAStackTy::DSAVarData
Data = Stack->getTopDSA(VD,
false);
5530 diag::err_omp_expected_private_copy_for_allocate);
5533 VarDecl *PrivateVD = DeclToCopy[VD];
5535 AllocatorKind, AC->getAllocator()))
5549class CaptureVars :
public TreeTransform<CaptureVars> {
5550 using BaseTransform = TreeTransform<CaptureVars>;
5553 CaptureVars(Sema &Actions) : BaseTransform(Actions) {}
5555 bool AlwaysRebuild() {
return true; }
5568 BodyStmts.push_back(NewDeclStmt);
5606 DistParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5617 auto BuildVarRef = [&](
VarDecl *VD) {
5622 Ctx, llvm::APInt(Ctx.
getIntWidth(LogicalTy), 0), LogicalTy, {});
5624 Ctx, llvm::APInt(Ctx.
getIntWidth(LogicalTy), 1), LogicalTy, {});
5630 Actions.
BuildBinOp(
nullptr, {}, BO_LT, BuildVarRef(NewStep),
Zero));
5634 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5638 nullptr, {}, BO_Div, ForwardRange, BuildVarRef(NewStep)));
5642 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5646 Actions.
BuildUnaryOp(
nullptr, {}, UO_Minus, BuildVarRef(NewStep)));
5648 Actions.
BuildBinOp(
nullptr, {}, BO_Div, BackwardRange, NegIncAmount));
5652 {}, {}, IsNegStep, BackwardDist, ForwardDist));
5654 assert((Rel == BO_LT || Rel == BO_LE || Rel == BO_GE || Rel == BO_GT) &&
5655 "Expected one of these relational operators");
5662 nullptr, {}, Rel, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5666 if (Rel == BO_GE || Rel == BO_GT)
5668 nullptr, {}, BO_Sub, BuildVarRef(NewStart), BuildVarRef(NewStop)));
5671 nullptr, {}, BO_Sub, BuildVarRef(NewStop), BuildVarRef(NewStart)));
5677 if (Rel == BO_LE || Rel == BO_GE) {
5689 Expr *Divisor = BuildVarRef(NewStep);
5690 if (Rel == BO_GE || Rel == BO_GT)
5693 Expr *DivisorMinusOne =
5696 Actions.
BuildBinOp(
nullptr, {}, BO_Add, Range, DivisorMinusOne));
5698 Actions.
BuildBinOp(
nullptr, {}, BO_Div, RangeRoundUp, Divisor));
5708 Actions.
getCurScope(), {}, BO_Assign, DistRef, Dist));
5709 BodyStmts.push_back(ResultAssign);
5741 {
"Logical", LogicalTy},
5752 assert(!
Invalid &&
"Expecting capture-by-value to work.");
5761 TargetParam, LoopVarTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5764 IndvarParam, LogicalTy,
VK_LValue, {},
nullptr,
nullptr, {},
nullptr);
5767 CaptureVars Recap(Actions);
5772 Actions.
BuildBinOp(
nullptr, {}, BO_Mul, NewStep, LogicalRef));
5787 BO_Assign, TargetRef, Advanced));
5800 if (
auto *For = dyn_cast<ForStmt>(AStmt)) {
5802 if (
auto *LCVarDeclStmt = dyn_cast<DeclStmt>(
Init)) {
5805 }
else if (
auto *LCAssign = dyn_cast<BinaryOperator>(
Init)) {
5807 assert(LCAssign->getOpcode() == BO_Assign &&
5808 "init part must be a loop variable assignment");
5812 llvm_unreachable(
"Cannot determine loop variable");
5815 Cond = For->getCond();
5816 Inc = For->getInc();
5817 }
else if (
auto *RangeFor = dyn_cast<CXXForRangeStmt>(AStmt)) {
5818 DeclStmt *BeginStmt = RangeFor->getBeginStmt();
5820 LUVDecl = RangeFor->getLoopVariable();
5822 Cond = RangeFor->getCond();
5823 Inc = RangeFor->getInc();
5825 llvm_unreachable(
"unhandled kind of loop");
5834 if (
auto *CondBinExpr = dyn_cast<BinaryOperator>(
Cond)) {
5835 LHS = CondBinExpr->getLHS();
5836 RHS = CondBinExpr->getRHS();
5837 CondRel = CondBinExpr->getOpcode();
5838 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(
Cond)) {
5839 assert(CondCXXOp->getNumArgs() == 2 &&
"Comparison should have 2 operands");
5840 LHS = CondCXXOp->getArg(0);
5841 RHS = CondCXXOp->getArg(1);
5842 switch (CondCXXOp->getOperator()) {
5843 case OO_ExclaimEqual:
5855 case OO_GreaterEqual:
5859 llvm_unreachable(
"unexpected iterator operator");
5862 llvm_unreachable(
"unexpected loop condition");
5867 std::swap(LHS, RHS);
5884 if (
auto *IncUn = dyn_cast<UnaryOperator>(Inc)) {
5886 switch (IncUn->getOpcode()) {
5896 llvm_unreachable(
"unhandled unary increment operator");
5900 llvm::APInt(Ctx.
getIntWidth(LogicalTy), Direction,
true),
5902 }
else if (
auto *IncBin = dyn_cast<BinaryOperator>(Inc)) {
5903 if (IncBin->getOpcode() == BO_AddAssign) {
5904 Step = IncBin->getRHS();
5905 }
else if (IncBin->getOpcode() == BO_SubAssign) {
5907 SemaRef.BuildUnaryOp(
nullptr, {}, UO_Minus, IncBin->getRHS()));
5909 llvm_unreachable(
"unhandled binary increment operator");
5910 }
else if (
auto *CondCXXOp = dyn_cast<CXXOperatorCallExpr>(Inc)) {
5911 switch (CondCXXOp->getOperator()) {
5914 Ctx, llvm::APInt(Ctx.
getIntWidth(LogicalTy), 1), LogicalTy, {});
5918 Ctx, llvm::APInt(Ctx.
getIntWidth(LogicalTy), -1), LogicalTy, {});
5921 Step = CondCXXOp->getArg(1);
5925 SemaRef.BuildUnaryOp(
nullptr, {}, UO_Minus, CondCXXOp->getArg(1)));
5928 llvm_unreachable(
"unhandled overloaded increment operator");
5931 llvm_unreachable(
"unknown increment expression");
5939 nullptr,
nullptr, {},
nullptr);
5940 return OMPCanonicalLoop::create(
getASTContext(), AStmt, DistanceFunc,
5941 LoopVarFunc, LVRef);
5953 "Loop transformation directive expected");
5954 return LoopTransform;
5961 Expr *UnresolvedMapper);
5972 for (
int Cnt = 0, EndCnt = Clauses.size(); Cnt < EndCnt; ++Cnt) {
5973 auto *
C = dyn_cast<OMPMapClause>(Clauses[Cnt]);
5977 auto *MI =
C->mapperlist_begin();
5978 for (
auto I =
C->varlist_begin(), End =
C->varlist_end(); I != End;
5997 if (
const auto *ATy = BaseType->getAsArrayTypeUnsafe())
5998 ElemType = ATy->getElementType();
6001 CanonType = ElemType;
6006 1, {CanonType,
nullptr});
6007 llvm::DenseMap<const Type *, Expr *> Visited;
6010 while (!Types.empty()) {
6013 std::tie(BaseType, CurFD) = Types.pop_back_val();
6014 while (ParentChain.back().second == 0)
6015 ParentChain.pop_back();
6016 --ParentChain.back().second;
6017 if (BaseType.isNull())
6020 const RecordDecl *RD = BaseType.getCanonicalType()->getAsRecordDecl();
6023 auto It = Visited.find(BaseType.getTypePtr());
6024 if (It == Visited.end()) {
6032 S, Stack->getCurScope(), MapperIdScopeSpec, DefaultMapperId,
6036 It = Visited.try_emplace(BaseType.getTypePtr(), ER.
get()).first;
6043 Expr *BaseExpr = OE;
6044 for (
const auto &P : ParentChain) {
6062 SubExprs.push_back(BaseExpr);
6066 bool FirstIter =
true;
6076 ParentChain.emplace_back(CurFD, 1);
6078 ++ParentChain.back().second;
6080 Types.emplace_back(FieldTy, FD);
6084 if (SubExprs.empty())
6089 nullptr,
C->getMapTypeModifiers(),
C->getMapTypeModifiersLoc(),
6090 MapperIdScopeSpec, MapperId,
C->getMapType(),
6093 Clauses.push_back(NewClause);
6100class TeamsLoopChecker final :
public ConstStmtVisitor<TeamsLoopChecker> {
6107 void VisitOMPExecutableDirective(
const OMPExecutableDirective *D) {
6108 if (D->getDirectiveKind() == llvm::omp::Directive::OMPD_loop) {
6109 if (
const auto *
C = D->getSingleClause<OMPBindClause>())
6110 if (
C->getBindKind() == OMPC_BIND_parallel) {
6111 TeamsLoopCanBeParallelFor =
false;
6116 for (
const Stmt *Child : D->children())
6121 void VisitCallExpr(
const CallExpr *
C) {
6126 bool IsOpenMPAPI =
false;
6127 auto *FD = dyn_cast_or_null<FunctionDecl>(
C->getCalleeDecl());
6129 std::string Name = FD->getNameInfo().getAsString();
6130 IsOpenMPAPI = Name.find(
"omp_") == 0;
6132 TeamsLoopCanBeParallelFor =
6133 IsOpenMPAPI || SemaRef.getLangOpts().OpenMPNoNestedParallelism;
6134 if (!TeamsLoopCanBeParallelFor)
6137 for (
const Stmt *Child :
C->children())
6142 void VisitCapturedStmt(
const CapturedStmt *S) {
6148 void VisitStmt(
const Stmt *S) {
6151 for (
const Stmt *Child : S->
children())
6155 explicit TeamsLoopChecker(Sema &SemaRef)
6156 : SemaRef(SemaRef), TeamsLoopCanBeParallelFor(
true) {}
6159 bool TeamsLoopCanBeParallelFor;
6164 TeamsLoopChecker Checker(SemaRef);
6165 Checker.Visit(AStmt);
6166 return Checker.teamsLoopCanBeParallelFor();
6180 OMPExecutableDirective::getSingleClause<OMPBindClause>(Clauses))
6181 BindKind = BC->getBindKind();
6193 BindKind = OMPC_BIND_thread;
6195 getLeafConstructsOrSelf(ParentDirective);
6197 if (ParentDirective == OMPD_unknown) {
6199 diag::err_omp_bind_required_on_loop);
6200 }
else if (ParentLeafs.back() == OMPD_parallel) {
6201 BindKind = OMPC_BIND_parallel;
6202 }
else if (ParentLeafs.back() == OMPD_teams) {
6203 BindKind = OMPC_BIND_teams;
6211 ClausesWithImplicit.push_back(
C);
6215 if (Kind == OMPD_loop && BindKind == OMPC_BIND_teams) {
6217 if (
C->getClauseKind() == OMPC_reduction)
6219 diag::err_omp_loop_reduction_clause);
6226 BindKind, StartLoc)) {
6233 Diag(StartLoc, diag::warn_hip_omp_target_directives);
6236 bool ErrorFound =
false;
6237 ClausesWithImplicit.append(Clauses.begin(), Clauses.end());
6239 if (AStmt && !
SemaRef.CurContext->isDependentContext() &&
6247 while (--ThisCaptureLevel >= 0)
6249 DSAChecker.Visit(S);
6257 if (CaptureRegions.size() > 1 && CaptureRegions.front() == OMPD_task)
6259 DSAChecker.visitSubCaptures(CS);
6261 if (DSAChecker.isErrorFound())
6264 VarsWithInheritedDSA = DSAChecker.getVarsWithInheritedDSA();
6265 VariableImplicitInfo ImpInfo = DSAChecker.getImplicitInfo();
6268 ImplicitMapModifiersLoc[VariableImplicitInfo::DefaultmapKindNum];
6270 SourceLocation PresentModifierLocs[VariableImplicitInfo::DefaultmapKindNum];
6272 if (
auto *DMC = dyn_cast<OMPDefaultmapClause>(
C))
6273 if (DMC->getDefaultmapModifier() == OMPC_DEFAULTMAP_MODIFIER_present)
6274 PresentModifierLocs[DMC->getDefaultmapKind()] =
6275 DMC->getDefaultmapModifierLoc();
6279 llvm::enum_seq_inclusive<OpenMPDefaultmapClauseKind>(
6281 std::fill_n(std::back_inserter(ImplicitMapModifiersLoc[K]),
6282 ImpInfo.MapModifiers[K].size(), PresentModifierLocs[K]);
6286 if (
auto *IRC = dyn_cast<OMPInReductionClause>(
C)) {
6287 for (
Expr *E : IRC->taskgroup_descriptors())
6289 ImpInfo.Firstprivates.insert(E);
6294 if (
auto *DC = dyn_cast<OMPDetachClause>(
C))
6295 ImpInfo.Firstprivates.insert(DC->getEventHandler());
6297 if (!ImpInfo.Firstprivates.empty()) {
6301 ClausesWithImplicit.push_back(
Implicit);
6303 ImpInfo.Firstprivates.size();
6308 if (!ImpInfo.Privates.empty()) {
6312 ClausesWithImplicit.push_back(
Implicit);
6314 ImpInfo.Privates.size();
6323 if (
getLangOpts().OpenMP >= 50 && Kind != OMPD_target &&
6327 if (
auto *RC = dyn_cast<OMPReductionClause>(
C))
6328 for (
Expr *E : RC->varlist())
6330 ImplicitExprs.emplace_back(E);
6332 if (!ImplicitExprs.empty()) {
6338 MapperIdScopeSpec, MapperId, OMPC_MAP_tofrom,
6341 ClausesWithImplicit.emplace_back(
Implicit);
6344 for (
unsigned I = 0; I < VariableImplicitInfo::DefaultmapKindNum; ++I) {
6345 int ClauseKindCnt = -1;
6346 for (
unsigned J = 0; J < VariableImplicitInfo::MapKindNum; ++J) {
6349 if (ImplicitMap.empty())
6355 nullptr, ImpInfo.MapModifiers[I], ImplicitMapModifiersLoc[I],
6356 MapperIdScopeSpec, MapperId, K,
true,
6359 ClausesWithImplicit.emplace_back(
Implicit);
6371 ClausesWithImplicit);
6381 VarsWithInheritedDSA);
6396 assert(ClausesWithImplicit.empty() &&
6397 "reverse directive does not support any clauses");
6400 case OMPD_interchange:
6410 VarsWithInheritedDSA);
6414 EndLoc, VarsWithInheritedDSA);
6421 assert(ClausesWithImplicit.empty() &&
6422 "No clauses are allowed for 'omp section' directive");
6430 assert(ClausesWithImplicit.empty() &&
6431 "No clauses are allowed for 'omp master' directive");
6442 case OMPD_parallel_for:
6444 EndLoc, VarsWithInheritedDSA);
6446 case OMPD_parallel_for_simd:
6448 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6454 case OMPD_parallel_master:
6458 case OMPD_parallel_masked:
6462 case OMPD_parallel_sections:
6470 case OMPD_taskyield:
6471 assert(ClausesWithImplicit.empty() &&
6472 "No clauses are allowed for 'omp taskyield' directive");
6473 assert(AStmt ==
nullptr &&
6474 "No associated statement allowed for 'omp taskyield' directive");
6478 assert(AStmt ==
nullptr &&
6479 "No associated statement allowed for 'omp error' directive");
6483 assert(ClausesWithImplicit.empty() &&
6484 "No clauses are allowed for 'omp barrier' directive");
6485 assert(AStmt ==
nullptr &&
6486 "No associated statement allowed for 'omp barrier' directive");
6490 assert(AStmt ==
nullptr &&
6491 "No associated statement allowed for 'omp taskwait' directive");
6494 case OMPD_taskgroup:
6499 assert(AStmt ==
nullptr &&
6500 "No associated statement allowed for 'omp flush' directive");
6504 assert(AStmt ==
nullptr &&
6505 "No associated statement allowed for 'omp depobj' directive");
6509 assert(AStmt ==
nullptr &&
6510 "No associated statement allowed for 'omp scan' directive");
6529 case OMPD_target_parallel:
6533 case OMPD_target_parallel_for:
6535 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6537 case OMPD_cancellation_point:
6538 assert(ClausesWithImplicit.empty() &&
6539 "No clauses are allowed for 'omp cancellation point' directive");
6540 assert(AStmt ==
nullptr &&
"No associated statement allowed for 'omp "
6541 "cancellation point' directive");
6545 assert(AStmt ==
nullptr &&
6546 "No associated statement allowed for 'omp cancel' directive");
6550 case OMPD_target_data:
6554 case OMPD_target_enter_data:
6558 case OMPD_target_exit_data:
6564 EndLoc, VarsWithInheritedDSA);
6566 case OMPD_taskloop_simd:
6568 EndLoc, VarsWithInheritedDSA);
6570 case OMPD_master_taskloop:
6572 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6574 case OMPD_masked_taskloop:
6576 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6578 case OMPD_master_taskloop_simd:
6580 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6582 case OMPD_masked_taskloop_simd:
6584 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6586 case OMPD_parallel_master_taskloop:
6588 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6590 case OMPD_parallel_masked_taskloop:
6592 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6594 case OMPD_parallel_master_taskloop_simd:
6596 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6598 case OMPD_parallel_masked_taskloop_simd:
6600 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6602 case OMPD_distribute:
6604 EndLoc, VarsWithInheritedDSA);
6606 case OMPD_target_update:
6610 case OMPD_distribute_parallel_for:
6612 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6614 case OMPD_distribute_parallel_for_simd:
6616 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6618 case OMPD_distribute_simd:
6620 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6622 case OMPD_target_parallel_for_simd:
6624 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6626 case OMPD_target_simd:
6628 EndLoc, VarsWithInheritedDSA);
6630 case OMPD_teams_distribute:
6632 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6634 case OMPD_teams_distribute_simd:
6636 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6638 case OMPD_teams_distribute_parallel_for_simd:
6640 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6642 case OMPD_teams_distribute_parallel_for:
6644 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6646 case OMPD_target_teams:
6650 case OMPD_target_teams_distribute:
6652 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6654 case OMPD_target_teams_distribute_parallel_for:
6656 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6658 case OMPD_target_teams_distribute_parallel_for_simd:
6660 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6662 case OMPD_target_teams_distribute_simd:
6664 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6667 assert(AStmt ==
nullptr &&
6668 "No associated statement allowed for 'omp interop' directive");
6677 EndLoc, VarsWithInheritedDSA);
6679 case OMPD_teams_loop:
6681 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6683 case OMPD_target_teams_loop:
6685 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6687 case OMPD_parallel_loop:
6689 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6691 case OMPD_target_parallel_loop:
6693 ClausesWithImplicit, AStmt, StartLoc, EndLoc, VarsWithInheritedDSA);
6695 case OMPD_declare_target:
6696 case OMPD_end_declare_target:
6697 case OMPD_threadprivate:
6699 case OMPD_declare_reduction:
6700 case OMPD_declare_mapper:
6701 case OMPD_declare_simd:
6703 case OMPD_declare_variant:
6704 case OMPD_begin_declare_variant:
6705 case OMPD_end_declare_variant:
6706 llvm_unreachable(
"OpenMP Directive is not allowed");
6709 llvm_unreachable(
"Unknown OpenMP directive");
6712 ErrorFound = Res.
isInvalid() || ErrorFound;
6716 if (
DSAStack->getDefaultDSA() == DSA_none ||
6717 DSAStack->getDefaultDSA() == DSA_private ||
6718 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6721 switch (
C->getClauseKind()) {
6722 case OMPC_num_threads:
6723 case OMPC_dist_schedule:
6740 case OMPC_grainsize:
6741 case OMPC_num_tasks:
6744 case OMPC_novariants:
6745 case OMPC_nocontext:
6752 case OMPC_num_teams:
6753 case OMPC_thread_limit:
6760 case OMPC_proc_bind:
6762 case OMPC_firstprivate:
6763 case OMPC_lastprivate:
6765 case OMPC_reduction:
6766 case OMPC_task_reduction:
6767 case OMPC_in_reduction:
6771 case OMPC_copyprivate:
6774 case OMPC_mergeable:
6791 case OMPC_defaultmap:
6794 case OMPC_use_device_ptr:
6795 case OMPC_use_device_addr:
6796 case OMPC_is_device_ptr:
6797 case OMPC_has_device_addr:
6798 case OMPC_nontemporal:
6801 case OMPC_inclusive:
6802 case OMPC_exclusive:
6803 case OMPC_uses_allocators:
6810 case OMPC_allocator:
6813 case OMPC_threadprivate:
6814 case OMPC_groupprivate:
6817 case OMPC_unified_address:
6818 case OMPC_unified_shared_memory:
6819 case OMPC_reverse_offload:
6820 case OMPC_dynamic_allocators:
6821 case OMPC_atomic_default_mem_order:
6822 case OMPC_self_maps:
6823 case OMPC_device_type:
6828 llvm_unreachable(
"Unexpected clause");
6830 for (
Stmt *CC :
C->children()) {
6832 DSAChecker.Visit(CC);
6835 for (
const auto &P : DSAChecker.getVarsWithInheritedDSA())
6836 VarsWithInheritedDSA[P.getFirst()] = P.getSecond();
6838 for (
const auto &P : VarsWithInheritedDSA) {
6842 if (
DSAStack->getDefaultDSA() == DSA_none ||
6843 DSAStack->getDefaultDSA() == DSA_private ||
6844 DSAStack->getDefaultDSA() == DSA_firstprivate) {
6845 Diag(P.second->getExprLoc(), diag::err_omp_no_dsa_for_variable)
6846 << P.first << P.second->getSourceRange();
6847 Diag(
DSAStack->getDefaultDSALocation(), diag::note_omp_default_dsa_none);
6849 Diag(P.second->getExprLoc(),
6850 diag::err_omp_defaultmap_no_attr_for_variable)
6851 << P.first << P.second->getSourceRange();
6853 diag::note_omp_defaultmap_attr_none);
6859 if (isAllowedClauseForDirective(D, OMPC_if,
getLangOpts().OpenMP))
6860 AllowedNameModifiers.push_back(D);
6862 if (!AllowedNameModifiers.empty())
6869 if (!
SemaRef.CurContext->isDependentContext() &&
6876 DSAStack->addTargetDirLocation(StartLoc);
6887 assert(Aligneds.size() == Alignments.size());
6888 assert(Linears.size() == LinModifiers.size());
6889 assert(Linears.size() == Steps.size());
6893 const int SimdId = 0;
6895 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
6900 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
6901 ADecl = FTD->getTemplatedDecl();
6903 auto *FD = dyn_cast<FunctionDecl>(ADecl);
6905 Diag(ADecl->
getLocation(), diag::err_omp_function_expected) << SimdId;
6921 llvm::DenseMap<const Decl *, const Expr *> UniformedArgs;
6922 const Expr *UniformedLinearThis =
nullptr;
6923 for (
const Expr *E : Uniforms) {
6925 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6926 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl()))
6927 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6928 FD->getParamDecl(PVD->getFunctionScopeIndex())
6930 UniformedArgs.try_emplace(PVD->getCanonicalDecl(), E);
6934 UniformedLinearThis = E;
6948 llvm::DenseMap<const Decl *, const Expr *> AlignedArgs;
6949 const Expr *AlignedThis =
nullptr;
6950 for (
const Expr *E : Aligneds) {
6952 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
6953 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
6955 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
6956 FD->getParamDecl(PVD->getFunctionScopeIndex())
6960 auto [It, Inserted] = AlignedArgs.try_emplace(CanonPVD, E);
6965 Diag(It->second->getExprLoc(), diag::note_omp_explicit_dsa)
6970 .getNonReferenceType()
6971 .getUnqualifiedType()
6972 .getCanonicalType();
6975 Diag(E->
getExprLoc(), diag::err_omp_aligned_expected_array_or_ptr)
6977 Diag(PVD->getLocation(), diag::note_previous_decl) << PVD;
7001 for (
Expr *E : Alignments) {
7005 NewAligns.push_back(Align.
get());
7016 llvm::DenseMap<const Decl *, const Expr *> LinearArgs;
7017 const bool IsUniformedThis = UniformedLinearThis !=
nullptr;
7018 auto MI = LinModifiers.begin();
7019 for (
const Expr *E : Linears) {
7023 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E))
7024 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7026 if (FD->getNumParams() > PVD->getFunctionScopeIndex() &&
7027 FD->getParamDecl(PVD->getFunctionScopeIndex())
7031 if (
auto It = LinearArgs.find(CanonPVD); It != LinearArgs.end()) {
7036 Diag(It->second->getExprLoc(), diag::note_omp_explicit_dsa)
7041 if (
auto It = UniformedArgs.find(CanonPVD);
7042 It != UniformedArgs.end()) {
7047 Diag(It->second->getExprLoc(), diag::note_omp_explicit_dsa)
7051 LinearArgs[CanonPVD] = E;
7057 PVD->getOriginalType(),
7063 if (UniformedLinearThis) {
7069 Diag(UniformedLinearThis->
getExprLoc(), diag::note_omp_explicit_dsa)
7074 UniformedLinearThis = E;
7085 Expr *Step =
nullptr;
7086 Expr *NewStep =
nullptr;
7088 for (
Expr *E : Steps) {
7090 if (Step == E || !E) {
7091 NewSteps.push_back(E ? NewStep :
nullptr);
7095 if (
const auto *DRE = dyn_cast<DeclRefExpr>(Step))
7096 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7098 if (UniformedArgs.count(CanonPVD) == 0) {
7105 NewSteps.push_back(Step);
7120 .VerifyIntegerConstantExpression(
7124 NewSteps.push_back(NewStep);
7126 auto *NewAttr = OMPDeclareSimdDeclAttr::CreateImplicit(
7128 Uniforms.size(),
const_cast<Expr **
>(Aligneds.data()), Aligneds.size(),
7129 const_cast<Expr **
>(NewAligns.data()), NewAligns.size(),
7130 const_cast<Expr **
>(Linears.data()), Linears.size(),
7131 const_cast<unsigned *
>(LinModifiers.data()), LinModifiers.size(),
7132 NewSteps.data(), NewSteps.size(), SR);
7142 "Unexpected directive category");
7151 llvm_unreachable(
"Unknown OpenMP directive");
7160 "Expected function type with prototype.");
7162 "Expected function with type with no prototype.");
7164 "Expected function with prototype.");
7172 Param->setScopeInfo(0, Params.size());
7173 Param->setImplicit();
7174 Params.push_back(Param);
7177 FD->setParams(Params);
7184 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
7185 FD = UTemplDecl->getTemplatedDecl();
7188 assert(FD &&
"Expected a function declaration!");
7193 if (!
SemaRef.inTemplateInstantiation()) {
7194 for (OMPAssumeAttr *AA : OMPAssumeScoped)
7197 for (OMPAssumeAttr *AA : OMPAssumeGlobal)
7201SemaOpenMP::OMPDeclareVariantScope::OMPDeclareVariantScope(
OMPTraitInfo &TI)
7202 : TI(&TI), NameSuffix(TI.getMangledName()) {}
7210 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
7213 bool IsTemplated = !TemplateParamLists.empty();
7216 llvm::omp::TraitProperty::implementation_extension_allow_templates))
7233 for (
auto *Candidate : Lookup) {
7234 auto *CandidateDecl = Candidate->getUnderlyingDecl();
7239 if (
SemaRef.Context.isSameTemplateParameterList(
7240 FTD->getTemplateParameters(), TemplateParamLists.back()))
7241 UDecl = FTD->getTemplatedDecl();
7242 }
else if (!IsTemplated)
7243 UDecl = dyn_cast<FunctionDecl>(CandidateDecl);
7257 FType, UDeclTy,
false,
7264 Bases.push_back(UDecl);
7268 llvm::omp::TraitProperty::implementation_extension_disable_implicit_base);
7270 if (Bases.empty() && UseImplicitBase) {
7272 Decl *BaseD =
SemaRef.HandleDeclarator(S, D, TemplateParamLists);
7274 if (
auto *BaseTemplD = dyn_cast<FunctionTemplateDecl>(BaseD))
7275 Bases.push_back(BaseTemplD->getTemplatedDecl());
7280 std::string MangledName;
7283 MangledName += DVScope.NameSuffix;
7298 if (
auto *UTemplDecl = dyn_cast<FunctionTemplateDecl>(D))
7299 FD = UTemplDecl->getTemplatedDecl();
7307 OMPDeclareVariantScope &DVScope = OMPDeclareVariantScopes.back();
7308 auto *OMPDeclareVariantA = OMPDeclareVariantAttr::CreateImplicit(
7315 BaseFD->addAttr(OMPDeclareVariantA);
7335 CalleeFnDecl->
getName().starts_with_insensitive(
"omp_")) {
7338 Diag(LParenLoc, diag::err_omp_unexpected_call_to_omp_runtime_api);
7341 if (!CalleeFnDecl->
hasAttr<OMPDeclareVariantAttr>())
7346 CE](StringRef ISATrait) {
7360 while (CalleeFnDecl) {
7361 for (OMPDeclareVariantAttr *A :
7363 Expr *VariantRef = A->getVariantFuncRef();
7365 VariantMatchInfo VMI;
7368 if (!isVariantApplicableInContext(VMI, OMPCtx,
7372 VMIs.push_back(VMI);
7373 Exprs.push_back(VariantRef);
7381 int BestIdx = getBestVariantMatchForContext(VMIs, OMPCtx);
7401 if (
auto *SpecializedMethod = dyn_cast<CXXMethodDecl>(BestDecl)) {
7402 auto *MemberCall = dyn_cast<CXXMemberCallExpr>(CE);
7404 Context, MemberCall->getImplicitObjectArgument(),
7405 false, SpecializedMethod, Context.BoundMemberTy,
7406 MemberCall->getValueKind(), MemberCall->getObjectKind());
7408 NewCall =
SemaRef.BuildCallExpr(
Scope, BestExpr, LParenLoc, ArgExprs,
7409 RParenLoc, ExecConfig);
7411 if (
CallExpr *NCE = dyn_cast<CallExpr>(NewCall.
get())) {
7412 FunctionDecl *NewCalleeFnDecl = NCE->getDirectCallee();
7414 CalleeFnType, NewCalleeFnDecl->
getType(),
7425 VMIs.erase(VMIs.begin() + BestIdx);
7426 Exprs.erase(Exprs.begin() + BestIdx);
7427 }
while (!VMIs.empty());
7434std::optional<std::pair<FunctionDecl *, Expr *>>
7438 unsigned NumAppendArgs,
7442 return std::nullopt;
7444 const int VariantId = 1;
7447 Diag(SR.
getBegin(), diag::err_omp_single_decl_in_declare_simd_variant)
7449 return std::nullopt;
7452 if (
auto *FTD = dyn_cast<FunctionTemplateDecl>(ADecl))
7453 ADecl = FTD->getTemplatedDecl();
7456 auto *FD = dyn_cast<FunctionDecl>(ADecl);
7460 return std::nullopt;
7463 auto &&HasMultiVersionAttributes = [](
const FunctionDecl *FD) {
7466 return FD->isMultiVersion() || FD->
hasAttr<TargetAttr>();
7469 if (HasMultiVersionAttributes(FD)) {
7470 Diag(FD->
getLocation(), diag::err_omp_declare_variant_incompat_attributes)
7472 return std::nullopt;
7477 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_used)
7482 if (!FD->isThisDeclarationADefinition() && FD->isDefined(
Definition) &&
7484 Diag(SR.
getBegin(), diag::warn_omp_declare_variant_after_emitted)
7489 Diag(SR.
getBegin(), diag::err_omp_function_expected) << VariantId;
7490 return std::nullopt;
7493 auto ShouldDelayChecks = [](
Expr *&E, bool) {
7499 if (FD->isDependentContext() || ShouldDelayChecks(VariantRef,
false) ||
7501 return std::make_pair(FD, VariantRef);
7504 auto HandleNonConstantScoresAndConditions = [
this](
Expr *&E,
7505 bool IsScore) ->
bool {
7511 Diag(E->
getExprLoc(), diag::warn_omp_declare_variant_score_not_constant)
7519 diag::err_omp_declare_variant_user_condition_not_constant)
7525 return std::nullopt;
7528 if (NumAppendArgs) {
7531 Diag(FD->
getLocation(), diag::err_omp_declare_variant_prototype_required)
7533 return std::nullopt;
7542 TD = dyn_cast_or_null<TypeDecl>(ND);
7545 Diag(SR.
getBegin(), diag::err_omp_interop_type_not_found) << SR;
7546 return std::nullopt;
7551 if (PTy->isVariadic()) {
7552 Diag(FD->
getLocation(), diag::err_omp_append_args_with_varargs) << SR;
7553 return std::nullopt;
7556 Params.append(PTy->param_type_begin(), PTy->param_type_end());
7557 Params.insert(Params.end(), NumAppendArgs, InteropType);
7558 AdjustedFnType = Context.getFunctionType(PTy->getReturnType(), Params,
7559 PTy->getExtProtoInfo());
7567 auto *
Method = dyn_cast<CXXMethodDecl>(FD);
7569 FnPtrType = Context.getMemberPointerType(
7570 AdjustedFnType, std::nullopt,
Method->getParent());
7582 return std::nullopt;
7584 VariantRef = ER.
get();
7586 FnPtrType = Context.getPointerType(AdjustedFnType);
7588 QualType VarianPtrType = Context.getPointerType(VariantRef->
getType());
7592 false, Sema::AllowedExplicit::None,
7598 diag::err_omp_declare_variant_incompat_types)
7602 return std::nullopt;
7604 VariantRefCast =
SemaRef.PerformImplicitConversion(
7608 return std::nullopt;
7612 Expr *PossibleAddrOfVariantRef = VariantRefCast.
get();
7613 if (
auto *UO = dyn_cast<UnaryOperator>(
7615 VariantRefCast = UO->getSubExpr();
7624 return std::nullopt;
7632 return std::nullopt;
7634 auto *NewFD = dyn_cast_or_null<FunctionDecl>(DRE->getDecl());
7638 return std::nullopt;
7643 diag::err_omp_declare_variant_same_base_function)
7645 return std::nullopt;
7651 Context.mergeFunctionTypes(AdjustedFnType, NewFD->getType());
7654 diag::err_omp_declare_variant_incompat_types)
7655 << NewFD->getType() << FD->
getType() << (NumAppendArgs ? 1 : 0)
7657 return std::nullopt;
7662 else if (NewFD->getType()->isFunctionNoProtoType())
7668 if (NewFD->hasAttrs() && NewFD->hasAttr<OMPDeclareVariantAttr>()) {
7670 diag::warn_omp_declare_variant_marked_as_declare_variant)
7673 NewFD->specific_attr_begin<OMPDeclareVariantAttr>()->
getRange();
7674 Diag(SR.
getBegin(), diag::note_omp_marked_declare_variant_here) << SR;
7675 return std::nullopt;
7678 enum DoesntSupport {
7687 if (
const auto *CXXFD = dyn_cast<CXXMethodDecl>(FD)) {
7688 if (CXXFD->isVirtual()) {
7691 return std::nullopt;
7697 return std::nullopt;
7703 return std::nullopt;
7707 if (FD->isDeleted()) {
7710 return std::nullopt;
7713 if (FD->isDefaulted()) {
7716 return std::nullopt;
7719 if (FD->isConstexpr()) {
7721 << (NewFD->isConsteval() ? ConstevalFuncs : ConstexprFuncs);
7722 return std::nullopt;
7726 if (
SemaRef.areMultiversionVariantFunctionsCompatible(
7732 SemaRef.PDiag(diag::err_omp_declare_variant_doesnt_support)),
7734 SemaRef.PDiag(diag::err_omp_declare_variant_diff)
7738 return std::nullopt;
7756 llvm::append_range(AllAdjustArgs, AdjustArgsNothing);
7757 llvm::append_range(AllAdjustArgs, AdjustArgsNeedDevicePtr);
7758 llvm::append_range(AllAdjustArgs, AdjustArgsNeedDeviceAddr);
7760 if (!AllAdjustArgs.empty() || !AppendArgs.empty()) {
7761 VariantMatchInfo VMI;
7763 if (!llvm::is_contained(
7764 VMI.ConstructTraits,
7765 llvm::omp::TraitProperty::construct_dispatch_dispatch)) {
7766 if (!AllAdjustArgs.empty())
7767 Diag(AdjustArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7769 if (!AppendArgs.empty())
7770 Diag(AppendArgsLoc, diag::err_omp_clause_requires_dispatch_construct)
7781 for (
Expr *E : AllAdjustArgs) {
7783 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E)) {
7784 if (
const auto *PVD = dyn_cast<ParmVarDecl>(DRE->getDecl())) {
7786 if (FD->
getNumParams() > PVD->getFunctionScopeIndex() &&
7790 if (!AdjustVars.insert(CanonPVD).second) {
7791 Diag(DRE->getLocation(), diag::err_omp_adjust_arg_multiple_clauses)
7800 Diag(E->
getExprLoc(), diag::err_omp_param_or_this_in_clause) << FD << 0;
7809 for (
Expr *E : AdjustArgsNeedDeviceAddr) {
7811 if (
const auto *DRE = dyn_cast<DeclRefExpr>(E)) {
7812 if (
const auto *VD = dyn_cast<VarDecl>(DRE->getDecl())) {
7815 diag::err_omp_non_by_ref_need_device_addr_modifier_argument);
7821 auto *NewAttr = OMPDeclareVariantAttr::CreateImplicit(
7823 const_cast<Expr **
>(AdjustArgsNothing.data()), AdjustArgsNothing.size(),
7824 const_cast<Expr **
>(AdjustArgsNeedDevicePtr.data()),
7825 AdjustArgsNeedDevicePtr.size(),
7826 const_cast<Expr **
>(AdjustArgsNeedDeviceAddr.data()),
7827 AdjustArgsNeedDeviceAddr.size(),
7828 const_cast<OMPInteropInfo *
>(AppendArgs.data()), AppendArgs.size(), SR);
7834 auto *CS = dyn_cast<CapturedStmt>(AStmt);
7835 assert(CS &&
"Captured statement expected");
7844 ThisCaptureLevel > 1; --ThisCaptureLevel) {
7866 return OMPParallelDirective::Create(
7873struct LoopIterationSpace final {
7876 bool IsStrictCompare =
false;
7878 Expr *PreCond =
nullptr;
7881 Expr *NumIterations =
nullptr;
7883 Expr *CounterVar =
nullptr;
7885 Expr *PrivateCounterVar =
nullptr;
7887 Expr *CounterInit =
nullptr;
7890 Expr *CounterStep =
nullptr;
7892 bool Subtract =
false;
7902 Expr *MinValue =
nullptr;
7906 Expr *MaxValue =
nullptr;
7908 bool IsNonRectangularLB =
false;
7910 bool IsNonRectangularUB =
false;
7913 unsigned LoopDependentIdx = 0;
7917 Expr *FinalCondition =
nullptr;
7924 const llvm::SmallPtrSetImpl<const Decl *> &CollapsedLoopVarDecls;
7925 VarDecl *ForbiddenVar =
nullptr;
7929 explicit ForSubExprChecker(
7930 const llvm::SmallPtrSetImpl<const Decl *> &CollapsedLoopVarDecls)
7931 : CollapsedLoopVarDecls(CollapsedLoopVarDecls) {
7934 ShouldVisitImplicitCode =
true;
7937 bool VisitDeclRefExpr(DeclRefExpr *E)
override {
7942 if (
V->getType()->isReferenceType()) {
7943 VarDecl *VD =
V->getDefinition();
7946 DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(I);
7952 Decl *Canon =
V->getCanonicalDecl();
7953 if (CollapsedLoopVarDecls.contains(Canon)) {
7962 VarDecl *getForbiddenVar()
const {
return ForbiddenVar; }
7963 SourceRange getErrRange()
const {
return ErrLoc; }
7969class OpenMPIterationSpaceChecker {
7973 bool SupportsNonRectangular;
7977 SourceLocation DefaultLoc;
7979 SourceLocation ConditionLoc;
7981 const llvm::SmallPtrSetImpl<const Decl *> &CollapsedLoopVarDecls;
7983 SourceRange InitSrcRange;
7985 SourceRange ConditionSrcRange;
7987 SourceRange IncrementSrcRange;
7989 ValueDecl *LCDecl =
nullptr;
7991 Expr *LCRef =
nullptr;
7997 Expr *Step =
nullptr;
8004 std::optional<bool> TestIsLessOp;
8006 bool TestIsStrictOp =
false;
8008 bool SubtractStep =
false;
8010 const ValueDecl *DepDecl =
nullptr;
8013 std::optional<unsigned> InitDependOnLC;
8016 std::optional<unsigned> CondDependOnLC;
8018 std::optional<unsigned> doesDependOnLoopCounter(
const Stmt *S,
8019 bool IsInitializer);
8025 OpenMPIterationSpaceChecker(
8026 Sema &SemaRef,
bool SupportsNonRectangular, DSAStackTy &Stack,
8027 SourceLocation DefaultLoc,
8028 const llvm::SmallPtrSetImpl<const Decl *> &CollapsedLoopDecls)
8029 : SemaRef(SemaRef), SupportsNonRectangular(SupportsNonRectangular),
8030 Stack(Stack), DefaultLoc(DefaultLoc), ConditionLoc(DefaultLoc),
8031 CollapsedLoopVarDecls(CollapsedLoopDecls) {}
8034 bool checkAndSetInit(Stmt *S,
bool EmitDiags =
true);
8037 bool checkAndSetCond(Expr *S);
8040 bool checkAndSetInc(Expr *S);
8042 ValueDecl *getLoopDecl()
const {
return LCDecl; }
8044 Expr *getLoopDeclRefExpr()
const {
return LCRef; }
8046 SourceRange getInitSrcRange()
const {
return InitSrcRange; }
8048 SourceRange getConditionSrcRange()
const {
return ConditionSrcRange; }
8050 SourceRange getIncrementSrcRange()
const {
return IncrementSrcRange; }
8052 bool shouldSubtractStep()
const {
return SubtractStep; }
8054 bool isStrictTestOp()
const {
return TestIsStrictOp; }
8056 Expr *buildNumIterations(
8057 Scope *S, ArrayRef<LoopIterationSpace> ResultIterSpaces,
bool LimitedType,
8058 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
8061 buildPreCond(Scope *S, Expr *
Cond,
8062 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
8065 buildCounterVar(llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
8066 DSAStackTy &DSA)
const;
8069 Expr *buildPrivateCounterVar()
const;
8073 Expr *buildCounterStep()
const;
8077 buildOrderedLoopData(Scope *S, Expr *Counter,
8078 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
8079 SourceLocation Loc, Expr *Inc =
nullptr,
8082 std::pair<Expr *, Expr *> buildMinMaxValues(
8083 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const;
8085 Expr *buildFinalCondition(Scope *S)
const;
8087 bool dependent()
const;
8089 bool doesInitDependOnLC()
const {
return InitDependOnLC.has_value(); }
8091 bool doesCondDependOnLC()
const {
return CondDependOnLC.has_value(); }
8093 unsigned getLoopDependentIdx()
const {
8094 return InitDependOnLC.value_or(CondDependOnLC.value_or(0));
8100 bool checkAndSetIncRHS(Expr *RHS);
8102 bool setLCDeclAndLB(ValueDecl *NewLCDecl, Expr *NewDeclRefExpr, Expr *NewLB,
8105 bool setUB(Expr *NewUB, std::optional<bool> LessOp,
bool StrictOp,
8106 SourceRange SR, SourceLocation SL);
8108 bool setStep(Expr *NewStep,
bool Subtract);
8111bool OpenMPIterationSpaceChecker::dependent()
const {
8113 assert(!LB && !UB && !Step);
8121bool OpenMPIterationSpaceChecker::setLCDeclAndLB(
ValueDecl *NewLCDecl,
8123 Expr *NewLB,
bool EmitDiags) {
8125 assert(LCDecl ==
nullptr && LB ==
nullptr && LCRef ==
nullptr &&
8126 UB ==
nullptr && Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
8130 LCRef = NewLCRefExpr;
8131 if (
auto *CE = dyn_cast_or_null<CXXConstructExpr>(NewLB))
8133 if ((Ctor->isCopyOrMoveConstructor() ||
8134 Ctor->isConvertingConstructor(
false)) &&
8135 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
8139 InitDependOnLC = doesDependOnLoopCounter(LB,
true);
8143bool OpenMPIterationSpaceChecker::setUB(
Expr *NewUB, std::optional<bool> LessOp,
8147 assert(LCDecl !=
nullptr && LB !=
nullptr && UB ==
nullptr &&
8148 Step ==
nullptr && !TestIsLessOp && !TestIsStrictOp);
8153 TestIsLessOp = LessOp;
8154 TestIsStrictOp = StrictOp;
8155 ConditionSrcRange = SR;
8157 CondDependOnLC = doesDependOnLoopCounter(UB,
false);
8161bool OpenMPIterationSpaceChecker::setStep(
Expr *NewStep,
bool Subtract) {
8163 assert(LCDecl !=
nullptr && LB !=
nullptr && Step ==
nullptr);
8173 NewStep = Val.
get();
8186 std::optional<llvm::APSInt>
Result =
8197 TestIsLessOp = IsConstPos || (IsUnsigned && !Subtract);
8198 if (UB && (IsConstZero ||
8199 (*TestIsLessOp ? (IsConstNeg || (IsUnsigned && Subtract))
8200 : (IsConstPos || (IsUnsigned && !Subtract))))) {
8202 diag::err_omp_loop_incr_not_compatible)
8204 SemaRef.
Diag(ConditionLoc,
8205 diag::note_omp_loop_cond_requires_compatible_incr)
8206 << *TestIsLessOp << ConditionSrcRange;
8209 if (*TestIsLessOp == Subtract) {
8213 Subtract = !Subtract;
8218 SubtractStep = Subtract;
8225class LoopCounterRefChecker final
8229 const ValueDecl *CurLCDecl =
nullptr;
8230 const ValueDecl *DepDecl =
nullptr;
8231 const ValueDecl *PrevDepDecl =
nullptr;
8232 bool IsInitializer =
true;
8233 bool SupportsNonRectangular;
8234 unsigned BaseLoopId = 0;
8235 bool checkDecl(
const Expr *E,
const ValueDecl *VD) {
8237 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_stmt_depends_on_loop_counter)
8238 << (IsInitializer ? 0 : 1);
8241 const auto &&
Data = Stack.isLoopControlVariable(VD);
8246 SmallString<128> Name;
8247 llvm::raw_svector_ostream
OS(Name);
8251 diag::err_omp_wrong_dependency_iterator_type)
8253 SemaRef.Diag(VD->
getLocation(), diag::note_previous_decl) << VD;
8256 if (
Data.first && !SupportsNonRectangular) {
8257 SemaRef.Diag(E->
getExprLoc(), diag::err_omp_invariant_dependency);
8261 (DepDecl || (PrevDepDecl &&
8263 if (!DepDecl && PrevDepDecl)
8264 DepDecl = PrevDepDecl;
8265 SmallString<128> Name;
8266 llvm::raw_svector_ostream
OS(Name);
8270 diag::err_omp_invariant_or_linear_dependency)
8276 BaseLoopId =
Data.first;
8282 bool VisitDeclRefExpr(
const DeclRefExpr *E) {
8283 const ValueDecl *VD = E->
getDecl();
8285 return checkDecl(E, VD);
8288 bool VisitMemberExpr(
const MemberExpr *E) {
8292 return checkDecl(E, VD);
8296 bool VisitStmt(
const Stmt *S) {
8298 for (
const Stmt *Child : S->
children())
8299 Res = (Child && Visit(Child)) || Res;
8302 explicit LoopCounterRefChecker(Sema &SemaRef, DSAStackTy &Stack,
8303 const ValueDecl *CurLCDecl,
bool IsInitializer,
8304 const ValueDecl *PrevDepDecl =
nullptr,
8305 bool SupportsNonRectangular =
true)
8306 : SemaRef(SemaRef), Stack(Stack), CurLCDecl(CurLCDecl),
8307 PrevDepDecl(PrevDepDecl), IsInitializer(IsInitializer),
8308 SupportsNonRectangular(SupportsNonRectangular) {}
8309 unsigned getBaseLoopId()
const {
8310 assert(CurLCDecl &&
"Expected loop dependency.");
8313 const ValueDecl *getDepDecl()
const {
8314 assert(CurLCDecl &&
"Expected loop dependency.");
8320std::optional<unsigned>
8321OpenMPIterationSpaceChecker::doesDependOnLoopCounter(
const Stmt *S,
8322 bool IsInitializer) {
8324 LoopCounterRefChecker LoopStmtChecker(SemaRef, Stack, LCDecl, IsInitializer,
8325 DepDecl, SupportsNonRectangular);
8326 if (LoopStmtChecker.Visit(S)) {
8327 DepDecl = LoopStmtChecker.getDepDecl();
8328 return LoopStmtChecker.getBaseLoopId();
8330 return std::nullopt;
8333bool OpenMPIterationSpaceChecker::checkAndSetInit(
Stmt *S,
bool EmitDiags) {
8344 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_init);
8348 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
8349 if (!ExprTemp->cleanupsHaveSideEffects())
8350 S = ExprTemp->getSubExpr();
8352 if (!CollapsedLoopVarDecls.empty()) {
8353 ForSubExprChecker FSEC{CollapsedLoopVarDecls};
8354 if (!FSEC.TraverseStmt(S)) {
8356 SemaRef.Diag(
Range.getBegin(), diag::err_omp_loop_bad_collapse_var)
8357 <<
Range.getEnd() << 0 << FSEC.getForbiddenVar();
8363 if (
Expr *E = dyn_cast<Expr>(S))
8365 if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8366 if (BO->getOpcode() == BO_Assign) {
8368 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
8369 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->
getDecl()))
8371 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8373 return setLCDeclAndLB(DRE->
getDecl(), DRE, BO->getRHS(), EmitDiags);
8375 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
8376 if (ME->isArrow() &&
8378 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8382 }
else if (
auto *DS = dyn_cast<DeclStmt>(S)) {
8383 if (DS->isSingleDecl()) {
8384 if (
auto *Var = dyn_cast_or_null<VarDecl>(DS->getSingleDecl())) {
8385 if (Var->hasInit() && !Var->getType()->isReferenceType()) {
8389 diag::ext_omp_loop_not_canonical_init)
8391 return setLCDeclAndLB(
8394 Var->getType().getNonReferenceType(),
8396 Var->getInit(), EmitDiags);
8400 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8401 if (CE->getOperator() == OO_Equal) {
8402 Expr *LHS = CE->getArg(0);
8403 if (
auto *DRE = dyn_cast<DeclRefExpr>(LHS)) {
8404 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(DRE->
getDecl()))
8406 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8408 return setLCDeclAndLB(DRE->
getDecl(), DRE, CE->getArg(1), EmitDiags);
8410 if (
auto *ME = dyn_cast<MemberExpr>(LHS)) {
8411 if (ME->isArrow() &&
8413 return setLCDeclAndLB(ME->getMemberDecl(), ME, BO->getRHS(),
8419 if (dependent() || SemaRef.CurContext->isDependentContext())
8422 SemaRef.Diag(S->
getBeginLoc(), diag::err_omp_loop_not_canonical_init)
8434 if (
const auto *CE = dyn_cast_or_null<CXXConstructExpr>(E))
8436 if ((Ctor->isCopyOrMoveConstructor() ||
8437 Ctor->isConvertingConstructor(
false)) &&
8438 CE->getNumArgs() > 0 && CE->getArg(0) !=
nullptr)
8440 if (
const auto *DRE = dyn_cast_or_null<DeclRefExpr>(E)) {
8441 if (
const auto *VD = dyn_cast<VarDecl>(DRE->
getDecl()))
8444 if (
const auto *ME = dyn_cast_or_null<MemberExpr>(E))
8445 if (ME->isArrow() &&
isa<CXXThisExpr>(ME->getBase()->IgnoreParenImpCasts()))
8450bool OpenMPIterationSpaceChecker::checkAndSetCond(
Expr *S) {
8457 bool IneqCondIsCanonical = SemaRef.getLangOpts().OpenMP >= 50;
8459 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_cond)
8460 << (IneqCondIsCanonical ? 1 : 0) << LCDecl;
8466 if (!CollapsedLoopVarDecls.empty()) {
8467 ForSubExprChecker FSEC{CollapsedLoopVarDecls};
8468 if (!FSEC.TraverseStmt(S)) {
8470 SemaRef.Diag(
Range.getBegin(), diag::err_omp_loop_bad_collapse_var)
8471 <<
Range.getEnd() << 1 << FSEC.getForbiddenVar();
8477 auto &&CheckAndSetCond =
8482 if (getInitLCDecl(LHS) == LCDecl)
8483 return setUB(
const_cast<Expr *
>(RHS),
8484 (Opcode == BO_LT || Opcode == BO_LE),
8485 (Opcode == BO_LT || Opcode == BO_GT), SR, OpLoc);
8486 if (getInitLCDecl(RHS) == LCDecl)
8487 return setUB(
const_cast<Expr *
>(LHS),
8488 (Opcode == BO_GT || Opcode == BO_GE),
8489 (Opcode == BO_LT || Opcode == BO_GT), SR, OpLoc);
8490 }
else if (IneqCondIsCanonical && Opcode == BO_NE) {
8491 return setUB(
const_cast<Expr *
>(getInitLCDecl(LHS) == LCDecl ? RHS : LHS),
8495 return std::nullopt;
8497 std::optional<bool> Res;
8498 if (
auto *RBO = dyn_cast<CXXRewrittenBinaryOperator>(S)) {
8501 RBO->getOperatorLoc());
8502 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8503 Res = CheckAndSetCond(BO->getOpcode(), BO->getLHS(), BO->getRHS(),
8504 BO->getSourceRange(), BO->getOperatorLoc());
8505 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8506 if (CE->getNumArgs() == 2) {
8507 Res = CheckAndSetCond(
8509 CE->getArg(1), CE->getSourceRange(), CE->getOperatorLoc());
8514 if (dependent() || SemaRef.CurContext->isDependentContext())
8516 SemaRef.Diag(CondLoc, diag::err_omp_loop_not_canonical_cond)
8517 << (IneqCondIsCanonical ? 1 : 0) << S->
getSourceRange() << LCDecl;
8521bool OpenMPIterationSpaceChecker::checkAndSetIncRHS(
Expr *RHS) {
8528 if (
auto *BO = dyn_cast<BinaryOperator>(RHS)) {
8529 if (BO->isAdditiveOp()) {
8530 bool IsAdd = BO->getOpcode() == BO_Add;
8531 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8532 return setStep(BO->getRHS(), !IsAdd);
8533 if (IsAdd && getInitLCDecl(BO->getRHS()) == LCDecl)
8534 return setStep(BO->getLHS(),
false);
8536 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(RHS)) {
8537 bool IsAdd = CE->getOperator() == OO_Plus;
8538 if ((IsAdd || CE->getOperator() == OO_Minus) && CE->getNumArgs() == 2) {
8539 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8540 return setStep(CE->getArg(1), !IsAdd);
8541 if (IsAdd && getInitLCDecl(CE->getArg(1)) == LCDecl)
8542 return setStep(CE->getArg(0),
false);
8545 if (dependent() || SemaRef.CurContext->isDependentContext())
8547 SemaRef.Diag(RHS->
getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8552bool OpenMPIterationSpaceChecker::checkAndSetInc(
Expr *S) {
8567 SemaRef.Diag(DefaultLoc, diag::err_omp_loop_not_canonical_incr) << LCDecl;
8570 if (
auto *ExprTemp = dyn_cast<ExprWithCleanups>(S))
8571 if (!ExprTemp->cleanupsHaveSideEffects())
8572 S = ExprTemp->getSubExpr();
8574 if (!CollapsedLoopVarDecls.empty()) {
8575 ForSubExprChecker FSEC{CollapsedLoopVarDecls};
8576 if (!FSEC.TraverseStmt(S)) {
8578 SemaRef.Diag(
Range.getBegin(), diag::err_omp_loop_bad_collapse_var)
8579 <<
Range.getEnd() << 2 << FSEC.getForbiddenVar();
8586 if (
auto *UO = dyn_cast<UnaryOperator>(S)) {
8587 if (UO->isIncrementDecrementOp() &&
8588 getInitLCDecl(UO->getSubExpr()) == LCDecl)
8589 return setStep(SemaRef
8590 .ActOnIntegerConstant(UO->getBeginLoc(),
8591 (UO->isDecrementOp() ? -1 : 1))
8594 }
else if (
auto *BO = dyn_cast<BinaryOperator>(S)) {
8595 switch (BO->getOpcode()) {
8598 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8599 return setStep(BO->getRHS(), BO->getOpcode() == BO_SubAssign);
8602 if (getInitLCDecl(BO->getLHS()) == LCDecl)
8603 return checkAndSetIncRHS(BO->getRHS());
8608 }
else if (
auto *CE = dyn_cast<CXXOperatorCallExpr>(S)) {
8609 switch (CE->getOperator()) {
8612 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8613 return setStep(SemaRef
8614 .ActOnIntegerConstant(
8616 ((CE->getOperator() == OO_MinusMinus) ? -1 : 1))
8622 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8623 return setStep(CE->getArg(1), CE->getOperator() == OO_MinusEqual);
8626 if (getInitLCDecl(CE->getArg(0)) == LCDecl)
8627 return checkAndSetIncRHS(CE->getArg(1));
8633 if (dependent() || SemaRef.CurContext->isDependentContext())
8635 SemaRef.Diag(S->
getBeginLoc(), diag::err_omp_loop_not_canonical_incr)
8642 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
8643 StringRef Name =
".capture_expr.") {
8651 auto I = Captures.find(
Capture);
8652 if (I != Captures.end())
8665 bool TestIsStrictOp,
bool RoundToStep,
8666 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
8667 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures,
".new_step");
8670 llvm::APSInt LRes, SRes;
8671 bool IsLowerConst =
false, IsStepConst =
false;
8672 if (std::optional<llvm::APSInt> Res =
8675 IsLowerConst =
true;
8677 if (std::optional<llvm::APSInt> Res =
8682 bool NoNeedToConvert = IsLowerConst && !RoundToStep &&
8683 ((!TestIsStrictOp && LRes.isNonNegative()) ||
8684 (TestIsStrictOp && LRes.isStrictlyPositive()));
8685 bool NeedToReorganize =
false;
8687 if (!NoNeedToConvert && IsLowerConst &&
8688 (TestIsStrictOp || (RoundToStep && IsStepConst))) {
8689 NoNeedToConvert =
true;
8691 unsigned BW = LRes.getBitWidth() > SRes.getBitWidth()
8692 ? LRes.getBitWidth()
8693 : SRes.getBitWidth();
8694 LRes = LRes.extend(BW + 1);
8695 LRes.setIsSigned(
true);
8696 SRes = SRes.extend(BW + 1);
8697 SRes.setIsSigned(
true);
8699 NoNeedToConvert = LRes.trunc(BW).extend(BW + 1) == LRes;
8700 LRes = LRes.trunc(BW);
8702 if (TestIsStrictOp) {
8703 unsigned BW = LRes.getBitWidth();
8704 LRes = LRes.extend(BW + 1);
8705 LRes.setIsSigned(
true);
8708 NoNeedToConvert && LRes.trunc(BW).extend(BW + 1) == LRes;
8710 LRes = LRes.trunc(BW);
8712 NeedToReorganize = NoNeedToConvert;
8715 bool IsUpperConst =
false;
8716 if (std::optional<llvm::APSInt> Res =
8719 IsUpperConst =
true;
8721 if (NoNeedToConvert && IsLowerConst && IsUpperConst &&
8722 (!RoundToStep || IsStepConst)) {
8723 unsigned BW = LRes.getBitWidth() > URes.getBitWidth() ? LRes.getBitWidth()
8724 : URes.getBitWidth();
8725 LRes = LRes.extend(BW + 1);
8726 LRes.setIsSigned(
true);
8727 URes = URes.extend(BW + 1);
8728 URes.setIsSigned(
true);
8730 NoNeedToConvert = URes.trunc(BW).extend(BW + 1) == URes;
8731 NeedToReorganize = NoNeedToConvert;
8736 if ((!NoNeedToConvert || (LRes.isNegative() && !IsUpperConst)) &&
8742 if ((LowerSize <= UpperSize && UpperTy->hasSignedIntegerRepresentation()) ||
8745 LowerSize > UpperSize ? LowerSize : UpperSize, 0);
8756 if (!Lower || !Upper || NewStep.
isInvalid())
8762 if (NeedToReorganize) {
8776 S, DefaultLoc, BO_Add, Diff.
get(),
8786 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Diff.
get());
8790 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Sub, Upper, Lower);
8806 S, DefaultLoc, BO_Sub, Diff.
get(),
8826 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Div, Diff.
get(), NewStep.
get());
8834Expr *OpenMPIterationSpaceChecker::buildNumIterations(
8836 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
8845 if (InitDependOnLC) {
8846 const LoopIterationSpace &IS = ResultIterSpaces[*InitDependOnLC - 1];
8847 if (!IS.MinValue || !IS.MaxValue)
8856 IS.CounterVar, MinValue.
get());
8861 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMinVal.
get(), LBVal);
8876 IS.CounterVar, MaxValue.
get());
8881 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, LBMaxVal.
get(), LBVal);
8890 tryBuildCapture(SemaRef, LBMinVal.
get(), Captures,
".lb_min").get();
8892 tryBuildCapture(SemaRef, LBMaxVal.
get(), Captures,
".lb_max").get();
8893 if (!LBMin || !LBMax)
8897 SemaRef.
BuildBinOp(S, DefaultLoc, BO_LT, LBMin, LBMax);
8901 tryBuildCapture(SemaRef, MinLessMaxRes.
get(), Captures,
".min_less_max")
8905 if (*TestIsLessOp) {
8909 MinLessMax, LBMin, LBMax);
8912 LBVal = MinLB.
get();
8917 MinLessMax, LBMax, LBMin);
8920 LBVal = MaxLB.
get();
8924 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Assign, IS.CounterVar, LBVal);
8927 LBVal = LBMinVal.
get();
8931 if (CondDependOnLC) {
8932 const LoopIterationSpace &IS = ResultIterSpaces[*CondDependOnLC - 1];
8933 if (!IS.MinValue || !IS.MaxValue)
8942 IS.CounterVar, MinValue.
get());
8947 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMinVal.
get(), UBVal);
8962 IS.CounterVar, MaxValue.
get());
8967 SemaRef.
BuildBinOp(S, DefaultLoc, BO_Comma, UBMaxVal.
get(), UBVal);
8976 tryBuildCapture(SemaRef, UBMinVal.
get(), Captures,
".ub_min").get();
8978 tryBuildCapture(SemaRef, UBMaxVal.
get(), Captures,
".ub_max").get();
8979 if (!UBMin || !UBMax)
8983 SemaRef.
BuildBinOp(S, DefaultLoc, BO_GT, UBMin, UBMax);
8986 Expr *MinGreaterMax = tryBuildCapture(SemaRef, MinGreaterMaxRes.
get(),
8987 Captures,
".min_greater_max")
8991 if (*TestIsLessOp) {
8995 DefaultLoc, DefaultLoc, MinGreaterMax, UBMin, UBMax);
8998 UBVal = MaxUB.
get();
9003 DefaultLoc, DefaultLoc, MinGreaterMax, UBMax, UBMin);
9006 UBVal = MinUB.
get();
9009 Expr *UBExpr = *TestIsLessOp ? UBVal : LBVal;
9010 Expr *LBExpr = *TestIsLessOp ? LBVal : UBVal;
9011 Expr *Upper = tryBuildCapture(SemaRef, UBExpr, Captures,
".upper").get();
9012 Expr *Lower = tryBuildCapture(SemaRef, LBExpr, Captures,
".lower").get();
9013 if (!Upper || !Lower)
9016 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
9017 Step, VarType, TestIsStrictOp,
9026 C.getTypeSize(
Type) >
C.getTypeSize(VarType);
9029 UseVarType ?
C.getTypeSize(VarType) :
C.getTypeSize(
Type);
9032 Type =
C.getIntTypeForBitwidth(NewSize, IsSigned);
9042 unsigned NewSize = (
C.getTypeSize(
Type) > 32) ? 64 : 32;
9043 if (NewSize !=
C.getTypeSize(
Type)) {
9044 if (NewSize <
C.getTypeSize(
Type)) {
9045 assert(NewSize == 64 &&
"incorrect loop var size");
9046 SemaRef.
Diag(DefaultLoc, diag::warn_omp_loop_64_bit_var)
9047 << InitSrcRange << ConditionSrcRange;
9049 QualType NewType =
C.getIntTypeForBitwidth(
9051 C.getTypeSize(
Type) < NewSize);
9065std::pair<Expr *, Expr *> OpenMPIterationSpaceChecker::buildMinMaxValues(
9066 Scope *S, llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
9070 return std::make_pair(
nullptr,
nullptr);
9073 Expr *MinExpr =
nullptr;
9074 Expr *MaxExpr =
nullptr;
9075 Expr *LBExpr = *TestIsLessOp ? LB : UB;
9076 Expr *UBExpr = *TestIsLessOp ? UB : LB;
9078 *TestIsLessOp ? InitDependOnLC.has_value() : CondDependOnLC.has_value();
9080 *TestIsLessOp ? CondDependOnLC.has_value() : InitDependOnLC.has_value();
9082 LBNonRect ? LBExpr : tryBuildCapture(SemaRef, LBExpr, Captures).get();
9084 UBNonRect ? UBExpr : tryBuildCapture(SemaRef, UBExpr, Captures).get();
9085 if (!Upper || !Lower)
9086 return std::make_pair(
nullptr,
nullptr);
9096 ExprResult Diff = calculateNumIters(SemaRef, S, DefaultLoc, Lower, Upper,
9097 Step, VarType, TestIsStrictOp,
9100 return std::make_pair(
nullptr,
nullptr);
9106 return std::make_pair(
nullptr,
nullptr);
9108 ExprResult NewStep = tryBuildCapture(SemaRef, Step, Captures,
".new_step");
9110 return std::make_pair(
nullptr,
nullptr);
9111 Diff = SemaRef.
BuildBinOp(S, DefaultLoc, BO_Mul, Diff.
get(), NewStep.
get());
9113 return std::make_pair(
nullptr,
nullptr);
9118 return std::make_pair(
nullptr,
nullptr);
9130 return std::make_pair(
nullptr,
nullptr);
9132 if (*TestIsLessOp) {
9136 S, DefaultLoc, BO_Add,
9140 return std::make_pair(
nullptr,
nullptr);
9145 S, DefaultLoc, BO_Sub,
9149 return std::make_pair(
nullptr,
nullptr);
9158 return std::make_pair(
nullptr,
nullptr);
9163 return std::make_pair(
nullptr,
nullptr);
9166 MaxExpr = Diff.
get();
9168 MinExpr = Diff.
get();
9170 return std::make_pair(MinExpr, MaxExpr);
9173Expr *OpenMPIterationSpaceChecker::buildFinalCondition(
Scope *S)
const {
9174 if (InitDependOnLC || CondDependOnLC)
9179Expr *OpenMPIterationSpaceChecker::buildPreCond(
9181 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures)
const {
9186 if (CondDependOnLC || InitDependOnLC)
9197 ExprResult NewLB = tryBuildCapture(SemaRef, LB, Captures);
9198 ExprResult NewUB = tryBuildCapture(SemaRef, UB, Captures);
9204 *TestIsLessOp ? (TestIsStrictOp ? BO_LT : BO_LE)
9205 : (TestIsStrictOp ? BO_GT : BO_GE),
9206 NewLB.
get(), NewUB.
get());
9221DeclRefExpr *OpenMPIterationSpaceChecker::buildCounterVar(
9222 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
9223 DSAStackTy &DSA)
const {
9224 auto *VD = dyn_cast<VarDecl>(LCDecl);
9229 const DSAStackTy::DSAVarData
Data =
9230 DSA.getTopDSA(LCDecl,
false);
9234 Captures.insert(std::make_pair(LCRef, Ref));
9240Expr *OpenMPIterationSpaceChecker::buildPrivateCounterVar()
const {
9257Expr *OpenMPIterationSpaceChecker::buildCounterInit()
const {
return LB; }
9260Expr *OpenMPIterationSpaceChecker::buildCounterStep()
const {
return Step; }
9262Expr *OpenMPIterationSpaceChecker::buildOrderedLoopData(
9264 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
SourceLocation Loc,
9270 assert((OOK == OO_Plus || OOK == OO_Minus) &&
9271 "Expected only + or - operations for depend clauses.");
9283 *TestIsLessOp ? Cnt : tryBuildCapture(SemaRef, LB, Captures).get();
9285 *TestIsLessOp ? tryBuildCapture(SemaRef, LB, Captures).get() : Cnt;
9286 if (!Upper || !Lower)
9290 SemaRef, S, DefaultLoc, Lower, Upper, Step, VarType,
9291 false,
false, Captures);
9301 assert(
getLangOpts().OpenMP &&
"OpenMP is not active.");
9302 assert(
Init &&
"Expected loop in canonical form.");
9303 unsigned AssociatedLoops =
DSAStack->getAssociatedLoops();
9310 OpenMPIterationSpaceChecker ISC(
SemaRef,
true,
9312 if (!ISC.checkAndSetInit(
Init,
false)) {
9314 auto *VD = dyn_cast<VarDecl>(D);
9325 DSAStack->addLoopControlVariable(D, VD);
9327 if (LD != D->getCanonicalDecl()) {
9328 DSAStack->resetPossibleLoopCounter();
9329 if (
auto *Var = dyn_cast_or_null<VarDecl>(LD))
9342 DSAStackTy::DSAVarData DVar =
9346 Expr *LoopDeclRefExpr = ISC.getLoopDeclRefExpr();
9349 ? (
DSAStack->hasMutipleLoops() ? OMPC_lastprivate : OMPC_linear)
9352 return getLeafConstructsOrSelf(DK).back() == OMPD_taskloop;
9355 DVar.CKind != PredeterminedCKind && DVar.RefExpr &&
9357 (DVar.CKind != OMPC_lastprivate && DVar.CKind != OMPC_private))) ||
9359 IsOpenMPTaskloopDirective(DKind) ||
9362 DVar.CKind != OMPC_private && DVar.CKind != OMPC_lastprivate)) &&
9363 (DVar.CKind != OMPC_private || DVar.RefExpr)) {
9365 Diag(
Init->getBeginLoc(), diag::err_omp_loop_var_dsa)
9367 << getOpenMPDirectiveName(DKind, OMPVersion)
9369 if (DVar.RefExpr ==
nullptr)
9370 DVar.CKind = PredeterminedCKind;
9372 }
else if (LoopDeclRefExpr) {
9377 if (DVar.CKind == OMPC_unknown)
9378 DSAStack->addDSA(D, LoopDeclRefExpr, PredeterminedCKind, PrivateRef);
9382 DSAStack->setAssociatedLoops(AssociatedLoops - 1);
9387class OMPDoacrossKind {
9390 return C->getDependenceType() == OMPC_DOACROSS_source ||
9391 C->getDependenceType() == OMPC_DOACROSS_source_omp_cur_iteration;
9394 return C->getDependenceType() == OMPC_DOACROSS_sink;
9396 bool isSinkIter(
const OMPDoacrossClause *
C) {
9397 return C->getDependenceType() == OMPC_DOACROSS_sink_omp_cur_iteration;
9405 unsigned CurrentNestedLoopCount,
unsigned NestedLoopCount,
9406 unsigned TotalNestedLoopCount,
Expr *CollapseLoopCountExpr,
9407 Expr *OrderedLoopCountExpr,
9410 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures,
9411 const llvm::SmallPtrSetImpl<const Decl *> &CollapsedLoopVarDecls) {
9416 if (
auto *CanonLoop = dyn_cast_or_null<OMPCanonicalLoop>(S))
9417 S = CanonLoop->getLoopStmt();
9418 auto *For = dyn_cast_or_null<ForStmt>(S);
9419 auto *CXXFor = dyn_cast_or_null<CXXForRangeStmt>(S);
9421 if (!For && (SemaRef.
LangOpts.OpenMP <= 45 || !CXXFor)) {
9422 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
9424 << (CollapseLoopCountExpr !=
nullptr || OrderedLoopCountExpr !=
nullptr)
9425 << getOpenMPDirectiveName(DKind, OMPVersion) << TotalNestedLoopCount
9426 << (CurrentNestedLoopCount > 0) << CurrentNestedLoopCount;
9427 if (TotalNestedLoopCount > 1) {
9428 if (CollapseLoopCountExpr && OrderedLoopCountExpr)
9429 SemaRef.
Diag(DSA.getConstructLoc(),
9430 diag::note_omp_collapse_ordered_expr)
9433 else if (CollapseLoopCountExpr)
9435 diag::note_omp_collapse_ordered_expr)
9437 else if (OrderedLoopCountExpr)
9439 diag::note_omp_collapse_ordered_expr)
9444 assert(((For && For->getBody()) || (CXXFor && CXXFor->getBody())) &&
9450 OpenMPIterationSpaceChecker ISC(SemaRef, SupportsNonRectangular, DSA,
9451 For ? For->getForLoc() : CXXFor->getForLoc(),
9452 CollapsedLoopVarDecls);
9455 Stmt *
Init = For ? For->getInit() : CXXFor->getBeginStmt();
9456 if (ISC.checkAndSetInit(
Init))
9459 bool HasErrors =
false;
9462 if (
ValueDecl *LCDecl = ISC.getLoopDecl()) {
9472 SemaRef.
Diag(
Init->getBeginLoc(), diag::err_omp_loop_variable_type)
9486 VarsWithImplicitDSA.erase(LCDecl);
9490 "DSA for non-loop vars");
9493 HasErrors |= ISC.checkAndSetCond(For ? For->getCond() : CXXFor->getCond());
9496 HasErrors |= ISC.checkAndSetInc(For ? For->getInc() : CXXFor->getInc());
9503 ResultIterSpaces[CurrentNestedLoopCount].PreCond = ISC.buildPreCond(
9504 DSA.getCurScope(), For ? For->getCond() : CXXFor->getCond(), Captures);
9505 ResultIterSpaces[CurrentNestedLoopCount].NumIterations =
9506 ISC.buildNumIterations(DSA.getCurScope(), ResultIterSpaces,
9513 ResultIterSpaces[CurrentNestedLoopCount].CounterVar =
9514 ISC.buildCounterVar(Captures, DSA);
9515 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar =
9516 ISC.buildPrivateCounterVar();
9517 ResultIterSpaces[CurrentNestedLoopCount].CounterInit = ISC.buildCounterInit();
9518 ResultIterSpaces[CurrentNestedLoopCount].CounterStep = ISC.buildCounterStep();
9519 ResultIterSpaces[CurrentNestedLoopCount].InitSrcRange = ISC.getInitSrcRange();
9520 ResultIterSpaces[CurrentNestedLoopCount].CondSrcRange =
9521 ISC.getConditionSrcRange();
9522 ResultIterSpaces[CurrentNestedLoopCount].IncSrcRange =
9523 ISC.getIncrementSrcRange();
9524 ResultIterSpaces[CurrentNestedLoopCount].Subtract = ISC.shouldSubtractStep();
9525 ResultIterSpaces[CurrentNestedLoopCount].IsStrictCompare =
9526 ISC.isStrictTestOp();
9527 std::tie(ResultIterSpaces[CurrentNestedLoopCount].MinValue,
9528 ResultIterSpaces[CurrentNestedLoopCount].MaxValue) =
9529 ISC.buildMinMaxValues(DSA.getCurScope(), Captures);
9530 ResultIterSpaces[CurrentNestedLoopCount].FinalCondition =
9531 ISC.buildFinalCondition(DSA.getCurScope());
9532 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularLB =
9533 ISC.doesInitDependOnLC();
9534 ResultIterSpaces[CurrentNestedLoopCount].IsNonRectangularUB =
9535 ISC.doesCondDependOnLC();
9536 ResultIterSpaces[CurrentNestedLoopCount].LoopDependentIdx =
9537 ISC.getLoopDependentIdx();
9540 (ResultIterSpaces[CurrentNestedLoopCount].PreCond ==
nullptr ||
9541 ResultIterSpaces[CurrentNestedLoopCount].NumIterations ==
nullptr ||
9542 ResultIterSpaces[CurrentNestedLoopCount].CounterVar ==
nullptr ||
9543 ResultIterSpaces[CurrentNestedLoopCount].PrivateCounterVar ==
nullptr ||
9544 ResultIterSpaces[CurrentNestedLoopCount].CounterInit ==
nullptr ||
9545 ResultIterSpaces[CurrentNestedLoopCount].CounterStep ==
nullptr);
9546 if (!HasErrors && DSA.isOrderedRegion()) {
9547 if (DSA.getOrderedRegionParam().second->getNumForLoops()) {
9548 if (CurrentNestedLoopCount <
9549 DSA.getOrderedRegionParam().second->getLoopNumIterations().size()) {
9550 DSA.getOrderedRegionParam().second->setLoopNumIterations(
9551 CurrentNestedLoopCount,
9552 ResultIterSpaces[CurrentNestedLoopCount].NumIterations);
9553 DSA.getOrderedRegionParam().second->setLoopCounter(
9554 CurrentNestedLoopCount,
9555 ResultIterSpaces[CurrentNestedLoopCount].CounterVar);
9558 for (
auto &Pair : DSA.getDoacrossDependClauses()) {
9559 auto *DependC = dyn_cast<OMPDependClause>(Pair.first);
9560 auto *DoacrossC = dyn_cast<OMPDoacrossClause>(Pair.first);
9562 DependC ? DependC->getNumLoops() : DoacrossC->getNumLoops();
9563 if (CurrentNestedLoopCount >= NumLoops) {
9567 if (DependC && DependC->getDependencyKind() == OMPC_DEPEND_sink &&
9568 Pair.second.size() <= CurrentNestedLoopCount) {
9570 DependC->setLoopData(CurrentNestedLoopCount,
nullptr);
9573 OMPDoacrossKind ODK;
9574 if (DoacrossC && ODK.isSink(DoacrossC) &&
9575 Pair.second.size() <= CurrentNestedLoopCount) {
9577 DoacrossC->setLoopData(CurrentNestedLoopCount,
nullptr);
9582 DependC ? DependC->getDependencyLoc() : DoacrossC->getDependenceLoc();
9583 if ((DependC && DependC->getDependencyKind() == OMPC_DEPEND_source) ||
9584 (DoacrossC && ODK.isSource(DoacrossC)))
9585 CntValue = ISC.buildOrderedLoopData(
9587 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9589 else if (DoacrossC && ODK.isSinkIter(DoacrossC)) {
9592 ResultIterSpaces[CurrentNestedLoopCount].CounterVar)
9600 CntValue = ISC.buildOrderedLoopData(
9602 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9603 DepLoc, Inc, clang::OO_Minus);
9605 CntValue = ISC.buildOrderedLoopData(
9607 ResultIterSpaces[CurrentNestedLoopCount].CounterVar, Captures,
9608 DepLoc, Pair.second[CurrentNestedLoopCount].first,
9609 Pair.second[CurrentNestedLoopCount].second);
9611 DependC->setLoopData(CurrentNestedLoopCount, CntValue);
9613 DoacrossC->setLoopData(CurrentNestedLoopCount, CntValue);
9624 llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9628 : tryBuildCapture(SemaRef, Start.
get(), Captures);
9649 bool IsNonRectangularLB,
9650 llvm::MapVector<const Expr *, DeclRefExpr *> *Captures =
nullptr) {
9659 NewStep = tryBuildCapture(SemaRef, Step.
get(), *Captures);
9674 if (Captures && !IsNonRectangularLB)
9675 NewStart = tryBuildCapture(SemaRef, Start.
get(), *Captures);
9684 Update.get()->getType()->isOverloadableType()) {
9691 SemaRef.
BuildBinOp(S, Loc, Subtract ? BO_SubAssign : BO_AddAssign,
9692 VarRef.
get(), SavedUpdate.
get());
9703 NewStart.
get(), SavedUpdate.
get());
9728 unsigned HasBits =
C.getTypeSize(OldType);
9729 if (HasBits >= Bits)
9732 QualType NewType =
C.getIntTypeForBitwidth(Bits,
true);
9742 if (std::optional<llvm::APSInt> Result =
9744 return Signed ? Result->isSignedIntN(Bits) : Result->isIntN(Bits);
9751 if (!PreInits.empty()) {
9772 if (
auto *CS = dyn_cast<CompoundStmt>(Item))
9781 const llvm::MapVector<const Expr *, DeclRefExpr *> &Captures) {
9782 if (!Captures.empty()) {
9784 for (
const auto &Pair : Captures)
9785 PreInits.push_back(Pair.second->getDecl());
9793 if (PreInits.empty())
9797 for (
Stmt *S : PreInits)
9804 Expr *PostUpdate =
nullptr;
9805 if (!PostUpdates.empty()) {
9806 for (
Expr *E : PostUpdates) {
9812 PostUpdate = PostUpdate
9826 int NestingDepth = 0;
9827 llvm::SmallPtrSetImpl<const Decl *> &VarDecls;
9849 if (NestingDepth > 0)
9860 Expr *OrderedLoopCountExpr,
Stmt *AStmt,
Sema &SemaRef,
9863 OMPLoopBasedDirective::HelperExprs &Built) {
9867 if ((CollapseLoopCountExpr && CollapseLoopCountExpr->
containsErrors()) ||
9868 (OrderedLoopCountExpr && OrderedLoopCountExpr->
containsErrors()))
9871 unsigned NestedLoopCount = 1;
9872 bool SupportsNonPerfectlyNested = (SemaRef.
LangOpts.OpenMP >= 50) &&
9876 if (CollapseLoopCountExpr) {
9881 NestedLoopCount = Result.Val.getInt().getLimitedValue();
9890 unsigned OrderedLoopCount = 1;
9891 if (OrderedLoopCountExpr) {
9897 llvm::APSInt Result = EVResult.
Val.
getInt();
9898 if (Result.getLimitedValue() < NestedLoopCount) {
9900 diag::err_omp_wrong_ordered_loop_count)
9903 diag::note_collapse_loop_count)
9906 OrderedLoopCount = Result.getLimitedValue();
9914 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
9915 unsigned NumLoops = std::max(OrderedLoopCount, NestedLoopCount);
9917 if (!OMPLoopBasedDirective::doForAllLoops(
9920 SupportsNonPerfectlyNested, NumLoops,
9921 [DKind, &SemaRef, &DSA, NumLoops, NestedLoopCount,
9922 CollapseLoopCountExpr, OrderedLoopCountExpr, &VarsWithImplicitDSA,
9923 &IterSpaces, &Captures,
9924 &CollapsedLoopVarDecls](
unsigned Cnt,
Stmt *CurStmt) {
9926 DKind, CurStmt, SemaRef, DSA, Cnt, NestedLoopCount,
9927 NumLoops, CollapseLoopCountExpr, OrderedLoopCountExpr,
9928 VarsWithImplicitDSA, IterSpaces, Captures,
9929 CollapsedLoopVarDecls))
9931 if (Cnt > 0 && Cnt >= NestedLoopCount &&
9932 IterSpaces[Cnt].CounterVar) {
9936 Captures[DRE] = DRE;
9942 Stmt *DependentPreInits = Transform->getPreInits();
9943 if (!DependentPreInits)
9950 for (
Stmt *S : Constituents) {
9951 if (
auto *DC = dyn_cast<DeclStmt>(S)) {
9952 for (
Decl *
C : DC->decls()) {
9955 SemaRef, D, D->getType().getNonReferenceType(),
9958 Captures[Ref] = Ref;
9965 Built.clear(NestedLoopCount);
9968 return NestedLoopCount;
10001 auto PreCond =
ExprResult(IterSpaces[0].PreCond);
10002 Expr *N0 = IterSpaces[0].NumIterations;
10021 return NestedLoopCount;
10024 bool AllCountsNeedLessThan32Bits =
C.getTypeSize(N0->
getType()) < 32;
10026 Scope *CurScope = DSA.getCurScope();
10027 for (
unsigned Cnt = 1; Cnt < NestedLoopCount; ++Cnt) {
10028 if (PreCond.isUsable()) {
10030 SemaRef.
BuildBinOp(CurScope, PreCond.get()->getExprLoc(), BO_LAnd,
10031 PreCond.get(), IterSpaces[Cnt].PreCond);
10033 Expr *N = IterSpaces[Cnt].NumIterations;
10035 AllCountsNeedLessThan32Bits &=
C.getTypeSize(N->
getType()) < 32;
10038 CurScope, Loc, BO_Mul, LastIteration32.
get(),
10046 CurScope, Loc, BO_Mul, LastIteration64.
get(),
10056 if (SemaRef.
getLangOpts().OpenMPOptimisticCollapse ||
10058 C.getTypeSize(LastIteration32.
get()->
getType()) == 32 &&
10059 (AllCountsNeedLessThan32Bits || NestedLoopCount == 1 ||
10063 LastIteration64.
get(), SemaRef))))
10064 LastIteration = LastIteration32;
10083 LastIteration.
get(),
10095 tryBuildCapture(SemaRef, LastIteration.
get(), Captures);
10096 LastIteration = SaveRef;
10109 ExprResult LB, UB, IL, ST, EUB, CombLB, CombUB, PrevLB, PrevUB, CombEUB;
10138 buildVarDecl(SemaRef, InitLoc, StrideVType,
".omp.stride");
10147 UB.
get(), LastIteration.
get());
10150 LastIteration.
get(), UB.
get());
10151 EUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, UB.
get(),
10162 buildVarDecl(SemaRef, InitLoc, VType,
".omp.comb.lb");
10170 buildVarDecl(SemaRef, InitLoc, VType,
".omp.comb.ub");
10176 CurScope, InitLoc, BO_GT, CombUB.
get(), LastIteration.
get());
10179 LastIteration.
get(), CombUB.
get());
10180 CombEUB = SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, CombUB.
get(),
10189 "Unexpected number of parameters in loop combined directive");
10230 SemaRef.
BuildBinOp(CurScope, InitLoc, BO_Assign, IV.
get(), CombRHS);
10236 bool UseStrictCompare =
10238 llvm::all_of(IterSpaces, [](
const LoopIterationSpace &LIS) {
10239 return LIS.IsStrictCompare;
10245 if (UseStrictCompare) {
10248 .
BuildBinOp(CurScope, CondLoc, BO_Add, BoundUB,
10260 UseStrictCompare ? BO_LT : BO_LE, IV.
get(),
10263 NumIterations.
get());
10266 CombDistCond = SemaRef.
BuildBinOp(CurScope, CondLoc, BO_LT, IV.
get(),
10267 NumIterations.
get());
10272 Expr *BoundCombUB = CombUB.
get();
10273 if (UseStrictCompare) {
10277 CurScope, CondLoc, BO_Add, BoundCombUB,
10285 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
10286 IV.
get(), BoundCombUB);
10293 if (!Inc.isUsable())
10295 Inc = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, IV.
get(), Inc.get());
10297 if (!Inc.isUsable())
10304 ExprResult NextLB, NextUB, CombNextLB, CombNextUB;
10310 NextLB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Add, LB.
get(), ST.
get());
10321 NextUB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Add, UB.
get(), ST.
get());
10337 CombNextLB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombLB.
get(),
10349 CombNextUB = SemaRef.
BuildBinOp(CurScope, IncLoc, BO_Assign, CombUB.
get(),
10363 ExprResult DistCond, DistInc, PrevEUB, ParForInDistCond;
10366 CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE, IV.
get(), BoundUB);
10367 assert(DistCond.
isUsable() &&
"distribute cond expr was not built");
10371 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
10372 DistInc = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, IV.
get(),
10376 assert(DistInc.
isUsable() &&
"distribute inc expr was not built");
10387 DistEUBLoc, NewPrevUB.
get());
10392 UB.
get(), NewPrevUB.
get());
10394 DistEUBLoc, DistEUBLoc, IsUBGreater.
get(), NewPrevUB.
get(), UB.
get());
10395 PrevEUB = SemaRef.
BuildBinOp(CurScope, DistIncLoc, BO_Assign, UB.
get(),
10403 Expr *BoundPrevUB = PrevUB.
get();
10404 if (UseStrictCompare) {
10408 CurScope, CondLoc, BO_Add, BoundPrevUB,
10416 SemaRef.
BuildBinOp(CurScope, CondLoc, UseStrictCompare ? BO_LT : BO_LE,
10417 IV.
get(), BoundPrevUB);
10421 bool HasErrors =
false;
10422 Built.Counters.resize(NestedLoopCount);
10423 Built.Inits.resize(NestedLoopCount);
10424 Built.Updates.resize(NestedLoopCount);
10425 Built.Finals.resize(NestedLoopCount);
10426 Built.DependentCounters.resize(NestedLoopCount);
10427 Built.DependentInits.resize(NestedLoopCount);
10428 Built.FinalsConditions.resize(NestedLoopCount);
10446 for (
unsigned int Cnt = 0; Cnt < NestedLoopCount; ++Cnt) {
10447 LoopIterationSpace &IS = IterSpaces[Cnt];
10453 for (
unsigned int K = Cnt + 1; K < NestedLoopCount; ++K)
10454 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Prod.
get(),
10455 IterSpaces[K].NumIterations);
10460 if (Cnt + 1 < NestedLoopCount)
10474 if (Cnt + 1 < NestedLoopCount)
10475 Prod = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Mul, Iter.
get(),
10479 Acc = SemaRef.
BuildBinOp(CurScope, UpdLoc, BO_Sub, Acc.
get(), Prod.
get());
10488 IS.CounterInit, IS.IsNonRectangularLB, Captures);
10489 if (!
Init.isUsable()) {
10494 SemaRef, CurScope, UpdLoc, CounterVar, IS.CounterInit, Iter,
10495 IS.CounterStep, IS.Subtract, IS.IsNonRectangularLB, &Captures);
10496 if (!
Update.isUsable()) {
10504 IS.CounterInit, IS.NumIterations, IS.CounterStep,
10505 IS.Subtract, IS.IsNonRectangularLB, &Captures);
10506 if (!Final.isUsable()) {
10511 if (!
Update.isUsable() || !Final.isUsable()) {
10516 Built.Counters[Cnt] = IS.CounterVar;
10517 Built.PrivateCounters[Cnt] = IS.PrivateCounterVar;
10518 Built.Inits[Cnt] =
Init.get();
10519 Built.Updates[Cnt] =
Update.get();
10520 Built.Finals[Cnt] = Final.get();
10521 Built.DependentCounters[Cnt] =
nullptr;
10522 Built.DependentInits[Cnt] =
nullptr;
10523 Built.FinalsConditions[Cnt] =
nullptr;
10524 if (IS.IsNonRectangularLB || IS.IsNonRectangularUB) {
10525 Built.DependentCounters[Cnt] = Built.Counters[IS.LoopDependentIdx - 1];
10526 Built.DependentInits[Cnt] = Built.Inits[IS.LoopDependentIdx - 1];
10527 Built.FinalsConditions[Cnt] = IS.FinalCondition;
10536 Built.IterationVarRef = IV.
get();
10537 Built.LastIteration = LastIteration.
get();
10538 Built.NumIterations = NumIterations.
get();
10539 Built.CalcLastIteration = SemaRef
10543 Built.PreCond = PreCond.get();
10545 Built.Cond =
Cond.get();
10546 Built.Init =
Init.get();
10547 Built.Inc = Inc.get();
10548 Built.LB = LB.
get();
10549 Built.UB = UB.
get();
10550 Built.IL = IL.
get();
10551 Built.ST = ST.
get();
10552 Built.EUB = EUB.
get();
10553 Built.NLB = NextLB.
get();
10554 Built.NUB = NextUB.
get();
10555 Built.PrevLB = PrevLB.
get();
10556 Built.PrevUB = PrevUB.
get();
10557 Built.DistInc = DistInc.
get();
10558 Built.PrevEUB = PrevEUB.
get();
10559 Built.DistCombinedFields.LB = CombLB.
get();
10560 Built.DistCombinedFields.UB = CombUB.
get();
10561 Built.DistCombinedFields.EUB = CombEUB.
get();
10562 Built.DistCombinedFields.Init = CombInit.
get();
10563 Built.DistCombinedFields.Cond = CombCond.
get();
10564 Built.DistCombinedFields.NLB = CombNextLB.
get();
10565 Built.DistCombinedFields.NUB = CombNextUB.
get();
10566 Built.DistCombinedFields.DistCond = CombDistCond.
get();
10567 Built.DistCombinedFields.ParForInDistCond = ParForInDistCond.
get();
10569 return NestedLoopCount;
10573 auto CollapseClauses =
10574 OMPExecutableDirective::getClausesOfKind<OMPCollapseClause>(Clauses);
10575 if (CollapseClauses.begin() != CollapseClauses.end())
10576 return (*CollapseClauses.begin())->getNumForLoops();
10581 auto OrderedClauses =
10582 OMPExecutableDirective::getClausesOfKind<OMPOrderedClause>(Clauses);
10583 if (OrderedClauses.begin() != OrderedClauses.end())
10584 return (*OrderedClauses.begin())->getNumForLoops();
10593 for (
const OMPClause *Clause : Clauses) {
10594 if (Clause->getClauseKind() == OMPC_safelen)
10596 else if (Clause->getClauseKind() == OMPC_simdlen)
10598 if (Safelen && Simdlen)
10602 if (Simdlen && Safelen) {
10616 llvm::APSInt SimdlenRes = SimdlenResult.
Val.
getInt();
10617 llvm::APSInt SafelenRes = SafelenResult.
Val.
getInt();
10622 if (SimdlenRes > SafelenRes) {
10624 diag::err_omp_wrong_simdlen_safelen_values)
10641 OMPLoopBasedDirective::HelperExprs B;
10647 if (NestedLoopCount == 0)
10656 auto *SimdDirective = OMPSimdDirective::Create(
10657 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10658 return SimdDirective;
10668 OMPLoopBasedDirective::HelperExprs B;
10674 if (NestedLoopCount == 0)
10680 auto *ForDirective = OMPForDirective::Create(
10681 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10683 return ForDirective;
10695 OMPLoopBasedDirective::HelperExprs B;
10698 unsigned NestedLoopCount =
10701 VarsWithImplicitDSA, B);
10702 if (NestedLoopCount == 0)
10711 return OMPForSimdDirective::Create(
getASTContext(), StartLoc, EndLoc,
10712 NestedLoopCount, Clauses, AStmt, B);
10716 Stmt *AStmt, DSAStackTy *Stack) {
10721 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
10722 auto BaseStmt = AStmt;
10723 while (
auto *CS = dyn_cast_or_null<CapturedStmt>(BaseStmt))
10725 if (
auto *
C = dyn_cast_or_null<CompoundStmt>(BaseStmt)) {
10726 auto S =
C->children();
10727 if (S.begin() == S.end())
10731 for (
Stmt *SectionStmt : llvm::drop_begin(S)) {
10734 SemaRef.
Diag(SectionStmt->getBeginLoc(),
10735 diag::err_omp_sections_substmt_not_section)
10736 << getOpenMPDirectiveName(DKind, OMPVersion);
10740 ->setHasCancel(Stack->isCancelRegion());
10743 SemaRef.
Diag(AStmt->
getBeginLoc(), diag::err_omp_sections_not_compound_stmt)
10744 << getOpenMPDirectiveName(DKind, OMPVersion);
10757 SemaRef.setFunctionHasBranchProtectedScope();
10759 return OMPSectionsDirective::Create(
10770 SemaRef.setFunctionHasBranchProtectedScope();
10773 return OMPSectionDirective::Create(
getASTContext(), StartLoc, EndLoc, AStmt,
10779 if (
auto *CE = dyn_cast<CallExpr>(E))
10780 if (CE->getDirectCallee())
10801 if (!
SemaRef.CurContext->isDependentContext()) {
10802 Expr *TargetCall =
nullptr;
10804 auto *E = dyn_cast<Expr>(S);
10812 if (
auto *BO = dyn_cast<BinaryOperator>(E)) {
10813 if (BO->getOpcode() == BO_Assign)
10816 if (
auto *COCE = dyn_cast<CXXOperatorCallExpr>(E))
10817 if (COCE->getOperator() == OO_Equal)
10829 SemaRef.setFunctionHasBranchProtectedScope();
10832 Clauses, AStmt, TargetCallLoc);
10837 DSAStackTy *Stack) {
10838 bool ErrorFound =
false;
10840 if (
auto *LPC = dyn_cast<OMPLastprivateClause>(
C)) {
10841 for (
Expr *RefExpr : LPC->varlist()) {
10844 Expr *SimpleRefExpr = RefExpr;
10847 auto &&Info = Stack->isLoopControlVariable(D);
10850 S.
Diag(ELoc, diag::err_omp_lastprivate_loop_var_non_loop_iteration)
10851 << getOpenMPDirectiveName(K, OMPVersion);
10875 OMPLoopDirective::HelperExprs B;
10880 if (NestedLoopCount == 0)
10883 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
10884 "omp loop exprs were not built");
10887 NestedLoopCount, Clauses, AStmt, B);
10904 OMPLoopDirective::HelperExprs B;
10906 unsigned NestedLoopCount =
10909 VarsWithImplicitDSA, B);
10910 if (NestedLoopCount == 0)
10913 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
10914 "omp loop exprs were not built");
10916 DSAStack->setParentTeamsRegionLoc(StartLoc);
10919 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
10938 OMPLoopDirective::HelperExprs B;
10940 unsigned NestedLoopCount =
10943 VarsWithImplicitDSA, B);
10944 if (NestedLoopCount == 0)
10947 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
10948 "omp loop exprs were not built");
10951 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
10971 OMPLoopDirective::HelperExprs B;
10973 unsigned NestedLoopCount =
10976 VarsWithImplicitDSA, B);
10977 if (NestedLoopCount == 0)
10980 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
10981 "omp loop exprs were not built");
10984 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
11003 OMPLoopDirective::HelperExprs B;
11005 unsigned NestedLoopCount =
11008 VarsWithImplicitDSA, B);
11009 if (NestedLoopCount == 0)
11012 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
11013 "omp loop exprs were not built");
11016 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
11028 SemaRef.setFunctionHasBranchProtectedScope();
11033 const OMPClause *Copyprivate =
nullptr;
11034 for (
const OMPClause *Clause : Clauses) {
11035 if (Clause->getClauseKind() == OMPC_nowait)
11037 else if (Clause->getClauseKind() == OMPC_copyprivate)
11038 Copyprivate = Clause;
11039 if (Copyprivate && Nowait) {
11041 diag::err_omp_single_copyprivate_with_nowait);
11047 return OMPSingleDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses,
11057 SemaRef.setFunctionHasBranchProtectedScope();
11059 return OMPMasterDirective::Create(
getASTContext(), StartLoc, EndLoc, AStmt);
11069 SemaRef.setFunctionHasBranchProtectedScope();
11081 bool ErrorFound =
false;
11084 bool DependentHint =
false;
11086 if (
C->getClauseKind() == OMPC_hint) {
11088 Diag(
C->getBeginLoc(), diag::err_omp_hint_clause_no_name);
11094 DependentHint =
true;
11097 HintLoc =
C->getBeginLoc();
11103 const auto Pair =
DSAStack->getCriticalWithHint(DirName);
11104 if (Pair.first && DirName.
getName() && !DependentHint) {
11105 if (llvm::APSInt::compareValues(Hint, Pair.second) != 0) {
11106 Diag(StartLoc, diag::err_omp_critical_with_hint);
11108 Diag(HintLoc, diag::note_omp_critical_hint_here)
11109 << 0 <<
toString(Hint, 10,
false);
11111 Diag(StartLoc, diag::note_omp_critical_no_hint) << 0;
11112 if (
const auto *
C = Pair.first->getSingleClause<
OMPHintClause>()) {
11113 Diag(
C->getBeginLoc(), diag::note_omp_critical_hint_here)
11118 Diag(Pair.first->getBeginLoc(), diag::note_omp_critical_no_hint) << 1;
11123 SemaRef.setFunctionHasBranchProtectedScope();
11125 auto *Dir = OMPCriticalDirective::Create(
getASTContext(), DirName, StartLoc,
11126 EndLoc, Clauses, AStmt);
11127 if (!Pair.first && DirName.
getName() && !DependentHint)
11128 DSAStack->addCriticalWithHint(Dir, Hint);
11140 OMPLoopBasedDirective::HelperExprs B;
11143 unsigned NestedLoopCount =
11146 VarsWithImplicitDSA, B);
11147 if (NestedLoopCount == 0)
11153 return OMPParallelForDirective::Create(
11154 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
11167 OMPLoopBasedDirective::HelperExprs B;
11170 unsigned NestedLoopCount =
11173 VarsWithImplicitDSA, B);
11174 if (NestedLoopCount == 0)
11183 return OMPParallelForSimdDirective::Create(
11184 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
11195 return OMPParallelMasterDirective::Create(
11197 DSAStack->getTaskgroupReductionRef());
11208 return OMPParallelMaskedDirective::Create(
11210 DSAStack->getTaskgroupReductionRef());
11219 SemaRef.setFunctionHasBranchProtectedScope();
11221 return OMPParallelSectionsDirective::Create(
11231 bool ErrorFound =
false;
11233 if (llvm::is_contained(MutuallyExclusiveClauses,
C->getClauseKind())) {
11237 S.
Diag(
C->getBeginLoc(), diag::err_omp_clauses_mutually_exclusive)
11260 {OMPC_detach, OMPC_mergeable}))
11265 return OMPTaskDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses,
11266 AStmt,
DSAStack->isCancelRegion());
11271 return OMPTaskyieldDirective::Create(
getASTContext(), StartLoc, EndLoc);
11276 return OMPBarrierDirective::Create(
getASTContext(), StartLoc, EndLoc);
11282 bool InExContext) {
11284 OMPExecutableDirective::getSingleClause<OMPAtClause>(Clauses);
11286 if (AtC && !InExContext && AtC->
getAtKind() == OMPC_AT_execution) {
11291 if (!AtC || AtC->
getAtKind() == OMPC_AT_compilation) {
11293 OMPExecutableDirective::getSingleClause<OMPSeverityClause>(Clauses);
11295 OMPExecutableDirective::getSingleClause<OMPMessageClause>(Clauses);
11296 std::optional<std::string> SL =
11299 if (MessageC && !SL)
11301 diag::warn_clause_expected_string)
11303 if (SeverityC && SeverityC->
getSeverityKind() == OMPC_SEVERITY_warning)
11305 << SL.value_or(
"WARNING");
11307 Diag(StartLoc, diag::err_diagnose_if_succeeded) << SL.value_or(
"ERROR");
11308 if (!SeverityC || SeverityC->
getSeverityKind() != OMPC_SEVERITY_warning)
11320 OMPExecutableDirective::getSingleClause<OMPNowaitClause>(Clauses);
11322 !OMPExecutableDirective::getClausesOfKind<OMPDependClause>(Clauses)
11324 if (NowaitC && !HasDependC) {
11325 Diag(StartLoc, diag::err_omp_nowait_clause_without_depend);
11329 return OMPTaskwaitDirective::Create(
getASTContext(), StartLoc, EndLoc,
11342 SemaRef.setFunctionHasBranchProtectedScope();
11344 return OMPTaskgroupDirective::Create(
getASTContext(), StartLoc, EndLoc,
11346 DSAStack->getTaskgroupReductionRef());
11355 if (
C->getClauseKind() == OMPC_flush)
11364 if (
C->getClauseKind() == OMPC_acq_rel ||
11365 C->getClauseKind() == OMPC_acquire ||
11366 C->getClauseKind() == OMPC_release ||
11367 C->getClauseKind() == OMPC_seq_cst ) {
11368 if (MemOrderKind != OMPC_unknown) {
11369 Diag(
C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
11370 << getOpenMPDirectiveName(OMPD_flush, OMPVersion) << 1
11372 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
11375 MemOrderKind =
C->getClauseKind();
11376 MemOrderLoc =
C->getBeginLoc();
11380 if (FC && OrderClause) {
11383 Diag(OrderClause->
getBeginLoc(), diag::note_omp_flush_order_clause_here)
11387 return OMPFlushDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses);
11393 if (Clauses.empty()) {
11394 Diag(StartLoc, diag::err_omp_depobj_expected);
11396 }
else if (Clauses[0]->getClauseKind() != OMPC_depobj) {
11397 Diag(Clauses[0]->getBeginLoc(), diag::err_omp_depobj_expected);
11401 if (Clauses.size() > 2) {
11402 Diag(Clauses[2]->getBeginLoc(),
11403 diag::err_omp_depobj_single_clause_expected);
11405 }
else if (Clauses.size() < 1) {
11406 Diag(Clauses[0]->getEndLoc(), diag::err_omp_depobj_single_clause_expected);
11409 return OMPDepobjDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses);
11416 if (Clauses.size() != 1) {
11417 Diag(Clauses.empty() ? EndLoc : Clauses[1]->getBeginLoc(),
11418 diag::err_omp_scan_single_clause_expected);
11427 return StmtError(
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
11428 << getOpenMPDirectiveName(OMPD_scan, OMPVersion) << 5);
11433 if (
DSAStack->doesParentHasScanDirective()) {
11434 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"scan";
11436 diag::note_omp_previous_directive)
11440 DSAStack->setParentHasScanDirective(StartLoc);
11448 const OMPClause *DependFound =
nullptr;
11449 const OMPClause *DependSourceClause =
nullptr;
11450 const OMPClause *DependSinkClause =
nullptr;
11451 const OMPClause *DoacrossFound =
nullptr;
11452 const OMPClause *DoacrossSourceClause =
nullptr;
11453 const OMPClause *DoacrossSinkClause =
nullptr;
11454 bool ErrorFound =
false;
11458 auto DOC = dyn_cast<OMPDoacrossClause>(
C);
11459 auto DC = dyn_cast<OMPDependClause>(
C);
11461 DependFound = DC ?
C :
nullptr;
11462 DoacrossFound = DOC ?
C :
nullptr;
11463 OMPDoacrossKind ODK;
11464 if ((DC && DC->getDependencyKind() == OMPC_DEPEND_source) ||
11465 (DOC && (ODK.isSource(DOC)))) {
11466 if ((DC && DependSourceClause) || (DOC && DoacrossSourceClause)) {
11468 Diag(
C->getBeginLoc(), diag::err_omp_more_one_clause)
11469 << getOpenMPDirectiveName(OMPD_ordered, OMPVersion)
11475 DependSourceClause =
C;
11477 DoacrossSourceClause =
C;
11479 if ((DC && DependSinkClause) || (DOC && DoacrossSinkClause)) {
11480 Diag(
C->getBeginLoc(), diag::err_omp_sink_and_source_not_allowed)
11481 << (DC ?
"depend" :
"doacross") << 0;
11484 }
else if ((DC && DC->getDependencyKind() == OMPC_DEPEND_sink) ||
11485 (DOC && (ODK.isSink(DOC) || ODK.isSinkIter(DOC)))) {
11486 if (DependSourceClause || DoacrossSourceClause) {
11487 Diag(
C->getBeginLoc(), diag::err_omp_sink_and_source_not_allowed)
11488 << (DC ?
"depend" :
"doacross") << 1;
11492 DependSinkClause =
C;
11494 DoacrossSinkClause =
C;
11496 }
else if (
C->getClauseKind() == OMPC_threads) {
11498 }
else if (
C->getClauseKind() == OMPC_simd) {
11502 if (!ErrorFound && !SC &&
11507 Diag(StartLoc, diag::err_omp_prohibited_region_simd)
11510 }
else if ((DependFound || DoacrossFound) && (TC || SC)) {
11513 Diag(Loc, diag::err_omp_depend_clause_thread_simd)
11518 }
else if ((DependFound || DoacrossFound) &&
11519 !
DSAStack->getParentOrderedRegionParam().first) {
11522 Diag(Loc, diag::err_omp_ordered_directive_without_param)
11526 }
else if (TC || Clauses.empty()) {
11527 if (
const Expr *Param =
DSAStack->getParentOrderedRegionParam().first) {
11529 Diag(ErrLoc, diag::err_omp_ordered_directive_with_param)
11530 << (TC !=
nullptr);
11531 Diag(Param->getBeginLoc(), diag::note_omp_ordered_param) << 1;
11535 if ((!AStmt && !DependFound && !DoacrossFound) || ErrorFound)
11543 if (!DependFound && !DoacrossFound) {
11544 if (
DSAStack->doesParentHasOrderedDirective()) {
11545 Diag(StartLoc, diag::err_omp_several_directives_in_region) <<
"ordered";
11547 diag::note_omp_previous_directive)
11551 DSAStack->setParentHasOrderedDirective(StartLoc);
11557 SemaRef.setFunctionHasBranchProtectedScope();
11560 return OMPOrderedDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses,
11567class OpenMPAtomicUpdateChecker {
11569 enum ExprAnalysisErrorCode {
11573 NotABinaryOrUnaryExpression,
11575 NotAnUnaryIncDecExpression,
11581 NotABinaryExpression,
11584 NotABinaryOperator,
11587 NotAnUpdateExpression,
11590 NotAValidExpression,
11616 OpenMPAtomicUpdateChecker(
Sema &SemaRef)
11626 bool checkStatement(Stmt *S,
unsigned DiagId = 0,
unsigned NoteId = 0);
11628 Expr *
getX()
const {
return X; }
11630 Expr *
getExpr()
const {
return E; }
11644 bool checkBinaryOperation(BinaryOperator *AtomicBinOp,
unsigned DiagId = 0,
11645 unsigned NoteId = 0);
11648bool OpenMPAtomicUpdateChecker::checkBinaryOperation(
11649 BinaryOperator *AtomicBinOp,
unsigned DiagId,
unsigned NoteId) {
11650 ExprAnalysisErrorCode ErrorFound = NoError;
11656 if (AtomicBinOp->
getOpcode() == BO_Assign) {
11658 if (
const auto *AtomicInnerBinOp = dyn_cast<BinaryOperator>(
11660 if (AtomicInnerBinOp->isMultiplicativeOp() ||
11661 AtomicInnerBinOp->isAdditiveOp() || AtomicInnerBinOp->isShiftOp() ||
11662 AtomicInnerBinOp->isBitwiseOp()) {
11663 Op = AtomicInnerBinOp->getOpcode();
11664 OpLoc = AtomicInnerBinOp->getOperatorLoc();
11665 Expr *LHS = AtomicInnerBinOp->getLHS();
11666 Expr *RHS = AtomicInnerBinOp->getRHS();
11667 llvm::FoldingSetNodeID XId, LHSId, RHSId;
11674 if (XId == LHSId) {
11677 }
else if (XId == RHSId) {
11681 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11682 ErrorRange = AtomicInnerBinOp->getSourceRange();
11683 NoteLoc =
X->getExprLoc();
11684 NoteRange =
X->getSourceRange();
11685 ErrorFound = NotAnUpdateExpression;
11688 ErrorLoc = AtomicInnerBinOp->getExprLoc();
11689 ErrorRange = AtomicInnerBinOp->getSourceRange();
11690 NoteLoc = AtomicInnerBinOp->getOperatorLoc();
11692 ErrorFound = NotABinaryOperator;
11697 ErrorFound = NotABinaryExpression;
11704 ErrorFound = NotAnAssignmentOp;
11706 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11707 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11708 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11712 E =
X = UpdateExpr =
nullptr;
11713 return ErrorFound != NoError;
11716bool OpenMPAtomicUpdateChecker::checkStatement(
Stmt *S,
unsigned DiagId,
11718 ExprAnalysisErrorCode ErrorFound = NoError;
11729 if (
auto *AtomicBody = dyn_cast<Expr>(S)) {
11730 AtomicBody = AtomicBody->IgnoreParenImpCasts();
11731 if (AtomicBody->getType()->isScalarType() ||
11732 AtomicBody->isInstantiationDependent()) {
11733 if (
const auto *AtomicCompAssignOp = dyn_cast<CompoundAssignOperator>(
11734 AtomicBody->IgnoreParenImpCasts())) {
11737 AtomicCompAssignOp->getOpcode());
11738 OpLoc = AtomicCompAssignOp->getOperatorLoc();
11739 E = AtomicCompAssignOp->getRHS();
11742 }
else if (
auto *AtomicBinOp = dyn_cast<BinaryOperator>(
11743 AtomicBody->IgnoreParenImpCasts())) {
11745 if (checkBinaryOperation(AtomicBinOp, DiagId, NoteId))
11747 }
else if (
const auto *AtomicUnaryOp = dyn_cast<UnaryOperator>(
11748 AtomicBody->IgnoreParenImpCasts())) {
11750 if (AtomicUnaryOp->isIncrementDecrementOp()) {
11752 Op = AtomicUnaryOp->isIncrementOp() ? BO_Add : BO_Sub;
11753 OpLoc = AtomicUnaryOp->getOperatorLoc();
11754 X = AtomicUnaryOp->getSubExpr()->IgnoreParens();
11758 ErrorFound = NotAnUnaryIncDecExpression;
11759 ErrorLoc = AtomicUnaryOp->getExprLoc();
11760 ErrorRange = AtomicUnaryOp->getSourceRange();
11761 NoteLoc = AtomicUnaryOp->getOperatorLoc();
11764 }
else if (!AtomicBody->isInstantiationDependent()) {
11765 ErrorFound = NotABinaryOrUnaryExpression;
11766 NoteLoc = ErrorLoc = AtomicBody->getExprLoc();
11767 NoteRange = ErrorRange = AtomicBody->getSourceRange();
11768 }
else if (AtomicBody->containsErrors()) {
11769 ErrorFound = NotAValidExpression;
11770 NoteLoc = ErrorLoc = AtomicBody->getExprLoc();
11771 NoteRange = ErrorRange = AtomicBody->getSourceRange();
11774 ErrorFound = NotAScalarType;
11775 NoteLoc = ErrorLoc = AtomicBody->getBeginLoc();
11776 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11779 ErrorFound = NotAnExpression;
11781 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
11783 if (ErrorFound != NoError && DiagId != 0 && NoteId != 0) {
11784 SemaRef.
Diag(ErrorLoc, DiagId) << ErrorRange;
11785 SemaRef.
Diag(NoteLoc, NoteId) << ErrorFound << NoteRange;
11789 E =
X = UpdateExpr =
nullptr;
11790 if (ErrorFound == NoError && E &&
X) {
11807 UpdateExpr =
Update.get();
11809 return ErrorFound != NoError;
11813llvm::FoldingSetNodeID getNodeId(
ASTContext &Context,
const Expr *S) {
11814 llvm::FoldingSetNodeID Id;
11820bool checkIfTwoExprsAreSame(
ASTContext &Context,
const Expr *LHS,
11822 return getNodeId(Context, LHS) == getNodeId(Context, RHS);
11825class OpenMPAtomicCompareChecker {
11872 struct ErrorInfoTy {
11874 SourceLocation ErrorLoc;
11875 SourceRange ErrorRange;
11876 SourceLocation NoteLoc;
11877 SourceRange NoteRange;
11880 OpenMPAtomicCompareChecker(Sema &S) : ContextRef(S.getASTContext()) {}
11883 bool checkStmt(Stmt *S, ErrorInfoTy &ErrorInfo);
11885 Expr *
getX()
const {
return X; }
11886 Expr *getE()
const {
return E; }
11887 Expr *
getD()
const {
return D; }
11888 Expr *getCond()
const {
return C; }
11889 bool isXBinopExpr()
const {
return IsXBinopExpr; }
11893 ASTContext &ContextRef;
11908 bool IsXBinopExpr =
true;
11911 bool checkCondUpdateStmt(IfStmt *S, ErrorInfoTy &ErrorInfo);
11914 bool checkCondExprStmt(Stmt *S, ErrorInfoTy &ErrorInfo);
11917 bool checkType(ErrorInfoTy &ErrorInfo)
const;
11919 static bool CheckValue(
const Expr *E, ErrorInfoTy &ErrorInfo,
11920 bool ShouldBeLValue,
bool ShouldBeInteger =
false) {
11924 if (ShouldBeLValue && !E->
isLValue()) {
11925 ErrorInfo.Error = ErrorTy::XNotLValue;
11926 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11927 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11933 ErrorInfo.Error = ErrorTy::NotScalar;
11934 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11935 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
11939 ErrorInfo.Error = ErrorTy::NotInteger;
11940 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = E->
getExprLoc();
11941 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = E->
getSourceRange();
// Checks the conditional-update form of 'atomic compare':
//   if (x == e) { x = d; }  or  if (x ordop e) { x = e; }  (and mirrored
// operand orders). Unwraps a single-statement CompoundStmt 'then', requires
// it to be a plain assignment, then classifies the if-condition as either
// an equality or a < / > comparison (built-in BinaryOperator or overloaded
// CXXOperatorCallExpr) and records X/E/D and IsXBinopExpr accordingly.
// NOTE(review): heavily elided — missing `return false;` statements, closing
// braces, and several guard conditions (upstream numbers jump throughout);
// do not treat this text as the complete function.
11949bool OpenMPAtomicCompareChecker::checkCondUpdateStmt(
IfStmt *S,
11950 ErrorInfoTy &ErrorInfo) {
11952 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
11953 if (CS->body_empty()) {
11954 ErrorInfo.Error = ErrorTy::NoStmt;
11955 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11956 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
11959 if (CS->size() > 1) {
11960 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
11961 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
11962 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->
getSourceRange();
11965 Then = CS->body_front();
// The 'then' branch must be exactly one assignment statement.
11968 auto *BO = dyn_cast<BinaryOperator>(Then);
11970 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11971 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
11972 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
11975 if (BO->getOpcode() != BO_Assign) {
11976 ErrorInfo.Error = ErrorTy::NotAnAssignment;
11977 ErrorInfo.ErrorLoc = BO->getExprLoc();
11978 ErrorInfo.NoteLoc = BO->getOperatorLoc();
11979 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
// The condition may be a built-in comparison or an overloaded operator call.
11985 auto *
Cond = dyn_cast<BinaryOperator>(S->
getCond());
11986 auto *
Call = dyn_cast<CXXOperatorCallExpr>(S->
getCond());
11987 Expr *LHS =
nullptr;
11988 Expr *RHS =
nullptr;
11990 LHS =
Cond->getLHS();
11991 RHS =
Cond->getRHS();
11993 LHS =
Call->getArg(0);
11994 RHS =
Call->getArg(1);
11996 ErrorInfo.Error = ErrorTy::NotABinaryOp;
// Equality form: x may be on either side of '=='.
12002 if ((
Cond &&
Cond->getOpcode() == BO_EQ) ||
12003 (
Call &&
Call->getOperator() == OverloadedOperatorKind::OO_EqualEqual)) {
12006 if (checkIfTwoExprsAreSame(ContextRef,
X, LHS)) {
12008 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, RHS)) {
12011 ErrorInfo.Error = ErrorTy::InvalidComparison;
12013 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
// Ordering form: only '<' or '>' is accepted.
12017 }
else if ((
Cond &&
12018 (
Cond->getOpcode() == BO_LT ||
Cond->getOpcode() == BO_GT)) ||
12020 (
Call->getOperator() == OverloadedOperatorKind::OO_Less ||
12021 Call->getOperator() == OverloadedOperatorKind::OO_Greater))) {
12023 if (checkIfTwoExprsAreSame(ContextRef,
X, LHS) &&
12024 checkIfTwoExprsAreSame(ContextRef, E, RHS)) {
12026 }
else if (checkIfTwoExprsAreSame(ContextRef, E, LHS) &&
12027 checkIfTwoExprsAreSame(ContextRef,
X, RHS)) {
// x appears on the RHS of the comparison.
12029 IsXBinopExpr =
false;
12031 ErrorInfo.Error = ErrorTy::InvalidComparison;
12033 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12038 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
// An 'else' branch is not allowed in this form.
12045 ErrorInfo.Error = ErrorTy::UnexpectedElse;
// Checks the conditional-expression form of 'atomic compare':
//   x = (x == e) ? d : x;  or  x = (x ordop e) ? e : x;
// Requires an assignment whose RHS is a ConditionalOperator, whose false arm
// is x itself, and whose condition is an '==' or '<'/'>' comparison involving
// x; fills X/E/D and IsXBinopExpr like checkCondUpdateStmt.
// NOTE(review): elided lines (missing `return false;`, braces, and guard
// conditions — upstream numbering jumps); incomplete as shown.
12054bool OpenMPAtomicCompareChecker::checkCondExprStmt(
Stmt *S,
12055 ErrorInfoTy &ErrorInfo) {
12056 auto *BO = dyn_cast<BinaryOperator>(S);
12058 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12059 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->
getBeginLoc();
12060 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->
getSourceRange();
12063 if (BO->getOpcode() != BO_Assign) {
12064 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12065 ErrorInfo.ErrorLoc = BO->getExprLoc();
12066 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12067 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
// RHS of the assignment must be a conditional operator.
12073 auto *CO = dyn_cast<ConditionalOperator>(BO->getRHS()->IgnoreParenImpCasts());
12075 ErrorInfo.Error = ErrorTy::NotCondOp;
12076 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getRHS()->getExprLoc();
12077 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getRHS()->getSourceRange();
// The false arm must be x itself (value unchanged when condition fails).
12081 if (!checkIfTwoExprsAreSame(ContextRef,
X, CO->getFalseExpr())) {
12082 ErrorInfo.Error = ErrorTy::WrongFalseExpr;
12083 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getFalseExpr()->getExprLoc();
12084 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12085 CO->getFalseExpr()->getSourceRange();
// Condition may be a built-in comparison or an overloaded operator call.
12089 auto *
Cond = dyn_cast<BinaryOperator>(CO->getCond());
12090 auto *
Call = dyn_cast<CXXOperatorCallExpr>(CO->getCond());
12091 Expr *LHS =
nullptr;
12092 Expr *RHS =
nullptr;
12094 LHS =
Cond->getLHS();
12095 RHS =
Cond->getRHS();
12097 LHS =
Call->getArg(0);
12098 RHS =
Call->getArg(1);
12100 ErrorInfo.Error = ErrorTy::NotABinaryOp;
12101 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
12102 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12103 CO->getCond()->getSourceRange();
// Equality form: true arm is the replacement value D.
12107 if ((
Cond &&
Cond->getOpcode() == BO_EQ) ||
12108 (
Call &&
Call->getOperator() == OverloadedOperatorKind::OO_EqualEqual)) {
12110 D = CO->getTrueExpr();
12111 if (checkIfTwoExprsAreSame(ContextRef,
X, LHS)) {
12113 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, RHS)) {
12116 ErrorInfo.Error = ErrorTy::InvalidComparison;
12117 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
12118 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12119 CO->getCond()->getSourceRange();
// Ordering form: true arm is E (min/max style update).
12122 }
else if ((
Cond &&
12123 (
Cond->getOpcode() == BO_LT ||
Cond->getOpcode() == BO_GT)) ||
12125 (
Call->getOperator() == OverloadedOperatorKind::OO_Less ||
12126 Call->getOperator() == OverloadedOperatorKind::OO_Greater))) {
12128 E = CO->getTrueExpr();
12129 if (checkIfTwoExprsAreSame(ContextRef,
X, LHS) &&
12130 checkIfTwoExprsAreSame(ContextRef, E, RHS)) {
12132 }
else if (checkIfTwoExprsAreSame(ContextRef, E, LHS) &&
12133 checkIfTwoExprsAreSame(ContextRef,
X, RHS)) {
12135 IsXBinopExpr =
false;
12137 ErrorInfo.Error = ErrorTy::InvalidComparison;
12138 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
12139 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12140 CO->getCond()->getSourceRange();
12144 ErrorInfo.Error = ErrorTy::InvalidBinaryOp;
12145 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CO->getCond()->getExprLoc();
12146 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12147 CO->getCond()->getSourceRange();
// Type-checks the decomposed pieces: X must be an lvalue; E and the optional
// D are checked as values (non-lvalue); delegates to CheckValue.
// NOTE(review): `return false;` / `return true;` lines were elided by
// extraction.
12154bool OpenMPAtomicCompareChecker::checkType(ErrorInfoTy &ErrorInfo)
const {
12156 assert(
X && E &&
"X and E cannot be nullptr");
12158 if (!CheckValue(
X, ErrorInfo,
true))
12161 if (!CheckValue(E, ErrorInfo,
false))
12164 if (D && !CheckValue(D, ErrorInfo,
false))
// Entry point: unwraps an optional single-statement CompoundStmt, then
// dispatches an IfStmt to checkCondUpdateStmt and anything else to
// checkCondExprStmt; finally validates types via checkType.
// NOTE(review): elided lines (missing early returns / braces between the
// numbered statements); incomplete as shown.
12170bool OpenMPAtomicCompareChecker::checkStmt(
12171 Stmt *S, OpenMPAtomicCompareChecker::ErrorInfoTy &ErrorInfo) {
12172 auto *CS = dyn_cast<CompoundStmt>(S);
12174 if (CS->body_empty()) {
12175 ErrorInfo.Error = ErrorTy::NoStmt;
12176 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12177 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12181 if (CS->size() != 1) {
12182 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12183 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12184 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12187 S = CS->body_front();
12192 if (
auto *IS = dyn_cast<IfStmt>(S)) {
12198 Res = checkCondUpdateStmt(IS, ErrorInfo);
12204 Res = checkCondExprStmt(S, ErrorInfo);
12210 return checkType(ErrorInfo);
// Checker for the combined 'atomic compare capture' forms; extends the plain
// compare checker with the captured value V and the boolean result R.
// NOTE(review): member declarations for V and R (and access specifiers) were
// elided by extraction; the getters below reference them.
12213class OpenMPAtomicCompareCaptureChecker final
12214 :
public OpenMPAtomicCompareChecker {
12216 OpenMPAtomicCompareCaptureChecker(Sema &S) : OpenMPAtomicCompareChecker(S) {}
12218 Expr *
getV()
const {
return V; }
12219 Expr *
getR()
const {
return R; }
12224 bool checkStmt(Stmt *S, ErrorInfoTy &ErrorInfo);
12227 bool checkType(ErrorInfoTy &ErrorInfo);
12239 bool checkForm3(IfStmt *S, ErrorInfoTy &ErrorInfo);
12243 bool checkForm45(Stmt *S, ErrorInfoTy &ErrorInfo);
// Runs the base-class type check, then additionally requires V to be an
// lvalue and R to be an integer lvalue (per CheckValue's flags).
// NOTE(review): `return false;` / `return true;` lines elided by extraction.
12255bool OpenMPAtomicCompareCaptureChecker::checkType(ErrorInfoTy &ErrorInfo) {
12256 if (!OpenMPAtomicCompareChecker::checkType(ErrorInfo))
12259 if (
V && !CheckValue(
V, ErrorInfo,
true))
12262 if (R && !CheckValue(R, ErrorInfo,
true,
true))
// Checks "form 3" of compare-capture: an if/else where the 'then' branch
// updates x on an '==' condition and the 'else' branch captures x into v
// (v = x). Both branches must be single assignment statements.
// NOTE(review): heavily elided (missing returns, braces, and some guards —
// upstream numbering jumps); do not treat this text as complete.
12268bool OpenMPAtomicCompareCaptureChecker::checkForm3(
IfStmt *S,
12269 ErrorInfoTy &ErrorInfo) {
12273 if (
auto *CS = dyn_cast<CompoundStmt>(Then)) {
12274 if (CS->body_empty()) {
12275 ErrorInfo.Error = ErrorTy::NoStmt;
12276 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12277 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12280 if (CS->size() > 1) {
12281 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12282 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12283 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12286 Then = CS->body_front();
// 'then' must be a single plain assignment.
12289 auto *BO = dyn_cast<BinaryOperator>(Then);
12291 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12292 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Then->getBeginLoc();
12293 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Then->getSourceRange();
12296 if (BO->getOpcode() != BO_Assign) {
12297 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12298 ErrorInfo.ErrorLoc = BO->getExprLoc();
12299 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12300 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
// Only equality conditions are legal in form 3.
12307 auto *
Cond = dyn_cast<BinaryOperator>(S->
getCond());
12308 auto *
Call = dyn_cast<CXXOperatorCallExpr>(S->
getCond());
12309 Expr *LHS =
nullptr;
12310 Expr *RHS =
nullptr;
12312 LHS =
Cond->getLHS();
12313 RHS =
Cond->getRHS();
12315 LHS =
Call->getArg(0);
12316 RHS =
Call->getArg(1);
12318 ErrorInfo.Error = ErrorTy::NotABinaryOp;
12323 if ((
Cond &&
Cond->getOpcode() != BO_EQ) ||
12324 (
Call &&
Call->getOperator() != OverloadedOperatorKind::OO_EqualEqual)) {
12325 ErrorInfo.Error = ErrorTy::NotEQ;
12331 if (checkIfTwoExprsAreSame(ContextRef,
X, LHS)) {
12333 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, RHS)) {
12336 ErrorInfo.Error = ErrorTy::InvalidComparison;
// Form 3 requires an 'else' branch that captures x.
12345 ErrorInfo.Error = ErrorTy::NoElse;
12346 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->
getBeginLoc();
12347 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->
getSourceRange();
12352 if (
auto *CS = dyn_cast<CompoundStmt>(Else)) {
12353 if (CS->body_empty()) {
12354 ErrorInfo.Error = ErrorTy::NoStmt;
12355 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12356 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12359 if (CS->size() > 1) {
12360 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12361 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12362 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->
getSourceRange();
12365 Else = CS->body_front();
// 'else' must be a single plain assignment whose RHS is x.
12368 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
12370 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12371 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
12372 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
12375 if (ElseBO->getOpcode() != BO_Assign) {
12376 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12377 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
12378 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
12379 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
12383 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
12384 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12385 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseBO->getRHS()->getExprLoc();
12386 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12387 ElseBO->getRHS()->getSourceRange();
// The capture target is the LHS of the else-assignment.
12391 V = ElseBO->getLHS();
12393 return checkType(ErrorInfo);
// Checks forms 4/5 of compare-capture: a two-statement compound
//   { r = x == e; if (r) { x = d; } [else { v = x; }] }
// where the first statement assigns the comparison result and the IfStmt's
// condition is that same LHS; an optional 'else' captures x into v.
// NOTE(review): heavily elided (CS/S1/S2/BO extraction, returns, braces and
// some guards are missing — upstream numbering jumps); incomplete as shown.
12396bool OpenMPAtomicCompareCaptureChecker::checkForm45(
Stmt *S,
12397 ErrorInfoTy &ErrorInfo) {
12401 assert(CS->size() == 2 &&
"CompoundStmt size is not expected");
12404 assert(S1->getOpcode() == BO_Assign &&
"unexpected binary operator");
// The if-condition must be the very variable assigned by S1.
12406 if (!checkIfTwoExprsAreSame(ContextRef, S1->getLHS(), S2->getCond())) {
12407 ErrorInfo.Error = ErrorTy::InvalidCondition;
12408 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getCond()->getExprLoc();
12409 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S1->getLHS()->getSourceRange();
12415 auto *Then = S2->getThen();
12416 if (
auto *ThenCS = dyn_cast<CompoundStmt>(Then)) {
12417 if (ThenCS->body_empty()) {
12418 ErrorInfo.Error = ErrorTy::NoStmt;
12419 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
12420 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
12423 if (ThenCS->size() > 1) {
12424 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12425 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ThenCS->getBeginLoc();
12426 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenCS->getSourceRange();
12429 Then = ThenCS->body_front();
// 'then' must be a single plain assignment x = d.
12432 auto *ThenBO = dyn_cast<BinaryOperator>(Then);
12434 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12435 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S2->getBeginLoc();
12436 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S2->getSourceRange();
12439 if (ThenBO->getOpcode() != BO_Assign) {
12440 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12441 ErrorInfo.ErrorLoc = ThenBO->getExprLoc();
12442 ErrorInfo.NoteLoc = ThenBO->getOperatorLoc();
12443 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ThenBO->getSourceRange();
12447 X = ThenBO->getLHS();
12448 D = ThenBO->getRHS();
// The comparison in S1's RHS must be '=='; x may be on either side.
12451 if (BO->getOpcode() != BO_EQ) {
12452 ErrorInfo.Error = ErrorTy::NotEQ;
12453 ErrorInfo.ErrorLoc = BO->getExprLoc();
12454 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12455 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12461 if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getLHS())) {
12463 }
else if (checkIfTwoExprsAreSame(ContextRef,
X, BO->getRHS())) {
12466 ErrorInfo.Error = ErrorTy::InvalidComparison;
12467 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = BO->getExprLoc();
12468 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
// Optional 'else' branch: must be the capture v = x.
12472 if (S2->getElse()) {
12475 auto *Else = S2->getElse();
12476 if (
auto *ElseCS = dyn_cast<CompoundStmt>(Else)) {
12477 if (ElseCS->body_empty()) {
12478 ErrorInfo.Error = ErrorTy::NoStmt;
12479 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
12480 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
12483 if (ElseCS->size() > 1) {
12484 ErrorInfo.Error = ErrorTy::MoreThanOneStmt;
12485 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = ElseCS->getBeginLoc();
12486 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseCS->getSourceRange();
12489 Else = ElseCS->body_front();
12492 auto *ElseBO = dyn_cast<BinaryOperator>(Else);
12494 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12495 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = Else->getBeginLoc();
12496 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = Else->getSourceRange();
12499 if (ElseBO->getOpcode() != BO_Assign) {
12500 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12501 ErrorInfo.ErrorLoc = ElseBO->getExprLoc();
12502 ErrorInfo.NoteLoc = ElseBO->getOperatorLoc();
12503 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = ElseBO->getSourceRange();
12506 if (!checkIfTwoExprsAreSame(ContextRef,
X, ElseBO->getRHS())) {
12507 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12508 ErrorInfo.ErrorLoc = ElseBO->getRHS()->getExprLoc();
12509 ErrorInfo.NoteLoc =
X->getExprLoc();
12510 ErrorInfo.ErrorRange = ElseBO->getRHS()->getSourceRange();
12511 ErrorInfo.NoteRange =
X->getSourceRange();
12515 V = ElseBO->getLHS();
12518 return checkType(ErrorInfo);
// Entry point for compare-capture: a bare IfStmt is form 3; a one-statement
// compound wrapping an IfStmt is also form 3; a two-statement compound is
// either form 4/5 (first statement is the comparison assignment) or a
// (conditional-update, capture) pair checked via local lambdas; anything
// larger is MoreThanTwoStmts. Finishes with the extended type check.
// NOTE(review): heavily elided (classification conditions around 12561-12589,
// early returns, braces are missing — upstream numbering jumps).
12521bool OpenMPAtomicCompareCaptureChecker::checkStmt(
Stmt *S,
12522 ErrorInfoTy &ErrorInfo) {
12524 if (
auto *IS = dyn_cast<IfStmt>(S))
12525 return checkForm3(IS, ErrorInfo);
12527 auto *CS = dyn_cast<CompoundStmt>(S);
12529 ErrorInfo.Error = ErrorTy::NotCompoundStmt;
12530 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = S->
getBeginLoc();
12531 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = S->
getSourceRange();
12534 if (CS->body_empty()) {
12535 ErrorInfo.Error = ErrorTy::NoStmt;
12536 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12537 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12542 if (CS->size() == 1) {
12543 auto *IS = dyn_cast<IfStmt>(CS->body_front());
12545 ErrorInfo.Error = ErrorTy::NotIfStmt;
12546 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->body_front()->
getBeginLoc();
12547 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
12552 return checkForm3(IS, ErrorInfo);
12553 }
else if (CS->size() == 2) {
12554 auto *S1 = CS->body_front();
12555 auto *S2 = CS->body_back();
12557 Stmt *UpdateStmt =
nullptr;
12558 Stmt *CondUpdateStmt =
nullptr;
12559 Stmt *CondExprStmt =
nullptr;
12561 if (
auto *BO = dyn_cast<BinaryOperator>(S1)) {
12571 return checkForm45(CS, ErrorInfo);
12580 CondUpdateStmt = S2;
12589 CondUpdateStmt = S1;
// Helper: the conditional-update half must be an IfStmt satisfying the
// base-class conditional-update rules.
12592 auto CheckCondUpdateStmt = [
this, &ErrorInfo](
Stmt *CUS) {
12593 auto *IS = dyn_cast<IfStmt>(CUS);
12595 ErrorInfo.Error = ErrorTy::NotIfStmt;
12596 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CUS->getBeginLoc();
12597 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CUS->getSourceRange();
12601 return checkCondUpdateStmt(IS, ErrorInfo);
// Helper: the capture half must be `v = x`; records V.
12605 auto CheckUpdateStmt = [
this, &ErrorInfo](
Stmt *US) {
12606 auto *BO = dyn_cast<BinaryOperator>(US);
12608 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12609 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = US->getBeginLoc();
12610 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = US->getSourceRange();
12613 if (BO->getOpcode() != BO_Assign) {
12614 ErrorInfo.Error = ErrorTy::NotAnAssignment;
12615 ErrorInfo.ErrorLoc = BO->getExprLoc();
12616 ErrorInfo.NoteLoc = BO->getOperatorLoc();
12617 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = BO->getSourceRange();
12620 if (!checkIfTwoExprsAreSame(ContextRef, this->
X, BO->getRHS())) {
12621 ErrorInfo.Error = ErrorTy::InvalidAssignment;
12622 ErrorInfo.ErrorLoc = BO->getRHS()->getExprLoc();
12623 ErrorInfo.NoteLoc = this->
X->getExprLoc();
12624 ErrorInfo.ErrorRange = BO->getRHS()->getSourceRange();
12625 ErrorInfo.NoteRange = this->
X->getSourceRange();
12629 this->
V = BO->getLHS();
12634 if (CondUpdateStmt && !CheckCondUpdateStmt(CondUpdateStmt))
12636 if (CondExprStmt && !checkCondExprStmt(CondExprStmt, ErrorInfo))
12638 if (!CheckUpdateStmt(UpdateStmt))
12641 ErrorInfo.Error = ErrorTy::MoreThanTwoStmts;
12642 ErrorInfo.ErrorLoc = ErrorInfo.NoteLoc = CS->
getBeginLoc();
12643 ErrorInfo.ErrorRange = ErrorInfo.NoteRange = CS->
getSourceRange();
12647 return checkType(ErrorInfo);
// Body fragment of the OpenMP 'atomic' directive handler (the function
// signature was lost in extraction; upstream numbering starts at 12658).
// Visible flow: validate clause combinations (mutually exclusive atomic
// kinds, single memory-order clause, 'compare' requirements), detect the
// compare+capture combination, reject incompatible memory orders, then
// analyze the associated statement per atomic kind (read / write / update /
// capture / compare) and finally build the OMPAtomicDirective.
// NOTE(review): very heavily elided — case labels, returns, braces and many
// statements are missing between the numbered lines; comments below describe
// only what the surviving lines show.
12658 DSAStack->addAtomicDirectiveLoc(StartLoc);
// --- Clause validation -----------------------------------------------------
12671 bool MutexClauseEncountered =
false;
12672 llvm::SmallSet<OpenMPClauseKind, 2> EncounteredAtomicKinds;
12674 switch (
C->getClauseKind()) {
12678 MutexClauseEncountered =
true;
12681 case OMPC_compare: {
// At most one atomic-kind clause may appear; duplicates are diagnosed.
12682 if (AtomicKind != OMPC_unknown && MutexClauseEncountered) {
12683 Diag(
C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12685 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
12688 AtomicKind =
C->getClauseKind();
12689 AtomicKindLoc =
C->getBeginLoc();
12690 if (!EncounteredAtomicKinds.insert(
C->getClauseKind()).second) {
12691 Diag(
C->getBeginLoc(), diag::err_omp_atomic_several_clauses)
12693 Diag(AtomicKindLoc, diag::note_omp_previous_mem_order_clause)
// A clause requiring 'compare' (per elided case labels) without it.
12701 if (!EncounteredAtomicKinds.contains(OMPC_compare)) {
12702 Diag(
C->getBeginLoc(), diag::err_omp_atomic_no_compare)
// Memory-order clauses: only one allowed.
12713 case OMPC_relaxed: {
12714 if (MemOrderKind != OMPC_unknown) {
12715 Diag(
C->getBeginLoc(), diag::err_omp_several_mem_order_clauses)
12716 << getOpenMPDirectiveName(OMPD_atomic, OMPVersion) << 0
12718 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
12721 MemOrderKind =
C->getClauseKind();
12722 MemOrderLoc =
C->getBeginLoc();
12730 llvm_unreachable(
"unknown clause is encountered");
// compare + capture together are treated as the compare-capture construct.
12733 bool IsCompareCapture =
false;
12734 if (EncounteredAtomicKinds.contains(OMPC_compare) &&
12735 EncounteredAtomicKinds.contains(OMPC_capture)) {
12736 IsCompareCapture =
true;
12737 AtomicKind = OMPC_compare;
// Reject memory orders incompatible with the atomic kind (e.g. read with
// release, write/update with acquire).
12746 if ((AtomicKind == OMPC_read &&
12747 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_release)) ||
12748 ((AtomicKind == OMPC_write || AtomicKind == OMPC_update ||
12749 AtomicKind == OMPC_unknown) &&
12750 (MemOrderKind == OMPC_acq_rel || MemOrderKind == OMPC_acquire))) {
12752 if (AtomicKind == OMPC_unknown)
12754 Diag(Loc, diag::err_omp_atomic_incompatible_mem_order_clause)
12756 << (AtomicKind == OMPC_unknown ? 1 : 0)
12758 Diag(MemOrderLoc, diag::note_omp_previous_mem_order_clause)
// --- Associated-statement analysis ----------------------------------------
12762 Stmt *Body = AStmt;
12763 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Body))
12764 Body = EWC->getSubExpr();
12769 Expr *UE =
nullptr;
12771 Expr *CE =
nullptr;
// atomic read: body must be `v = x` with scalar lvalues.
12798 if (AtomicKind == OMPC_read) {
12805 } ErrorFound = NoError;
12810 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12811 const auto *AtomicBinOp =
12812 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12813 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12816 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12817 (
V->isInstantiationDependent() ||
V->getType()->isScalarType())) {
12818 if (!
X->isLValue() || !
V->isLValue()) {
12819 const Expr *NotLValueExpr =
X->isLValue() ?
V :
X;
12820 ErrorFound = NotAnLValue;
12826 }
else if (!
X->isInstantiationDependent() ||
12827 !
V->isInstantiationDependent()) {
12828 const Expr *NotScalarExpr =
12829 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12832 ErrorFound = NotAScalarType;
12838 }
else if (!AtomicBody->isInstantiationDependent()) {
12839 ErrorFound = NotAnAssignmentOp;
12840 ErrorLoc = AtomicBody->getExprLoc();
12841 ErrorRange = AtomicBody->getSourceRange();
12843 : AtomicBody->getExprLoc();
12845 : AtomicBody->getSourceRange();
12848 ErrorFound = NotAnExpression;
12850 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12852 if (ErrorFound != NoError) {
12853 Diag(ErrorLoc, diag::err_omp_atomic_read_not_expression_statement)
12855 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12856 << ErrorFound << NoteRange;
12859 if (
SemaRef.CurContext->isDependentContext())
// atomic write: body must be `x = expr` with scalar lvalue x.
12861 }
else if (AtomicKind == OMPC_write) {
12868 } ErrorFound = NoError;
12873 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12874 const auto *AtomicBinOp =
12875 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12876 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12878 E = AtomicBinOp->
getRHS();
12879 if ((
X->isInstantiationDependent() ||
X->getType()->isScalarType()) &&
12881 if (!
X->isLValue()) {
12882 ErrorFound = NotAnLValue;
12885 NoteLoc =
X->getExprLoc();
12886 NoteRange =
X->getSourceRange();
12888 }
else if (!
X->isInstantiationDependent() ||
12890 const Expr *NotScalarExpr =
12891 (
X->isInstantiationDependent() ||
X->getType()->isScalarType())
12894 ErrorFound = NotAScalarType;
12900 }
else if (!AtomicBody->isInstantiationDependent()) {
12901 ErrorFound = NotAnAssignmentOp;
12902 ErrorLoc = AtomicBody->getExprLoc();
12903 ErrorRange = AtomicBody->getSourceRange();
12905 : AtomicBody->getExprLoc();
12907 : AtomicBody->getSourceRange();
12910 ErrorFound = NotAnExpression;
12912 NoteRange = ErrorRange =
SourceRange(NoteLoc, NoteLoc);
12914 if (ErrorFound != NoError) {
12915 Diag(ErrorLoc, diag::err_omp_atomic_write_not_expression_statement)
12917 Diag(NoteLoc, diag::note_omp_atomic_read_write)
12918 << ErrorFound << NoteRange;
12921 if (
SemaRef.CurContext->isDependentContext())
// atomic update (or unspecified kind): delegate to the update checker.
12923 }
else if (AtomicKind == OMPC_update || AtomicKind == OMPC_unknown) {
12932 OpenMPAtomicUpdateChecker Checker(
SemaRef);
12933 if (Checker.checkStatement(
12935 (AtomicKind == OMPC_update)
12936 ? diag::err_omp_atomic_update_not_expression_statement
12937 : diag::err_omp_atomic_not_expression_statement,
12938 diag::note_omp_atomic_update))
12940 if (!
SemaRef.CurContext->isDependentContext()) {
12941 E = Checker.getExpr();
12942 X = Checker.getX();
12943 UE = Checker.getUpdateExpr();
// atomic capture: either a single capture expression, or a two-statement
// compound pairing an update with a read; both orders are tried.
12946 }
else if (AtomicKind == OMPC_capture) {
12949 NotACompoundStatement,
12950 NotTwoSubstatements,
12951 NotASpecificExpression,
12953 } ErrorFound = NoError;
12956 if (
const auto *AtomicBody = dyn_cast<Expr>(Body)) {
12965 const auto *AtomicBinOp =
12966 dyn_cast<BinaryOperator>(AtomicBody->IgnoreParenImpCasts());
12967 if (AtomicBinOp && AtomicBinOp->
getOpcode() == BO_Assign) {
12970 OpenMPAtomicUpdateChecker Checker(
SemaRef);
12971 if (Checker.checkStatement(
12972 Body, diag::err_omp_atomic_capture_not_expression_statement,
12973 diag::note_omp_atomic_update))
12975 E = Checker.getExpr();
12976 X = Checker.getX();
12977 UE = Checker.getUpdateExpr();
12980 }
else if (!AtomicBody->isInstantiationDependent()) {
12981 ErrorLoc = AtomicBody->getExprLoc();
12982 ErrorRange = AtomicBody->getSourceRange();
12984 : AtomicBody->getExprLoc();
12986 : AtomicBody->getSourceRange();
12987 ErrorFound = NotAnAssignmentOp;
12989 if (ErrorFound != NoError) {
12990 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_expression_statement)
12992 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
12995 if (
SemaRef.CurContext->isDependentContext())
12996 UE =
V = E =
X =
nullptr;
// Two-statement compound form of capture.
13014 if (
auto *CS = dyn_cast<CompoundStmt>(Body)) {
13016 if (CS->size() == 2) {
13018 Stmt *Second = CS->body_back();
13019 if (
auto *EWC = dyn_cast<ExprWithCleanups>(
First))
13020 First = EWC->getSubExpr()->IgnoreParenImpCasts();
13021 if (
auto *EWC = dyn_cast<ExprWithCleanups>(Second))
13022 Second = EWC->getSubExpr()->IgnoreParenImpCasts();
// Try: Second is the update, First is the read (v = x).
13024 OpenMPAtomicUpdateChecker Checker(
SemaRef);
13025 bool IsUpdateExprFound = !Checker.checkStatement(Second);
13027 if (IsUpdateExprFound) {
13028 BinOp = dyn_cast<BinaryOperator>(
First);
13029 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
13031 if (IsUpdateExprFound && !
SemaRef.CurContext->isDependentContext()) {
// The x read by the first statement must structurally match the
// x updated by the second.
13041 llvm::FoldingSetNodeID XId, PossibleXId;
13042 Checker.getX()->Profile(XId, Context,
true);
13043 PossibleX->
Profile(PossibleXId, Context,
true);
13044 IsUpdateExprFound = XId == PossibleXId;
13045 if (IsUpdateExprFound) {
13047 X = Checker.getX();
13048 E = Checker.getExpr();
13049 UE = Checker.getUpdateExpr();
// Otherwise try the mirrored order: First is the update, Second the read.
13054 if (!IsUpdateExprFound) {
13055 IsUpdateExprFound = !Checker.checkStatement(
First);
13057 if (IsUpdateExprFound) {
13058 BinOp = dyn_cast<BinaryOperator>(Second);
13059 IsUpdateExprFound = BinOp && BinOp->
getOpcode() == BO_Assign;
13061 if (IsUpdateExprFound &&
13062 !
SemaRef.CurContext->isDependentContext()) {
13072 llvm::FoldingSetNodeID XId, PossibleXId;
13073 Checker.getX()->Profile(XId, Context,
true);
13074 PossibleX->
Profile(PossibleXId, Context,
true);
13075 IsUpdateExprFound = XId == PossibleXId;
13076 if (IsUpdateExprFound) {
13078 X = Checker.getX();
13079 E = Checker.getExpr();
13080 UE = Checker.getUpdateExpr();
// Last resort: `v = x; x = expr;` pair (write + read) with matching x.
13086 if (!IsUpdateExprFound) {
13088 auto *FirstExpr = dyn_cast<Expr>(
First);
13089 auto *SecondExpr = dyn_cast<Expr>(Second);
13090 if (!FirstExpr || !SecondExpr ||
13091 !(FirstExpr->isInstantiationDependent() ||
13092 SecondExpr->isInstantiationDependent())) {
13093 auto *FirstBinOp = dyn_cast<BinaryOperator>(
First);
13094 if (!FirstBinOp || FirstBinOp->getOpcode() != BO_Assign) {
13095 ErrorFound = NotAnAssignmentOp;
13096 NoteLoc = ErrorLoc = FirstBinOp ? FirstBinOp->getOperatorLoc()
13097 :
First->getBeginLoc();
13098 NoteRange = ErrorRange = FirstBinOp
13099 ? FirstBinOp->getSourceRange()
13102 auto *SecondBinOp = dyn_cast<BinaryOperator>(Second);
13103 if (!SecondBinOp || SecondBinOp->getOpcode() != BO_Assign) {
13104 ErrorFound = NotAnAssignmentOp;
13105 NoteLoc = ErrorLoc = SecondBinOp
13106 ? SecondBinOp->getOperatorLoc()
13108 NoteRange = ErrorRange =
13109 SecondBinOp ? SecondBinOp->getSourceRange()
13112 Expr *PossibleXRHSInFirst =
13114 Expr *PossibleXLHSInSecond =
13116 llvm::FoldingSetNodeID X1Id, X2Id;
13117 PossibleXRHSInFirst->
Profile(X1Id, Context,
13119 PossibleXLHSInSecond->
Profile(X2Id, Context,
13121 IsUpdateExprFound = X1Id == X2Id;
13122 if (IsUpdateExprFound) {
13123 V = FirstBinOp->getLHS();
13124 X = SecondBinOp->getLHS();
13125 E = SecondBinOp->getRHS();
13130 ErrorFound = NotASpecificExpression;
13131 ErrorLoc = FirstBinOp->getExprLoc();
13132 ErrorRange = FirstBinOp->getSourceRange();
13133 NoteLoc = SecondBinOp->getLHS()->getExprLoc();
13134 NoteRange = SecondBinOp->getRHS()->getSourceRange();
13142 NoteRange = ErrorRange =
13144 ErrorFound = NotTwoSubstatements;
13148 NoteRange = ErrorRange =
13150 ErrorFound = NotACompoundStatement;
13153 if (ErrorFound != NoError) {
13154 Diag(ErrorLoc, diag::err_omp_atomic_capture_not_compound_statement)
13156 Diag(NoteLoc, diag::note_omp_atomic_capture) << ErrorFound << NoteRange;
13159 if (
SemaRef.CurContext->isDependentContext())
13160 UE =
V = E =
X =
nullptr;
// atomic compare: dispatch to the dedicated checkers defined above.
13161 }
else if (AtomicKind == OMPC_compare) {
13162 if (IsCompareCapture) {
13163 OpenMPAtomicCompareCaptureChecker::ErrorInfoTy ErrorInfo;
13164 OpenMPAtomicCompareCaptureChecker Checker(
SemaRef);
13165 if (!Checker.checkStmt(Body, ErrorInfo)) {
13166 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare_capture)
13167 << ErrorInfo.ErrorRange;
13168 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
13169 << ErrorInfo.Error << ErrorInfo.NoteRange;
13172 X = Checker.getX();
13173 E = Checker.getE();
13174 D = Checker.getD();
13175 CE = Checker.getCond();
13176 V = Checker.getV();
13177 R = Checker.getR();
13183 OpenMPAtomicCompareChecker::ErrorInfoTy ErrorInfo;
13184 OpenMPAtomicCompareChecker Checker(
SemaRef);
13185 if (!Checker.checkStmt(Body, ErrorInfo)) {
13186 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_compare)
13187 << ErrorInfo.ErrorRange;
13188 Diag(ErrorInfo.NoteLoc, diag::note_omp_atomic_compare)
13189 << ErrorInfo.Error << ErrorInfo.NoteRange;
13192 X = Checker.getX();
13193 E = Checker.getE();
13194 D = Checker.getD();
13195 CE = Checker.getCond();
// 'weak' clause requires an equality comparison.
13201 auto *It = find_if(Clauses, [](
OMPClause *
C) {
13202 return C->getClauseKind() == llvm::omp::Clause::OMPC_weak;
13204 if (It != Clauses.end()) {
13205 auto *
Cond = dyn_cast<BinaryOperator>(CE);
13206 if (
Cond->getOpcode() != BO_EQ) {
13207 ErrorInfo.Error = Checker.ErrorTy::NotAnAssignment;
13208 ErrorInfo.ErrorLoc =
Cond->getExprLoc();
13209 ErrorInfo.NoteLoc =
Cond->getOperatorLoc();
13210 ErrorInfo.ErrorRange = ErrorInfo.NoteRange =
Cond->getSourceRange();
13212 Diag(ErrorInfo.ErrorLoc, diag::err_omp_atomic_weak_no_equality)
13213 << ErrorInfo.ErrorRange;
// --- Build the directive ---------------------------------------------------
13222 SemaRef.setFunctionHasBranchProtectedScope();
13224 return OMPAtomicDirective::Create(
13225 Context, StartLoc, EndLoc, Clauses, AStmt,
// Fragments of several directive handlers follow; every function signature in
// this region was lost in extraction (upstream numbering 13242-13587 with
// large gaps), so these are runs of statements from distinct handlers:
// target/teams nesting validation, a loop-directive builder, the hasClauses
// helpers, target data / enter data / exit data / update clause checks,
// teams, cancellation point / cancel, and taskloop.
// NOTE(review): treat everything below as incomplete; verify against
// upstream before editing.
//
// Validates that a 'target' region containing a nested 'teams' directive
// contains ONLY that teams directive (no other statements).
if (
DSAStack->hasInnerTeamsRegion()) {
13244 bool OMPTeamsFound =
true;
13245 if (
const auto *CS = dyn_cast<CompoundStmt>(S)) {
13246 auto I = CS->body_begin();
13247 while (I != CS->body_end()) {
13248 const auto *OED = dyn_cast<OMPExecutableDirective>(*I);
13250 if (!IsTeams || I != CS->body_begin()) {
13251 OMPTeamsFound =
false;
13252 if (IsTeams && I != CS->body_begin()) {
13261 assert(I != CS->body_end() &&
"Not found statement");
13264 const auto *OED = dyn_cast<OMPExecutableDirective>(S);
13267 if (!OMPTeamsFound) {
13268 Diag(StartLoc, diag::err_omp_target_contains_not_only_teams);
13270 diag::note_omp_nested_teams_construct_here);
// Loop-directive builder fragment: counts collapsed loops and builds helper
// expressions, bailing out when loop analysis fails.
13303 OMPLoopBasedDirective::HelperExprs B;
13306 unsigned NestedLoopCount =
13309 VarsWithImplicitDSA, B);
13310 if (NestedLoopCount == 0)
13317 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
// hasClauses (single kind): true if any clause matches K.
13324 return llvm::any_of(
13325 Clauses, [K](
const OMPClause *
C) {
return C->getClauseKind() == K; });
// hasClauses (variadic): fans out over multiple clause kinds.
13328template <
typename... Params>
13330 const Params... ClauseTypes) {
// Checks that all decls in to/from clauses are externally visible
// declare-target declarations (motion-clause visibility rule).
13337 if (
auto *TC = dyn_cast<OMPToClause>(
C))
13338 return llvm::all_of(TC->all_decls(), [](
ValueDecl *VD) {
13339 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
13340 (VD->isExternallyVisible() &&
13341 VD->getVisibility() != HiddenVisibility);
13343 else if (
auto *FC = dyn_cast<OMPFromClause>(
C))
13344 return llvm::all_of(FC->all_decls(), [](
ValueDecl *VD) {
13345 return !VD || !VD->hasAttr<OMPDeclareTargetDeclAttr>() ||
13346 (VD->isExternallyVisible() &&
13347 VD->getVisibility() != HiddenVisibility);
// target data: requires map/use_device_ptr (and, per version, also accepts
// use_device_addr); diagnostic text depends on the OpenMP version.
13366 if (!
hasClauses(Clauses, OMPC_map, OMPC_use_device_ptr) &&
13368 !
hasClauses(Clauses, OMPC_use_device_addr))) {
13371 Expected =
"'map' or 'use_device_ptr'";
13373 Expected =
"'map', 'use_device_ptr', or 'use_device_addr'";
13375 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13376 <<
Expected << getOpenMPDirectiveName(OMPD_target_data, OMPVersion);
13380 SemaRef.setFunctionHasBranchProtectedScope();
// target enter data: requires at least a 'map' clause.
13398 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13400 << getOpenMPDirectiveName(OMPD_target_enter_data, OMPVersion);
// target exit data: requires at least a 'map' clause.
13420 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
13421 <<
"'map'" << getOpenMPDirectiveName(OMPD_target_exit_data, OMPVersion);
// target update: requires a 'to' or 'from' motion clause, and the motion
// decls must not have internal linkage.
13437 if (!
hasClauses(Clauses, OMPC_to, OMPC_from)) {
13438 Diag(StartLoc, diag::err_omp_at_least_one_motion_clause_required);
13443 Diag(StartLoc, diag::err_omp_cannot_update_with_internal_linkage);
// Helper: diagnoses a clause of ClauseType whose variable list exceeds
// MaxNum entries.
13453template <
typename ClauseType>
13456 unsigned MaxNum,
unsigned Diag) {
13457 auto ClauseItr = llvm::find_if(Clauses, llvm::IsaPred<ClauseType>);
13458 if (ClauseItr == Clauses.end())
13461 auto VarList =
C->getVarRefs();
13462 if (VarList.size() > MaxNum) {
13463 SemaRef.
Diag(VarList[MaxNum]->getBeginLoc(),
Diag)
// teams: limit certain clause var-lists to one expression; warn for HIP
// target directives; record the parent teams region location.
13478 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed) ||
13480 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed))
13485 Diag(StartLoc, diag::warn_hip_omp_target_directives);
13489 DSAStack->setParentTeamsRegionLoc(StartLoc);
// cancellation point: illegal inside nowait or ordered parent regions.
13498 if (
DSAStack->isParentNowaitRegion()) {
13499 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 0;
13502 if (
DSAStack->isParentOrderedRegion()) {
13503 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 0;
13507 EndLoc, CancelRegion);
// cancel: same parent-region restrictions; marks the parent as cancellable.
13513 if (
DSAStack->isParentNowaitRegion()) {
13514 Diag(StartLoc, diag::err_omp_parent_cancel_region_nowait) << 1;
13517 if (
DSAStack->isParentOrderedRegion()) {
13518 Diag(StartLoc, diag::err_omp_parent_cancel_region_ordered) << 1;
13521 DSAStack->setParentCancelRegion(
true);
// taskloop helper: 'reduction' and 'nogroup' may not be combined.
13528 const OMPClause *ReductionClause =
nullptr;
13529 const OMPClause *NogroupClause =
nullptr;
13531 if (
C->getClauseKind() == OMPC_reduction) {
13532 ReductionClause =
C;
13537 if (
C->getClauseKind() == OMPC_nogroup) {
13539 if (ReductionClause)
13544 if (ReductionClause && NogroupClause) {
13545 S.
Diag(ReductionClause->
getBeginLoc(), diag::err_omp_reduction_with_nogroup)
// taskloop: builds loop helper expressions; grainsize and num_tasks are
// mutually exclusive (checked via the elided helper call).
13560 OMPLoopBasedDirective::HelperExprs B;
13563 unsigned NestedLoopCount =
13566 *
DSAStack, VarsWithImplicitDSA, B);
13567 if (NestedLoopCount == 0)
13570 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13571 "omp for loop exprs were not built");
13577 {OMPC_grainsize, OMPC_num_tasks}))
13585 SemaRef.setFunctionHasBranchProtectedScope();
13587 NestedLoopCount, Clauses, AStmt, B,
13601 OMPLoopBasedDirective::HelperExprs B;
13604 unsigned NestedLoopCount =
13607 VarsWithImplicitDSA, B);
13608 if (NestedLoopCount == 0)
13618 {OMPC_grainsize, OMPC_num_tasks}))
13629 NestedLoopCount, Clauses, AStmt, B);
13639 OMPLoopBasedDirective::HelperExprs B;
13642 unsigned NestedLoopCount =
13645 *
DSAStack, VarsWithImplicitDSA, B);
13646 if (NestedLoopCount == 0)
13649 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13650 "omp for loop exprs were not built");
13656 {OMPC_grainsize, OMPC_num_tasks}))
13664 SemaRef.setFunctionHasBranchProtectedScope();
13666 NestedLoopCount, Clauses, AStmt, B,
13677 OMPLoopBasedDirective::HelperExprs B;
13680 unsigned NestedLoopCount =
13683 *
DSAStack, VarsWithImplicitDSA, B);
13684 if (NestedLoopCount == 0)
13687 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13688 "omp for loop exprs were not built");
13694 {OMPC_grainsize, OMPC_num_tasks}))
13702 SemaRef.setFunctionHasBranchProtectedScope();
13704 NestedLoopCount, Clauses, AStmt, B,
13718 OMPLoopBasedDirective::HelperExprs B;
13721 unsigned NestedLoopCount =
13724 VarsWithImplicitDSA, B);
13725 if (NestedLoopCount == 0)
13735 {OMPC_grainsize, OMPC_num_tasks}))
13746 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13759 OMPLoopBasedDirective::HelperExprs B;
13762 unsigned NestedLoopCount =
13765 VarsWithImplicitDSA, B);
13766 if (NestedLoopCount == 0)
13776 {OMPC_grainsize, OMPC_num_tasks}))
13787 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13799 OMPLoopBasedDirective::HelperExprs B;
13805 VarsWithImplicitDSA, B);
13806 if (NestedLoopCount == 0)
13809 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13810 "omp for loop exprs were not built");
13816 {OMPC_grainsize, OMPC_num_tasks}))
13825 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13838 OMPLoopBasedDirective::HelperExprs B;
13844 VarsWithImplicitDSA, B);
13845 if (NestedLoopCount == 0)
13848 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13849 "omp for loop exprs were not built");
13855 {OMPC_grainsize, OMPC_num_tasks}))
13864 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
13875 SemaRef, OMPD_parallel_master_taskloop_simd, AStmt);
13877 OMPLoopBasedDirective::HelperExprs B;
13883 VarsWithImplicitDSA, B);
13884 if (NestedLoopCount == 0)
13894 {OMPC_grainsize, OMPC_num_tasks}))
13905 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13915 SemaRef, OMPD_parallel_masked_taskloop_simd, AStmt);
13917 OMPLoopBasedDirective::HelperExprs B;
13923 VarsWithImplicitDSA, B);
13924 if (NestedLoopCount == 0)
13934 {OMPC_grainsize, OMPC_num_tasks}))
13945 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13955 OMPLoopBasedDirective::HelperExprs B;
13958 unsigned NestedLoopCount =
13962 if (NestedLoopCount == 0)
13965 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13966 "omp for loop exprs were not built");
13968 SemaRef.setFunctionHasBranchProtectedScope();
13970 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
13971 return DistributeDirective;
13983 OMPLoopBasedDirective::HelperExprs B;
13989 VarsWithImplicitDSA, B);
13990 if (NestedLoopCount == 0)
13993 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
13994 "omp for loop exprs were not built");
13997 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
14008 SemaRef, OMPD_distribute_parallel_for_simd, AStmt);
14010 OMPLoopBasedDirective::HelperExprs B;
14016 VarsWithImplicitDSA, B);
14017 if (NestedLoopCount == 0)
14027 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14039 OMPLoopBasedDirective::HelperExprs B;
14042 unsigned NestedLoopCount =
14046 if (NestedLoopCount == 0)
14056 NestedLoopCount, Clauses, AStmt, B);
14068 OMPLoopBasedDirective::HelperExprs B;
14074 VarsWithImplicitDSA, B);
14075 if (NestedLoopCount == 0)
14085 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14096 OMPLoopBasedDirective::HelperExprs B;
14099 unsigned NestedLoopCount =
14102 VarsWithImplicitDSA, B);
14103 if (NestedLoopCount == 0)
14113 NestedLoopCount, Clauses, AStmt, B);
14125 OMPLoopBasedDirective::HelperExprs B;
14128 unsigned NestedLoopCount =
14132 if (NestedLoopCount == 0)
14135 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
14136 "omp teams distribute loop exprs were not built");
14138 DSAStack->setParentTeamsRegionLoc(StartLoc);
14141 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14153 OMPLoopBasedDirective::HelperExprs B;
14159 VarsWithImplicitDSA, B);
14160 if (NestedLoopCount == 0)
14169 DSAStack->setParentTeamsRegionLoc(StartLoc);
14172 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14182 SemaRef, OMPD_teams_distribute_parallel_for_simd, AStmt);
14184 OMPLoopBasedDirective::HelperExprs B;
14190 VarsWithImplicitDSA, B);
14191 if (NestedLoopCount == 0)
14200 DSAStack->setParentTeamsRegionLoc(StartLoc);
14203 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14213 SemaRef, OMPD_teams_distribute_parallel_for, AStmt);
14215 OMPLoopBasedDirective::HelperExprs B;
14221 VarsWithImplicitDSA, B);
14223 if (NestedLoopCount == 0)
14226 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
14227 "omp for loop exprs were not built");
14229 DSAStack->setParentTeamsRegionLoc(StartLoc);
14232 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
14245 bool HasThreadLimitAndNumTeamsClause =
hasClauses(Clauses, OMPC_num_teams) &&
14247 bool HasBareClause = llvm::any_of(Clauses, [&](
const OMPClause *
C) {
14249 return C->getClauseKind() == OMPC_ompx_bare;
14252 if (HasBareClause && !HasThreadLimitAndNumTeamsClause) {
14257 unsigned ClauseMaxNumExprs = HasBareClause ? 3 : 1;
14258 unsigned DiagNo = HasBareClause
14259 ? diag::err_ompx_more_than_three_expr_not_allowed
14260 : diag::err_omp_multi_expr_not_allowed;
14262 ClauseMaxNumExprs, DiagNo) ||
14264 ClauseMaxNumExprs, DiagNo))
14278 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed) ||
14280 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed))
14286 OMPLoopBasedDirective::HelperExprs B;
14292 VarsWithImplicitDSA, B);
14293 if (NestedLoopCount == 0)
14296 assert((
SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
14297 "omp target teams distribute loop exprs were not built");
14300 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14310 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed) ||
14312 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed))
14316 SemaRef, OMPD_target_teams_distribute_parallel_for, AStmt);
14318 OMPLoopBasedDirective::HelperExprs B;
14324 VarsWithImplicitDSA, B);
14325 if (NestedLoopCount == 0)
14332 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B,
14343 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed) ||
14345 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed))
14349 SemaRef, OMPD_target_teams_distribute_parallel_for_simd, AStmt);
14351 OMPLoopBasedDirective::HelperExprs B;
14354 unsigned NestedLoopCount =
14359 if (NestedLoopCount == 0)
14369 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14379 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed) ||
14381 *
this, Clauses, 1, diag::err_omp_multi_expr_not_allowed))
14385 SemaRef, OMPD_target_teams_distribute_simd, AStmt);
14387 OMPLoopBasedDirective::HelperExprs B;
14393 VarsWithImplicitDSA, B);
14394 if (NestedLoopCount == 0)
14404 getASTContext(), StartLoc, EndLoc, NestedLoopCount, Clauses, AStmt, B);
14411 Stmt *Dir = Transform->getDirective();
14413#define STMT(CLASS, PARENT)
14414#define ABSTRACT_STMT(CLASS)
14415#define COMMON_OMP_LOOP_TRANSFORMATION(CLASS, PARENT) \
14416 case Stmt::CLASS##Class: \
14417 appendFlattenedStmtList(PreInits, \
14418 static_cast<const CLASS *>(Dir)->getPreInits()); \
14420#define OMPCANONICALLOOPNESTTRANSFORMATIONDIRECTIVE(CLASS, PARENT) \
14421 COMMON_OMP_LOOP_TRANSFORMATION(CLASS, PARENT)
14422#define OMPCANONICALLOOPSEQUENCETRANSFORMATIONDIRECTIVE(CLASS, PARENT) \
14423 COMMON_OMP_LOOP_TRANSFORMATION(CLASS, PARENT)
14424#include "clang/AST/StmtNodes.inc"
14425#undef COMMON_OMP_LOOP_TRANSFORMATION
14427 llvm_unreachable(
"Not a loop transformation");
14431bool SemaOpenMP::checkTransformableLoopNest(
14435 OriginalInits.emplace_back();
14436 bool Result = OMPLoopBasedDirective::doForAllLoops(
14438 [
this, &LoopHelpers, &Body, &OriginalInits, Kind](
unsigned Cnt,
14440 VarsWithInheritedDSAType TmpDSA;
14441 unsigned SingleNumLoops =
14442 checkOpenMPLoop(Kind, nullptr, nullptr, CurStmt, SemaRef, *DSAStack,
14443 TmpDSA, LoopHelpers[Cnt]);
14444 if (SingleNumLoops == 0)
14446 assert(SingleNumLoops == 1 &&
"Expect single loop iteration space");
14447 if (auto *For = dyn_cast<ForStmt>(CurStmt)) {
14448 OriginalInits.back().push_back(For->getInit());
14449 Body = For->getBody();
14451 assert(isa<CXXForRangeStmt>(CurStmt) &&
14452 "Expected canonical for or range-based for loops.");
14453 auto *CXXFor = cast<CXXForRangeStmt>(CurStmt);
14454 OriginalInits.back().push_back(CXXFor->getBeginStmt());
14455 Body = CXXFor->getBody();
14457 OriginalInits.emplace_back();
14460 [&OriginalInits](OMPLoopTransformationDirective *Transform) {
14463 assert(OriginalInits.back().empty() &&
"No preinit after innermost loop");
14464 OriginalInits.pop_back();
14502 unsigned NestedLoopCount = 0;
14549bool SemaOpenMP::analyzeLoopSequence(
Stmt *LoopSeqStmt,
14550 LoopSequenceAnalysis &SeqAnalysis,
14556 auto StoreLoopStatements = [](LoopAnalysis &Analysis, Stmt *LoopStmt) {
14557 if (
auto *For = dyn_cast<ForStmt>(LoopStmt)) {
14558 Analysis.OriginalInits.push_back(For->getInit());
14559 Analysis.TheForStmt = For;
14562 Analysis.OriginalInits.push_back(CXXFor->getBeginStmt());
14563 Analysis.TheForStmt = CXXFor;
14570 auto AnalyzeLoopGeneration = [&](Stmt *Child) {
14572 Stmt *TransformedStmt = LoopTransform->getTransformedStmt();
14573 unsigned NumGeneratedTopLevelLoops =
14574 LoopTransform->getNumGeneratedTopLevelLoops();
14577 if (!TransformedStmt) {
14578 if (NumGeneratedTopLevelLoops > 0) {
14579 SeqAnalysis.LoopSeqSize += NumGeneratedTopLevelLoops;
14583 Diag(Child->getBeginLoc(), diag::err_omp_not_for)
14584 << 0 << getOpenMPDirectiveName(Kind);
14589 if (!NumGeneratedTopLevelLoops) {
14590 Diag(Child->getBeginLoc(), diag::err_omp_not_for)
14591 << 0 << getOpenMPDirectiveName(Kind);
14595 if (NumGeneratedTopLevelLoops > 1) {
14602 updatePreInits(LoopTransform, SeqAnalysis.LoopSequencePreInits);
14603 return analyzeLoopSequence(TransformedStmt, SeqAnalysis, Context, Kind);
14607 LoopAnalysis &NewTransformedSingleLoop =
14608 SeqAnalysis.Loops.emplace_back(Child);
14611 NewTransformedSingleLoop.HelperExprs);
14616 StoreLoopStatements(NewTransformedSingleLoop, TransformedStmt);
14617 updatePreInits(LoopTransform, NewTransformedSingleLoop.TransformsPreInits);
14619 SeqAnalysis.LoopSeqSize++;
14624 auto AnalyzeRegularLoop = [&](Stmt *Child) {
14625 LoopAnalysis &NewRegularLoop = SeqAnalysis.Loops.emplace_back(Child);
14626 unsigned IsCanonical =
14628 TmpDSA, NewRegularLoop.HelperExprs);
14633 StoreLoopStatements(NewRegularLoop, Child);
14634 NestedLoopCounterVisitor NLCV;
14640 for (Stmt *Child : LoopSeqStmt->
children()) {
14644 if (!LoopSequenceAnalysis::isLoopSequenceDerivation(Child)) {
14645 Child = Child->IgnoreContainers();
14652 if (!analyzeLoopSequence(Child, SeqAnalysis, Context, Kind))
14659 if (LoopSequenceAnalysis::isLoopSequenceDerivation(Child)) {
14660 if (LoopAnalysis::isLoopTransformation(Child)) {
14661 if (!AnalyzeLoopGeneration(Child))
14665 if (!AnalyzeRegularLoop(Child))
14667 SeqAnalysis.LoopSeqSize++;
14671 Diag(Child->getBeginLoc(), diag::err_omp_not_for)
14672 << 0 << getOpenMPDirectiveName(Kind);
14679bool SemaOpenMP::checkTransformableLoopSequence(
14704 << getOpenMPDirectiveName(Kind);
14709 if (!analyzeLoopSequence(AStmt, SeqAnalysis, Context, Kind))
14713 if (!SeqAnalysis.LoopSeqSize) {
14715 << getOpenMPDirectiveName(Kind);
14723 OMPLoopBasedDirective::HelperExprs &LoopHelper,
14729 if (
auto *CXXRangeFor = dyn_cast<CXXForRangeStmt>(LoopStmt)) {
14730 Stmt *RangeInit = CXXRangeFor->getInit();
14732 PreInits.push_back(RangeInit);
14734 DeclStmt *RangeStmt = CXXRangeFor->getRangeStmt();
14739 DeclStmt *RangeEnd = CXXRangeFor->getEndStmt();
14745 llvm::append_range(PreInits, OriginalInit);
14748 if (
auto *PI = cast_or_null<DeclStmt>(LoopHelper.PreInits)) {
14749 PreInits.push_back(
new (Context)
DeclStmt(
14750 PI->getDeclGroup(), PI->getBeginLoc(), PI->getEndLoc()));
14754 for (
Expr *CounterRef : LoopHelper.Counters) {
14757 PreInits.push_back(
new (Context)
DeclStmt(
14765 size_t NumLoops = LoopStmts.size();
14766 OMPLoopBasedDirective::doForAllLoops(
14767 AStmt,
false, NumLoops,
14768 [LoopStmts](
unsigned Cnt,
Stmt *CurStmt) {
14769 assert(!LoopStmts[Cnt] &&
"Loop statement must not yet be assigned");
14770 LoopStmts[Cnt] = CurStmt;
14773 assert(!is_contained(LoopStmts,
nullptr) &&
14774 "Expecting a loop statement for each affected loop");
14792 const auto *SizesClause =
14793 OMPExecutableDirective::getSingleClause<OMPSizesClause>(Clauses);
14794 if (!SizesClause ||
14795 llvm::any_of(SizesClause->getSizesRefs(), [](
Expr *E) { return !E; }))
14797 unsigned NumLoops = SizesClause->getNumSizes();
14805 Stmt *Body =
nullptr;
14807 if (!checkTransformableLoopNest(OMPD_tile, AStmt, NumLoops, LoopHelpers, Body,
14812 if (
SemaRef.CurContext->isDependentContext())
14814 NumLoops, AStmt,
nullptr,
nullptr);
14816 assert(LoopHelpers.size() == NumLoops &&
14817 "Expecting loop iteration space dimensionality to match number of "
14819 assert(OriginalInits.size() == NumLoops &&
14820 "Expecting loop iteration space dimensionality to match number of "
14828 CaptureVars CopyTransformer(
SemaRef);
14833 FloorIndVars.resize(NumLoops);
14834 TileIndVars.resize(NumLoops);
14835 for (
unsigned I = 0; I < NumLoops; ++I) {
14836 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers[I];
14838 assert(LoopHelper.Counters.size() == 1 &&
14839 "Expect single-dimensional loop iteration space");
14841 std::string OrigVarName = OrigCntVar->getNameInfo().getAsString();
14847 std::string FloorCntName =
14848 (Twine(
".floor_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14851 FloorIndVars[I] = FloorCntDecl;
14856 std::string TileCntName =
14857 (Twine(
".tile_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
14863 TileCntDecl->setDeclName(
14864 &
SemaRef.PP.getIdentifierTable().get(TileCntName));
14865 TileIndVars[I] = TileCntDecl;
14873 Stmt *Inner = Body;
14875 auto MakeDimTileSize = [&
SemaRef = this->SemaRef, &CopyTransformer, &Context,
14876 SizesClause, CurScope](
int I) ->
Expr * {
14877 Expr *DimTileSizeExpr = SizesClause->getSizesRefs()[I];
14883 return AssertSuccess(CopyTransformer.TransformExpr(DimTileSizeExpr));
14902 uint64_t DimWidth = Context.getTypeSize(DimTy);
14904 Context, llvm::APInt::getZero(DimWidth), DimTy, {});
14908 CurScope, {}, BO_LE,
14912 AssertSuccess(CopyTransformer.TransformExpr(DimTileSizeExpr)), DimTy,
14918 for (
int I = NumLoops - 1; I >= 0; --I) {
14919 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers[I];
14920 Expr *NumIterations = LoopHelper.NumIterations;
14923 Stmt *LoopStmt = LoopStmts[I];
14928 auto MakeTileIVRef = [&
SemaRef = this->SemaRef, &TileIndVars, I, IVTy,
14931 OrigCntVar->getExprLoc());
14935 SemaRef.AddInitializerToDecl(
14938 .DefaultLvalueConversion(
14942 Decl *CounterDecl = TileIndVars[I];
14945 OrigCntVar->getBeginLoc(), OrigCntVar->getEndLoc());
14951 Expr *DimTileSize = MakeDimTileSize(I);
14955 CurScope, LoopHelper.Cond->getExprLoc(), BO_Add,
14961 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
14962 NumIterations, EndOfTile.
get());
14966 LoopHelper.Cond->getBeginLoc(), LoopHelper.Cond->getEndLoc(),
14967 IsPartialTile.
get(), NumIterations, EndOfTile.
get());
14968 if (!MinTileAndIterSpace.
isUsable())
14971 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
14972 MakeTileIVRef(), MinTileAndIterSpace.
get());
14978 CurScope, LoopHelper.Inc->getExprLoc(), UO_PreInc, MakeTileIVRef());
14999 BodyParts.append(LoopHelper.Updates.begin(), LoopHelper.Updates.end());
15000 if (
auto *SourceCXXFor = dyn_cast<CXXForRangeStmt>(LoopStmt))
15001 BodyParts.push_back(SourceCXXFor->getLoopVarStmt());
15002 BodyParts.push_back(Inner);
15005 Inner =
new (Context)
15008 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15012 for (
int I = NumLoops - 1; I >= 0; --I) {
15013 auto &LoopHelper = LoopHelpers[I];
15014 Expr *NumIterations = LoopHelper.NumIterations;
15019 SemaRef.AddInitializerToDecl(
15021 SemaRef.ActOnIntegerConstant(LoopHelper.Init->getExprLoc(), 0).get(),
15023 Decl *CounterDecl = FloorIndVars[I];
15032 CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15039 Expr *DimTileSize = MakeDimTileSize(I);
15043 CurScope, LoopHelper.Inc->getExprLoc(), BO_AddAssign,
15049 Inner =
new (Context)
15052 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15067 const auto *SizesClause =
15068 OMPExecutableDirective::getSingleClause<OMPSizesClause>(Clauses);
15069 if (!SizesClause ||
15070 llvm::any_of(SizesClause->getSizesRefs(), [](
const Expr *SizeExpr) {
15071 return !SizeExpr || SizeExpr->containsErrors();
15074 unsigned NumLoops = SizesClause->getNumSizes();
15082 Stmt *Body =
nullptr;
15084 if (!checkTransformableLoopNest(OMPD_stripe, AStmt, NumLoops, LoopHelpers,
15085 Body, OriginalInits))
15089 if (
SemaRef.CurContext->isDependentContext())
15091 NumLoops, AStmt,
nullptr,
nullptr);
15093 assert(LoopHelpers.size() == NumLoops &&
15094 "Expecting loop iteration space dimensionality to match number of "
15096 assert(OriginalInits.size() == NumLoops &&
15097 "Expecting loop iteration space dimensionality to match number of "
15105 CaptureVars CopyTransformer(
SemaRef);
15110 FloorIndVars.resize(NumLoops);
15111 StripeIndVars.resize(NumLoops);
15112 for (
unsigned I : llvm::seq<unsigned>(NumLoops)) {
15113 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers[I];
15115 assert(LoopHelper.Counters.size() == 1 &&
15116 "Expect single-dimensional loop iteration space");
15118 std::string OrigVarName = OrigCntVar->getNameInfo().getAsString();
15124 std::string FloorCntName =
15125 (Twine(
".floor_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
15128 FloorIndVars[I] = FloorCntDecl;
15133 std::string StripeCntName =
15134 (Twine(
".stripe_") + llvm::utostr(I) +
".iv." + OrigVarName).str();
15140 StripeCntDecl->setDeclName(
15141 &
SemaRef.PP.getIdentifierTable().get(StripeCntName));
15142 StripeIndVars[I] = StripeCntDecl;
15150 Stmt *Inner = Body;
15152 auto MakeDimStripeSize = [&](
int I) ->
Expr * {
15153 Expr *DimStripeSizeExpr = SizesClause->getSizesRefs()[I];
15155 return AssertSuccess(CopyTransformer.TransformExpr(DimStripeSizeExpr));
15174 uint64_t DimWidth = Context.getTypeSize(DimTy);
15176 Context, llvm::APInt::getZero(DimWidth), DimTy, {});
15180 CurScope, {}, BO_LE,
15184 AssertSuccess(CopyTransformer.TransformExpr(DimStripeSizeExpr)), DimTy,
15190 for (
int I = NumLoops - 1; I >= 0; --I) {
15191 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers[I];
15192 Expr *NumIterations = LoopHelper.NumIterations;
15195 Stmt *LoopStmt = LoopStmts[I];
15198 SemaRef.AddInitializerToDecl(
15201 .DefaultLvalueConversion(
15205 Decl *CounterDecl = StripeIndVars[I];
15208 OrigCntVar->getBeginLoc(), OrigCntVar->getEndLoc());
15215 CurScope, LoopHelper.Cond->getExprLoc(), BO_Add,
15217 MakeDimStripeSize(I));
15221 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15222 NumIterations, EndOfStripe.
get());
15226 LoopHelper.Cond->getBeginLoc(), LoopHelper.Cond->getEndLoc(),
15227 IsPartialStripe.
get(), NumIterations, EndOfStripe.
get());
15228 if (!MinStripeAndIterSpace.
isUsable())
15231 CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15233 MinStripeAndIterSpace.
get());
15239 CurScope, LoopHelper.Inc->getExprLoc(), UO_PreInc,
15261 BodyParts.append(LoopHelper.Updates.begin(), LoopHelper.Updates.end());
15262 if (
auto *SourceCXXFor = dyn_cast<CXXForRangeStmt>(LoopStmt))
15263 BodyParts.push_back(SourceCXXFor->getLoopVarStmt());
15264 BodyParts.push_back(Inner);
15267 Inner =
new (Context)
15270 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15274 for (
int I = NumLoops - 1; I >= 0; --I) {
15275 auto &LoopHelper = LoopHelpers[I];
15276 Expr *NumIterations = LoopHelper.NumIterations;
15281 SemaRef.AddInitializerToDecl(
15283 SemaRef.ActOnIntegerConstant(LoopHelper.Init->getExprLoc(), 0).get(),
15285 Decl *CounterDecl = FloorIndVars[I];
15294 CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15302 CurScope, LoopHelper.Inc->getExprLoc(), BO_AddAssign,
15304 MakeDimStripeSize(I));
15308 Inner =
new (Context)
15311 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15315 NumLoops, AStmt, Inner,
15330 {OMPC_partial, OMPC_full}))
15334 OMPExecutableDirective::getSingleClause<OMPFullClause>(Clauses);
15336 OMPExecutableDirective::getSingleClause<OMPPartialClause>(Clauses);
15337 assert(!(FullClause && PartialClause) &&
15338 "mutual exclusivity must have been checked before");
15340 constexpr unsigned NumLoops = 1;
15341 Stmt *Body =
nullptr;
15345 if (!checkTransformableLoopNest(OMPD_unroll, AStmt, NumLoops, LoopHelpers,
15346 Body, OriginalInits))
15349 unsigned NumGeneratedTopLevelLoops = PartialClause ? 1 : 0;
15352 if (
SemaRef.CurContext->isDependentContext())
15354 NumGeneratedTopLevelLoops,
nullptr,
15357 assert(LoopHelpers.size() == NumLoops &&
15358 "Expecting a single-dimensional loop iteration space");
15359 assert(OriginalInits.size() == NumLoops &&
15360 "Expecting a single-dimensional loop iteration space");
15361 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers.front();
15365 LoopHelper.NumIterations, OMPC_full,
false,
15368 Diag(AStmt->
getBeginLoc(), diag::err_omp_unroll_full_variable_trip_count);
15370 <<
"#pragma omp unroll full";
15378 if (NumGeneratedTopLevelLoops == 0)
15380 NumGeneratedTopLevelLoops,
nullptr,
15426 Stmt *LoopStmt =
nullptr;
15431 addLoopPreInits(Context, LoopHelper, LoopStmt, OriginalInits[0], PreInits);
15434 QualType IVTy = IterationVarRef->getType();
15435 assert(LoopHelper.Counters.size() == 1 &&
15436 "Expecting a single-dimensional loop iteration space");
15444 Factor = FactorVal->getIntegerConstantExpr(Context)->getZExtValue();
15445 FactorLoc = FactorVal->getExprLoc();
15450 assert(Factor > 0 &&
"Expected positive unroll factor");
15451 auto MakeFactorExpr = [
this, Factor, IVTy, FactorLoc]() {
15463 std::string OrigVarName = OrigVar->getNameInfo().getAsString();
15464 std::string OuterIVName = (Twine(
".unrolled.iv.") + OrigVarName).str();
15465 std::string InnerIVName = (Twine(
".unroll_inner.iv.") + OrigVarName).str();
15470 auto MakeOuterRef = [
this, OuterIVDecl, IVTy, OrigVarLoc]() {
15476 auto *InnerIVDecl =
cast<VarDecl>(IterationVarRef->getDecl());
15477 InnerIVDecl->setDeclName(&
SemaRef.PP.getIdentifierTable().get(InnerIVName));
15478 auto MakeInnerRef = [
this, InnerIVDecl, IVTy, OrigVarLoc]() {
15484 CaptureVars CopyTransformer(
SemaRef);
15485 auto MakeNumIterations = [&CopyTransformer, &LoopHelper]() ->
Expr * {
15487 CopyTransformer.TransformExpr(LoopHelper.NumIterations));
15492 SemaRef.AddInitializerToDecl(InnerIVDecl, LValueConv.
get(),
15507 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_Add,
15508 MakeOuterRef(), MakeFactorExpr());
15512 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15513 MakeInnerRef(), EndOfTile.
get());
15517 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15518 MakeInnerRef(), MakeNumIterations());
15522 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LAnd,
15523 InnerCond1.
get(), InnerCond2.
get());
15529 CurScope, LoopHelper.Inc->getExprLoc(), UO_PreInc, MakeInnerRef());
15535 InnerBodyStmts.append(LoopHelper.Updates.begin(), LoopHelper.Updates.end());
15536 if (
auto *CXXRangeFor = dyn_cast<CXXForRangeStmt>(LoopStmt))
15537 InnerBodyStmts.push_back(CXXRangeFor->getLoopVarStmt());
15538 InnerBodyStmts.push_back(Body);
15542 ForStmt *InnerFor =
new (Context)
15543 ForStmt(Context, InnerInit.
get(), InnerCond.
get(),
nullptr,
15545 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15559 LoopHintAttr *UnrollHintAttr =
15560 LoopHintAttr::CreateImplicit(Context, LoopHintAttr::UnrollCount,
15561 LoopHintAttr::Numeric, MakeFactorExpr());
15566 SemaRef.AddInitializerToDecl(
15568 SemaRef.ActOnIntegerConstant(LoopHelper.Init->getExprLoc(), 0).get(),
15577 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15578 MakeOuterRef(), MakeNumIterations());
15584 SemaRef.BuildBinOp(CurScope, LoopHelper.Inc->getExprLoc(), BO_AddAssign,
15585 MakeOuterRef(), MakeFactorExpr());
15590 ForStmt *OuterFor =
new (Context)
15591 ForStmt(Context, OuterInit.
get(), OuterConde.
get(),
nullptr,
15593 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15596 NumGeneratedTopLevelLoops, OuterFor,
15610 constexpr unsigned NumLoops = 1;
15611 Stmt *Body =
nullptr;
15615 if (!checkTransformableLoopNest(OMPD_reverse, AStmt, NumLoops, LoopHelpers,
15616 Body, OriginalInits))
15621 if (
SemaRef.CurContext->isDependentContext())
15623 NumLoops,
nullptr,
nullptr);
15625 assert(LoopHelpers.size() == NumLoops &&
15626 "Expecting a single-dimensional loop iteration space");
15627 assert(OriginalInits.size() == NumLoops &&
15628 "Expecting a single-dimensional loop iteration space");
15629 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers.front();
15632 Stmt *LoopStmt =
nullptr;
15637 addLoopPreInits(Context, LoopHelper, LoopStmt, OriginalInits[0], PreInits);
15640 QualType IVTy = IterationVarRef->getType();
15641 uint64_t IVWidth = Context.getTypeSize(IVTy);
15655 std::string OrigVarName = OrigVar->getNameInfo().getAsString();
15657 ForwardIVName += OrigVarName;
15659 ReversedIVName += OrigVarName;
15692 CaptureVars CopyTransformer(
SemaRef);
15693 auto MakeNumIterations = [&CopyTransformer, &LoopHelper]() ->
Expr * {
15695 CopyTransformer.TransformExpr(LoopHelper.NumIterations));
15701 auto MakeForwardRef = [&
SemaRef = this->SemaRef, ForwardIVDecl, IVTy,
15708 auto *ReversedIVDecl =
cast<VarDecl>(IterationVarRef->getDecl());
15709 ReversedIVDecl->setDeclName(
15710 &
SemaRef.PP.getIdentifierTable().get(ReversedIVName));
15717 ForwardIVDecl->
getType(), OrigVarLoc);
15718 SemaRef.AddInitializerToDecl(ForwardIVDecl,
Zero,
false);
15721 if (!
Init.isUsable())
15729 SemaRef.BuildBinOp(CurScope, LoopHelper.Cond->getExprLoc(), BO_LT,
15730 MakeForwardRef(), MakeNumIterations());
15731 if (!
Cond.isUsable())
15739 UO_PreInc, MakeForwardRef());
15750 MakeNumIterations(), One);
15753 Minus =
SemaRef.BuildBinOp(CurScope, TransformLoc, BO_Sub, Minus.
get(),
15758 DeclGroupRef(ReversedIVDecl), TransformLocBegin, TransformLocEnd);
15761 SemaRef.AddInitializerToDecl(ReversedIVDecl, Minus.
get(),
15766 BodyStmts.reserve(LoopHelper.Updates.size() + 2 +
15768 BodyStmts.push_back(InitReversed.
get());
15769 llvm::append_range(BodyStmts, LoopHelper.Updates);
15770 if (
auto *CXXRangeFor = dyn_cast<CXXForRangeStmt>(LoopStmt))
15771 BodyStmts.push_back(CXXRangeFor->getLoopVarStmt());
15772 BodyStmts.push_back(Body);
15773 auto *ReversedBody =
15778 auto *ReversedFor =
new (Context)
15781 LoopHelper.Init->getBeginLoc(), LoopHelper.Inc->getEndLoc());
15800 OMPExecutableDirective::getSingleClause<OMPPermutationClause>(Clauses);
15801 size_t NumLoops = PermutationClause ? PermutationClause->
getNumLoops() : 2;
15805 Stmt *Body =
nullptr;
15807 if (!checkTransformableLoopNest(OMPD_interchange, AStmt, NumLoops,
15808 LoopHelpers, Body, OriginalInits))
15814 NumLoops, AStmt,
nullptr,
nullptr);
15818 if (PermutationClause &&
15819 llvm::is_contained(PermutationClause->
getArgsRefs(),
nullptr))
15822 assert(LoopHelpers.size() == NumLoops &&
15823 "Expecting loop iteration space dimensionaly to match number of "
15825 assert(OriginalInits.size() == NumLoops &&
15826 "Expecting loop iteration space dimensionaly to match number of "
15831 if (!PermutationClause) {
15832 Permutation = {1, 0};
15835 llvm::BitVector Flags(PermArgs.size());
15836 for (
Expr *PermArg : PermArgs) {
15837 std::optional<llvm::APSInt> PermCstExpr =
15838 PermArg->getIntegerConstantExpr(Context);
15841 uint64_t PermInt = PermCstExpr->getZExtValue();
15842 assert(1 <= PermInt && PermInt <= NumLoops &&
15843 "Must be a permutation; diagnostic emitted in "
15844 "ActOnOpenMPPermutationClause");
15845 if (Flags[PermInt - 1]) {
15846 SourceRange ExprRange(PermArg->getBeginLoc(), PermArg->getEndLoc());
15847 Diag(PermArg->getExprLoc(),
15848 diag::err_omp_interchange_permutation_value_repeated)
15849 << PermInt << ExprRange;
15852 Flags[PermInt - 1] =
true;
15854 Permutation.push_back(PermInt - 1);
15857 if (Permutation.size() != NumLoops)
15862 if (NumLoops <= 1 || llvm::all_of(llvm::enumerate(Permutation), [](
auto P) {
15863 auto [Idx, Arg] = P;
15867 NumLoops, AStmt, AStmt,
nullptr);
15875 for (
auto I : llvm::seq<int>(NumLoops)) {
15876 OMPLoopBasedDirective::HelperExprs &LoopHelper = LoopHelpers[I];
15878 assert(LoopHelper.Counters.size() == 1 &&
15879 "Single-dimensional loop iteration space expected");
15886 CaptureVars CopyTransformer(
SemaRef);
15891 Stmt *Inner = Body;
15892 for (
auto TargetIdx : llvm::reverse(llvm::seq<int>(NumLoops))) {
15894 uint64_t SourceIdx = Permutation[TargetIdx];
15895 OMPLoopBasedDirective::HelperExprs &SourceHelper = LoopHelpers[SourceIdx];
15896 Stmt *SourceLoopStmt = LoopStmts[SourceIdx];
15897 assert(SourceHelper.Counters.size() == 1 &&
15898 "Single-dimensional loop iteration space expected");
15905 "Expected the logical iteration counter to be an integer");
15907 std::string OrigVarName = OrigCntVar->getNameInfo().getAsString();
15912 auto MakeNumIterations = [&CopyTransformer, &SourceHelper]() ->
Expr * {
15914 CopyTransformer.TransformExpr(SourceHelper.NumIterations));
15921 PermutedCntName.append({llvm::utostr(TargetIdx),
".iv.", OrigVarName});
15923 PermutedCntDecl->setDeclName(
15924 &
SemaRef.PP.getIdentifierTable().get(PermutedCntName));
15925 PermutedIndVars[TargetIdx] = PermutedCntDecl;
15926 auto MakePermutedRef = [
this, PermutedCntDecl, IVTy, OrigVarLoc]() {
15935 if (!
Zero.isUsable())
15937 SemaRef.AddInitializerToDecl(PermutedCntDecl,
Zero.get(),
15941 OrigCntVar->getEndLoc());
15950 SemaRef.BuildBinOp(CurScope, SourceHelper.Cond->getExprLoc(), BO_LT,
15951 MakePermutedRef(), MakeNumIterations());
15960 CurScope, SourceHelper.Inc->getExprLoc(), UO_PreInc, MakePermutedRef());
15965 SourceHelper.Updates.end());
15966 if (
auto *SourceCXXFor = dyn_cast<CXXForRangeStmt>(SourceLoopStmt))
15967 BodyParts.push_back(SourceCXXFor->getLoopVarStmt());
15968 BodyParts.push_back(Inner);
15971 Inner =
new (Context)
ForStmt(
15972 Context, InitStmt.
get(), CondExpr.
get(),
nullptr, IncrStmt.
get(), Inner,
15973 SourceHelper.Init->
getBeginLoc(), SourceHelper.Init->getBeginLoc(),
15974 SourceHelper.Inc->getEndLoc());
15978 NumLoops, AStmt, Inner,
15990 CaptureVars CopyTransformer(
SemaRef);
16002 1, AStmt,
nullptr,
nullptr);
16006 LoopSequenceAnalysis SeqAnalysis;
16007 if (!checkTransformableLoopSequence(OMPD_fuse, AStmt, SeqAnalysis, Context))
16012 assert(SeqAnalysis.LoopSeqSize == SeqAnalysis.Loops.size() &&
16013 "Inconsistent size of the loop sequence and the number of loops "
16014 "found in the sequence");
16018 OMPExecutableDirective::getSingleClause<OMPLoopRangeClause>(Clauses);
16022 if (LRC && (!LRC->getFirst() || !LRC->getCount()))
16027 auto EvaluateLoopRangeArguments = [&Context](
Expr *
First,
Expr *Count,
16028 uint64_t &FirstVal,
16029 uint64_t &CountVal) {
16030 llvm::APSInt FirstInt =
First->EvaluateKnownConstInt(Context);
16031 llvm::APSInt CountInt = Count->EvaluateKnownConstInt(Context);
16032 FirstVal = FirstInt.getZExtValue();
16033 CountVal = CountInt.getZExtValue();
16039 auto ValidLoopRange = [](uint64_t FirstVal, uint64_t CountVal,
16040 unsigned NumLoops) ->
bool {
16041 return FirstVal + CountVal - 1 <= NumLoops;
16043 uint64_t FirstVal = 1, CountVal = 0, LastVal = SeqAnalysis.LoopSeqSize;
16049 EvaluateLoopRangeArguments(LRC->getFirst(), LRC->getCount(), FirstVal,
16052 SemaRef.Diag(LRC->getCountLoc(), diag::warn_omp_redundant_fusion)
16053 << getOpenMPDirectiveName(OMPD_fuse);
16055 if (!ValidLoopRange(FirstVal, CountVal, SeqAnalysis.LoopSeqSize)) {
16056 SemaRef.Diag(LRC->getFirstLoc(), diag::err_omp_invalid_looprange)
16057 << getOpenMPDirectiveName(OMPD_fuse) << FirstVal
16058 << (FirstVal + CountVal - 1) << SeqAnalysis.LoopSeqSize;
16062 LastVal = FirstVal + CountVal - 1;
16067 unsigned NumGeneratedTopLevelLoops =
16068 LRC ? SeqAnalysis.LoopSeqSize - CountVal + 1 : 1;
16072 if (SeqAnalysis.LoopSeqSize == 1)
16074 << getOpenMPDirectiveName(OMPD_fuse);
16078 SeqAnalysis.Loops[FirstVal - 1].HelperExprs.IterationVarRef->getType();
16079 for (
unsigned I : llvm::seq<unsigned>(FirstVal, LastVal)) {
16081 SeqAnalysis.Loops[I].HelperExprs.IterationVarRef->getType();
16082 if (Context.getTypeSize(CurrentIVType) > Context.getTypeSize(IVType)) {
16083 IVType = CurrentIVType;
16086 uint64_t IVBitWidth = Context.getIntWidth(IVType);
16099 auto CreateHelperVarAndStmt =
16101 unsigned I,
bool NeedsNewVD =
false) {
16102 Expr *TransformedExpr =
16104 if (!TransformedExpr)
16105 return std::pair<VarDecl *, StmtResult>(
nullptr,
StmtError());
16107 auto Name = (Twine(
".omp.") + BaseName + std::to_string(I)).str();
16112 SemaRef.AddInitializerToDecl(VD, TransformedExpr,
false);
16122 return std::make_pair(VD,
DeclStmt);
16140 llvm::append_range(PreInits, SeqAnalysis.LoopSequencePreInits);
16153 unsigned int TransformIndex = 0;
16154 for (
unsigned I : llvm::seq<unsigned>(FirstVal - 1)) {
16155 if (SeqAnalysis.Loops[I].isLoopTransformation())
16159 for (
unsigned int I = FirstVal - 1, J = 0; I < LastVal; ++I, ++J) {
16160 if (SeqAnalysis.Loops[I].isRegularLoop()) {
16162 SeqAnalysis.Loops[I].TheForStmt,
16163 SeqAnalysis.Loops[I].OriginalInits, PreInits);
16164 }
else if (SeqAnalysis.Loops[I].isLoopTransformation()) {
16169 SeqAnalysis.Loops[TransformIndex++].TransformsPreInits;
16170 llvm::append_range(PreInits, TransformPreInit);
16173 SeqAnalysis.Loops[I].TheForStmt,
16174 SeqAnalysis.Loops[I].OriginalInits, PreInits);
16176 auto [UBVD, UBDStmt] =
16177 CreateHelperVarAndStmt(SeqAnalysis.Loops[I].HelperExprs.UB,
"ub", J);
16178 auto [LBVD, LBDStmt] =
16179 CreateHelperVarAndStmt(SeqAnalysis.Loops[I].HelperExprs.LB,
"lb", J);
16180 auto [STVD, STDStmt] =
16181 CreateHelperVarAndStmt(SeqAnalysis.Loops[I].HelperExprs.ST,
"st", J);
16182 auto [NIVD, NIDStmt] = CreateHelperVarAndStmt(
16183 SeqAnalysis.Loops[I].HelperExprs.NumIterations,
"ni", J,
true);
16184 auto [IVVD, IVDStmt] = CreateHelperVarAndStmt(
16185 SeqAnalysis.Loops[I].HelperExprs.IterationVarRef,
"iv", J);
16187 assert(LBVD && STVD && NIVD && IVVD &&
16188 "OpenMP Fuse Helper variables creation failed");
16190 UBVarDecls.push_back(UBVD);
16191 LBVarDecls.push_back(LBVD);
16192 STVarDecls.push_back(STVD);
16193 NIVarDecls.push_back(NIVD);
16194 IVVarDecls.push_back(IVVD);
16196 PreInits.push_back(LBDStmt.get());
16197 PreInits.push_back(STDStmt.get());
16198 PreInits.push_back(NIDStmt.get());
16199 PreInits.push_back(IVDStmt.get());
16232 StringRef IndexName =
".omp.fuse.index";
16237 SemaRef.AddInitializerToDecl(IndexDecl, InitVal,
false);
16244 auto MakeIVRef = [&
SemaRef = this->SemaRef, IndexDecl, IVType,
16262 for (
unsigned I = FirstVal - 1, J = 0; I < LastVal; ++I, ++J) {
16263 DeclRefExpr *NIRef = MakeVarDeclRef(NIVarDecls[J]);
16271 std::string TempName = (Twine(
".omp.temp.") + Twine(J)).str();
16285 PreInits.push_back(TempStmt.
get());
16294 DeclRefExpr *NIRef2 = MakeVarDeclRef(NIVarDecls[J]);
16308 const std::string MaxName = Twine(
".omp.fuse.max").str();
16316 if (MaxStmt.isInvalid())
16318 PreInits.push_back(MaxStmt.get());
16322 MakeIVRef(), MaxRef);
16348 for (
unsigned I = FirstVal - 1, J = 0; I < LastVal; ++I, ++J) {
16353 MakeVarDeclRef(STVarDecls[J]), MakeIVRef());
16357 MakeVarDeclRef(LBVarDecls[J]), IdxExpr.
get());
16362 MakeVarDeclRef(IVVarDecls[J]), IdxExpr.
get());
16368 BodyStmts.push_back(IdxExpr.
get());
16369 llvm::append_range(BodyStmts, SeqAnalysis.Loops[I].HelperExprs.Updates);
16372 if (
auto *SourceCXXFor =
16373 dyn_cast<CXXForRangeStmt>(SeqAnalysis.Loops[I].TheForStmt))
16374 BodyStmts.push_back(SourceCXXFor->getLoopVarStmt());
16378 ?
cast<ForStmt>(SeqAnalysis.Loops[I].TheForStmt)->getBody()
16380 BodyStmts.push_back(Body);
16387 MakeVarDeclRef(NIVarDecls[J]));
16397 FusedBodyStmts.push_back(IfStatement);
16403 ForStmt *FusedForStmt =
new (Context)
16417 Stmt *FusionStmt = FusedForStmt;
16418 if (LRC && CountVal != SeqAnalysis.LoopSeqSize) {
16422 TransformIndex = 0;
16428 for (
unsigned I : llvm::seq<unsigned>(SeqAnalysis.LoopSeqSize)) {
16429 if (I >= FirstVal - 1 && I < FirstVal + CountVal - 1) {
16432 if (!SeqAnalysis.Loops[I].isLoopTransformation())
16441 if (SeqAnalysis.Loops[I].isRegularLoop()) {
16442 const auto &TransformPreInit =
16443 SeqAnalysis.Loops[TransformIndex++].TransformsPreInits;
16444 if (!TransformPreInit.empty())
16445 llvm::append_range(PreInits, TransformPreInit);
16448 FinalLoops.push_back(SeqAnalysis.Loops[I].TheForStmt);
16451 FinalLoops.insert(FinalLoops.begin() + (FirstVal - 1), FusedForStmt);
16456 NumGeneratedTopLevelLoops, AStmt, FusionStmt,
16476 case OMPC_allocator:
16479 case OMPC_collapse:
16488 case OMPC_priority:
16500 case OMPC_novariants:
16503 case OMPC_nocontext:
16518 case OMPC_ompx_dyn_cgroup_mem:
16524 case OMPC_dyn_groupprivate:
16525 case OMPC_grainsize:
16526 case OMPC_num_tasks:
16527 case OMPC_num_threads:
16531 case OMPC_proc_bind:
16532 case OMPC_schedule:
16534 case OMPC_firstprivate:
16535 case OMPC_lastprivate:
16537 case OMPC_reduction:
16538 case OMPC_task_reduction:
16539 case OMPC_in_reduction:
16543 case OMPC_copyprivate:
16545 case OMPC_mergeable:
16546 case OMPC_threadprivate:
16547 case OMPC_groupprivate:
16549 case OMPC_allocate:
16566 case OMPC_dist_schedule:
16567 case OMPC_defaultmap:
16572 case OMPC_use_device_ptr:
16573 case OMPC_use_device_addr:
16574 case OMPC_is_device_ptr:
16575 case OMPC_unified_address:
16576 case OMPC_unified_shared_memory:
16577 case OMPC_reverse_offload:
16578 case OMPC_dynamic_allocators:
16579 case OMPC_atomic_default_mem_order:
16580 case OMPC_self_maps:
16581 case OMPC_device_type:
16583 case OMPC_nontemporal:
16586 case OMPC_severity:
16588 case OMPC_inclusive:
16589 case OMPC_exclusive:
16590 case OMPC_uses_allocators:
16591 case OMPC_affinity:
16594 case OMPC_num_teams:
16595 case OMPC_thread_limit:
16597 llvm_unreachable(
"Clause is not allowed.");
16610 assert(isAllowedClauseForDirective(DKind, CKind, OpenMPVersion) &&
16611 "Invalid directive with CKind-clause");
16614 if (NameModifier != OMPD_unknown &&
16615 !isAllowedClauseForDirective(NameModifier, CKind, OpenMPVersion))
16616 return OMPD_unknown;
16635 case OMPD_teams_loop:
16636 case OMPD_target_teams_loop:
16640 case OMPD_target_update:
16641 case OMPD_target_enter_data:
16642 case OMPD_target_exit_data:
16648 case OMPC_num_teams:
16649 case OMPC_thread_limit:
16650 case OMPC_ompx_dyn_cgroup_mem:
16651 case OMPC_dyn_groupprivate:
16653 if (Leafs[0] == OMPD_target)
16654 return OMPD_target;
16657 if (Leafs[0] == OMPD_target ||
16658 llvm::is_contained({OMPD_dispatch, OMPD_target_update,
16659 OMPD_target_enter_data, OMPD_target_exit_data},
16663 case OMPC_novariants:
16664 case OMPC_nocontext:
16665 if (DKind == OMPD_dispatch)
16669 if (DKind == OMPD_metadirective)
16670 return OMPD_metadirective;
16673 return OMPD_unknown;
16686 int InnermostIdx = [&]() {
16687 for (
int I = EndIdx - 1; I >= 0; --I) {
16688 if (isAllowedClauseForDirective(Leafs[I], Clause, OpenMPVersion))
16696 for (
int I = InnermostIdx - 1; I >= 0; --I) {
16701 if (Regions[0] != OMPD_unknown)
16702 return Regions.back();
16704 return OMPD_unknown;
16709 for (
int I = 0, E = Leafs.size(); I != E; ++I) {
16710 if (Leafs[I] == Dir)
16716 int End = NameModifier == OMPD_unknown ? Leafs.size()
16717 : GetLeafIndex(NameModifier);
16718 return GetEnclosingRegion(End, CKind);
16721 return OMPD_unknown;
16729 Stmt *HelperValStmt =
nullptr;
16732 !
Condition->isInstantiationDependent() &&
16733 !
Condition->containsUnexpandedParameterPack()) {
16738 ValExpr = Val.
get();
16742 DKind, OMPC_if,
getLangOpts().OpenMP, NameModifier);
16743 if (CaptureRegion != OMPD_unknown &&
16744 !
SemaRef.CurContext->isDependentContext()) {
16745 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
16746 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16747 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
16753 OMPIfClause(NameModifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
16754 LParenLoc, NameModifierLoc, ColonLoc, EndLoc);
16762 Stmt *HelperValStmt =
nullptr;
16765 !
Condition->isInstantiationDependent() &&
16766 !
Condition->containsUnexpandedParameterPack()) {
16771 ValExpr =
SemaRef.MakeFullExpr(Val.
get()).get();
16776 if (CaptureRegion != OMPD_unknown &&
16777 !
SemaRef.CurContext->isDependentContext()) {
16778 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
16779 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16780 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
16786 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
16797 IntConvertDiagnoser()
16798 : ICEConvertDiagnoser(
false,
false,
true) {}
16801 return S.
Diag(Loc, diag::err_omp_not_integral) <<
T;
16805 return S.
Diag(Loc, diag::err_omp_incomplete_type) <<
T;
16810 return S.
Diag(Loc, diag::err_omp_explicit_conversion) <<
T << ConvTy;
16819 return S.
Diag(Loc, diag::err_omp_ambiguous_conversion) <<
T;
16828 llvm_unreachable(
"conversion functions are permitted");
16830 } ConvertDiagnoser;
16831 return SemaRef.PerformContextualImplicitConversion(Loc, Op, ConvertDiagnoser);
16836 bool StrictlyPositive,
bool BuildCapture =
false,
16839 Stmt **HelperValStmt =
nullptr) {
16845 if (
Value.isInvalid())
16848 ValExpr =
Value.get();
16850 if (std::optional<llvm::APSInt> Result =
16852 if (Result->isSigned() &&
16853 !((!StrictlyPositive && Result->isNonNegative()) ||
16854 (StrictlyPositive && Result->isStrictlyPositive()))) {
16855 SemaRef.
Diag(Loc, diag::err_omp_negative_expression_in_clause)
16865 if (*CaptureRegion != OMPD_unknown &&
16868 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16869 ValExpr = tryBuildCapture(SemaRef, ValExpr, Captures).get();
16880 llvm::raw_svector_ostream
Out(Buffer);
16881 unsigned Skipped = Exclude.size();
16882 for (
unsigned I =
First; I <
Last; ++I) {
16883 if (llvm::is_contained(Exclude, I)) {
16888 if (I + Skipped + 2 ==
Last)
16890 else if (I + Skipped + 1 !=
Last)
16893 return std::string(
Out.str());
16901 "Unexpected num_threads modifier in OpenMP < 60.");
16906 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
16911 Expr *ValExpr = NumThreads;
16912 Stmt *HelperValStmt =
nullptr;
16923 if (CaptureRegion != OMPD_unknown &&
16924 !
SemaRef.CurContext->isDependentContext()) {
16925 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
16926 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
16927 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
16933 StartLoc, LParenLoc, ModifierLoc, EndLoc);
16938 bool SuppressExprDiags) {
16947 if (SuppressExprDiags) {
16951 SuppressedDiagnoser() : VerifyICEDiagnoser(
true) {}
16954 llvm_unreachable(
"Diagnostic suppressed");
16957 ICE =
SemaRef.VerifyIntegerConstantExpression(E, &
Result, Diagnoser,
16967 if ((StrictlyPositive && !
Result.isStrictlyPositive()) ||
16968 (!StrictlyPositive && !
Result.isNonNegative())) {
16969 Diag(E->
getExprLoc(), diag::err_omp_negative_expression_in_clause)
16974 if ((CKind == OMPC_aligned || CKind == OMPC_align ||
16975 CKind == OMPC_allocate) &&
16977 Diag(E->
getExprLoc(), diag::warn_omp_alignment_not_power_of_two)
16982 if (!
Result.isRepresentableByInt64()) {
16988 if (CKind == OMPC_collapse &&
DSAStack->getAssociatedLoops() == 1)
16990 else if (CKind == OMPC_ordered)
17009 if (
Result.isNonNegative()) {
17013 diag::err_omp_negative_expression_in_clause)
17016 }
else if (
auto *DeclRef = dyn_cast<DeclRefExpr>(DeviceNumExpr)) {
17023 Diag(DeviceNumExpr->
getExprLoc(), diag::err_expected_expression);
17057 DSAStackTy *Stack) {
17058 if (!Stack->getOMPAllocatorHandleT().isNull())
17065 S.
Diag(Loc, diag::err_omp_implied_type_not_found)
17066 <<
"omp_allocator_handle_t";
17071 Stack->setOMPAllocatorHandleT(AllocatorHandleEnumTy);
17074 bool ErrorFound =
false;
17075 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
17076 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
17077 StringRef Allocator =
17078 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
17080 auto *VD = dyn_cast_or_null<ValueDecl>(
17100 Stack->setAllocator(AllocatorKind, Res.
get());
17103 S.
Diag(Loc, diag::err_omp_implied_type_not_found)
17104 <<
"omp_allocator_handle_t";
17123 Allocator =
SemaRef.PerformImplicitConversion(
17124 Allocator.
get(),
DSAStack->getOMPAllocatorHandleT(),
17153 Expr *NumForLoops) {
17159 if (NumForLoops && LParenLoc.
isValid()) {
17164 NumForLoops = NumForLoopsResult.
get();
17166 NumForLoops =
nullptr;
17170 NumForLoops ?
DSAStack->getAssociatedLoops() : 0,
17171 StartLoc, LParenLoc, EndLoc);
17172 DSAStack->setOrderedRegion(
true, NumForLoops, Clause);
17181 case OMPC_proc_bind:
17183 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17185 case OMPC_atomic_default_mem_order:
17188 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17192 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17196 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17200 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17204 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17206 case OMPC_severity:
17209 LParenLoc, EndLoc);
17211 case OMPC_threadset:
17213 ArgumentLoc, StartLoc, LParenLoc, EndLoc);
17217 case OMPC_num_threads:
17221 case OMPC_allocator:
17222 case OMPC_collapse:
17223 case OMPC_schedule:
17225 case OMPC_firstprivate:
17226 case OMPC_lastprivate:
17228 case OMPC_reduction:
17229 case OMPC_task_reduction:
17230 case OMPC_in_reduction:
17234 case OMPC_copyprivate:
17238 case OMPC_mergeable:
17239 case OMPC_threadprivate:
17240 case OMPC_groupprivate:
17241 case OMPC_allocate:
17258 case OMPC_num_teams:
17259 case OMPC_thread_limit:
17260 case OMPC_priority:
17261 case OMPC_grainsize:
17263 case OMPC_num_tasks:
17265 case OMPC_dist_schedule:
17267 case OMPC_defaultmap:
17272 case OMPC_use_device_ptr:
17273 case OMPC_use_device_addr:
17274 case OMPC_is_device_ptr:
17275 case OMPC_has_device_addr:
17276 case OMPC_unified_address:
17277 case OMPC_unified_shared_memory:
17278 case OMPC_reverse_offload:
17279 case OMPC_dynamic_allocators:
17280 case OMPC_self_maps:
17281 case OMPC_device_type:
17283 case OMPC_nontemporal:
17285 case OMPC_novariants:
17286 case OMPC_nocontext:
17288 case OMPC_inclusive:
17289 case OMPC_exclusive:
17290 case OMPC_uses_allocators:
17291 case OMPC_affinity:
17295 llvm_unreachable(
"Clause is not allowed.");
17304 if (M == OMP_DEFAULT_unknown) {
17305 Diag(MLoc, diag::err_omp_unexpected_clause_value)
17307 unsigned(OMP_DEFAULT_unknown))
17312 Diag(VCKindLoc, diag::err_omp_default_vc)
17317 bool IsTargetDefault =
17324 if (IsTargetDefault && M == OMP_DEFAULT_shared)
17327 auto SetDefaultClauseAttrs = [&](llvm::omp::DefaultKind M,
17333 case OMP_DEFAULT_none:
17334 if (IsTargetDefault)
17335 DefMapMod = OMPC_DEFAULTMAP_MODIFIER_none;
17337 DSAStack->setDefaultDSANone(MLoc);
17339 case OMP_DEFAULT_firstprivate:
17340 if (IsTargetDefault)
17341 DefMapMod = OMPC_DEFAULTMAP_MODIFIER_firstprivate;
17343 DSAStack->setDefaultDSAFirstPrivate(MLoc);
17345 case OMP_DEFAULT_private:
17346 if (IsTargetDefault)
17347 DefMapMod = OMPC_DEFAULTMAP_MODIFIER_private;
17349 DSAStack->setDefaultDSAPrivate(MLoc);
17351 case OMP_DEFAULT_shared:
17352 assert(!IsTargetDefault &&
"DSA shared invalid with target directive");
17353 DSAStack->setDefaultDSAShared(MLoc);
17356 llvm_unreachable(
"unexpected DSA in OpenMP default clause");
17360 case OMPC_DEFAULT_VC_aggregate:
17361 if (IsTargetDefault)
17362 DefMapKind = OMPC_DEFAULTMAP_aggregate;
17364 DSAStack->setDefaultDSAVCAggregate(VCKindLoc);
17366 case OMPC_DEFAULT_VC_pointer:
17367 if (IsTargetDefault)
17368 DefMapKind = OMPC_DEFAULTMAP_pointer;
17370 DSAStack->setDefaultDSAVCPointer(VCKindLoc);
17372 case OMPC_DEFAULT_VC_scalar:
17373 if (IsTargetDefault)
17374 DefMapKind = OMPC_DEFAULTMAP_scalar;
17376 DSAStack->setDefaultDSAVCScalar(VCKindLoc);
17378 case OMPC_DEFAULT_VC_all:
17379 if (IsTargetDefault)
17380 DefMapKind = OMPC_DEFAULTMAP_all;
17382 DSAStack->setDefaultDSAVCAll(VCKindLoc);
17385 llvm_unreachable(
"unexpected variable category in OpenMP default clause");
17394 if (IsTargetDefault) {
17395 if (DefMapKind == OMPC_DEFAULTMAP_all) {
17396 DSAStack->setDefaultDMAAttr(DefMapMod, OMPC_DEFAULTMAP_aggregate, MLoc);
17397 DSAStack->setDefaultDMAAttr(DefMapMod, OMPC_DEFAULTMAP_scalar, MLoc);
17398 DSAStack->setDefaultDMAAttr(DefMapMod, OMPC_DEFAULTMAP_pointer, MLoc);
17400 DSAStack->setDefaultDMAAttr(DefMapMod, DefMapKind, MLoc);
17405 SetDefaultClauseAttrs(M, VCKind);
17407 OMPDefaultClause(M, MLoc, VCKind, VCKindLoc, StartLoc, LParenLoc, EndLoc);
17416 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
17419 << getOpenMPClauseName(OMPC_threadset);
17432 if (Kind == OMP_PROC_BIND_unknown) {
17433 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17435 unsigned(OMP_PROC_BIND_master),
17438 ? OMP_PROC_BIND_primary
17439 : OMP_PROC_BIND_spread) +
17444 if (Kind == OMP_PROC_BIND_primary &&
getLangOpts().OpenMP < 51)
17445 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17447 unsigned(OMP_PROC_BIND_master),
17449 unsigned(OMP_PROC_BIND_spread) + 1)
17459 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17461 OMPC_atomic_default_mem_order, 0,
17467 Kind, KindKwLoc, StartLoc, LParenLoc, EndLoc);
17476 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17483 OMPAtClause(Kind, KindKwLoc, StartLoc, LParenLoc, EndLoc);
17492 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17506 assert(ME &&
"NULL expr in Message clause");
17515 Stmt *HelperValStmt =
nullptr;
17521 DKind == OMPD_unknown ? OMPD_unknown
17524 if (CaptureRegion != OMPD_unknown &&
17525 !
SemaRef.CurContext->isDependentContext()) {
17526 ME =
SemaRef.MakeFullExpr(ME).get();
17527 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
17528 ME = tryBuildCapture(
SemaRef, ME, Captures).get();
17533 ME =
SemaRef.DefaultFunctionArrayLvalueConversion(ME).get();
17536 ME, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
17543 if (Kind != OMPC_ORDER_concurrent ||
17548 "OMPC_ORDER_unknown not greater than 0");
17550 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
17559 Diag(MLoc, diag::err_omp_unexpected_clause_value)
17565 DSAStack->setRegionHasOrderConcurrent(
true);
17568 unsigned existingFlags =
DSAStack->getCurScope()->getFlags();
17569 DSAStack->getCurScope()->setFlags(existingFlags |
17574 Kind, KindLoc, StartLoc, LParenLoc, EndLoc, Modifier, MLoc);
17583 Kind == OMPC_DEPEND_sink || Kind == OMPC_DEPEND_depobj) {
17585 OMPC_DEPEND_source, OMPC_DEPEND_sink, OMPC_DEPEND_depobj,
17586 OMPC_DEPEND_outallmemory, OMPC_DEPEND_inoutallmemory};
17588 Except.push_back(OMPC_DEPEND_inoutset);
17589 Diag(KindKwLoc, diag::err_omp_unexpected_clause_value)
17596 KindKwLoc, Kind, EndLoc);
17605 for (
Expr *&SizeExpr : SanitizedSizeExprs) {
17615 QualType SizeTy = SizeExpr->getType();
17644 if (!SizeExpr->isInstantiationDependent() && !IsValid)
17645 SizeExpr =
nullptr;
17649 SanitizedSizeExprs);
17656 size_t NumLoops = PermExprs.size();
17658 llvm::append_range(SanitizedPermExprs, PermExprs);
17660 for (
Expr *&PermExpr : SanitizedPermExprs) {
17663 if (!PermExpr || PermExpr->isInstantiationDependent())
17666 llvm::APSInt PermVal;
17669 bool IsValid = PermEvalExpr.
isUsable();
17671 PermExpr = PermEvalExpr.
get();
17673 if (IsValid && (PermVal < 1 || NumLoops < PermVal)) {
17677 diag::err_omp_interchange_permutation_value_range)
17678 << NumLoops << ExprRange;
17682 if (!PermExpr->isInstantiationDependent() && !IsValid)
17683 PermExpr =
nullptr;
17687 EndLoc, SanitizedPermExprs);
17703 FactorExpr, OMPC_partial,
true);
17706 FactorExpr = FactorResult.
get();
17735 FirstLoc, CountLoc, EndLoc,
First, Count);
17746 LParenLoc, EndLoc);
17756 case OMPC_schedule: {
17757 enum { Modifier1, Modifier2, ScheduleKind, NumberOfElements };
17758 assert(Argument.size() == NumberOfElements &&
17759 ArgumentLoc.size() == NumberOfElements);
17764 StartLoc, LParenLoc, ArgumentLoc[Modifier1], ArgumentLoc[Modifier2],
17765 ArgumentLoc[ScheduleKind], DelimLoc, EndLoc);
17769 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
17771 Expr, StartLoc, LParenLoc, ArgumentLoc.back(),
17774 case OMPC_dist_schedule:
17777 StartLoc, LParenLoc, ArgumentLoc.back(), DelimLoc, EndLoc);
17780 enum { DefaultModifier, DefaultVarCategory };
17782 static_cast<llvm::omp::DefaultKind
>(Argument[DefaultModifier]),
17783 ArgumentLoc[DefaultModifier],
17785 Argument[DefaultVarCategory]),
17786 ArgumentLoc[DefaultVarCategory], StartLoc, LParenLoc, EndLoc);
17788 case OMPC_defaultmap:
17789 enum { Modifier, DefaultmapKind };
17793 StartLoc, LParenLoc, ArgumentLoc[Modifier], ArgumentLoc[DefaultmapKind],
17797 enum { OrderModifier, OrderKind };
17801 LParenLoc, ArgumentLoc[OrderModifier], ArgumentLoc[OrderKind], EndLoc);
17804 assert(Argument.size() == 1 && ArgumentLoc.size() == 1);
17807 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17809 case OMPC_grainsize:
17810 assert(Argument.size() == 1 && ArgumentLoc.size() == 1 &&
17811 "Modifier for grainsize clause and its location are expected.");
17814 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17816 case OMPC_num_tasks:
17817 assert(Argument.size() == 1 && ArgumentLoc.size() == 1 &&
17818 "Modifier for num_tasks clause and its location are expected.");
17821 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17823 case OMPC_dyn_groupprivate: {
17824 enum { Modifier1, Modifier2, NumberOfElements };
17825 assert(Argument.size() == NumberOfElements &&
17826 ArgumentLoc.size() == NumberOfElements &&
17827 "Modifiers for dyn_groupprivate clause and their locations are "
17832 Argument[Modifier2]),
17833 Expr, StartLoc, LParenLoc, ArgumentLoc[Modifier1],
17834 ArgumentLoc[Modifier2], EndLoc);
17837 case OMPC_num_threads:
17838 assert(Argument.size() == 1 && ArgumentLoc.size() == 1 &&
17839 "Modifier for num_threads clause and its location are expected.");
17842 StartLoc, LParenLoc, ArgumentLoc.back(), EndLoc);
17848 case OMPC_allocator:
17849 case OMPC_collapse:
17850 case OMPC_proc_bind:
17852 case OMPC_firstprivate:
17853 case OMPC_lastprivate:
17855 case OMPC_reduction:
17856 case OMPC_task_reduction:
17857 case OMPC_in_reduction:
17861 case OMPC_copyprivate:
17865 case OMPC_mergeable:
17866 case OMPC_threadprivate:
17867 case OMPC_groupprivate:
17868 case OMPC_allocate:
17885 case OMPC_num_teams:
17886 case OMPC_thread_limit:
17887 case OMPC_priority:
17894 case OMPC_use_device_ptr:
17895 case OMPC_use_device_addr:
17896 case OMPC_is_device_ptr:
17897 case OMPC_has_device_addr:
17898 case OMPC_unified_address:
17899 case OMPC_unified_shared_memory:
17900 case OMPC_reverse_offload:
17901 case OMPC_dynamic_allocators:
17902 case OMPC_atomic_default_mem_order:
17903 case OMPC_self_maps:
17904 case OMPC_device_type:
17906 case OMPC_nontemporal:
17908 case OMPC_severity:
17911 case OMPC_novariants:
17912 case OMPC_nocontext:
17914 case OMPC_inclusive:
17915 case OMPC_exclusive:
17916 case OMPC_uses_allocators:
17917 case OMPC_affinity:
17921 llvm_unreachable(
"Clause is not allowed.");
17932 Excluded.push_back(M2);
17933 if (M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic)
17934 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_monotonic);
17935 if (M2 == OMPC_SCHEDULE_MODIFIER_monotonic)
17936 Excluded.push_back(OMPC_SCHEDULE_MODIFIER_nonmonotonic);
17937 S.
Diag(M1Loc, diag::err_omp_unexpected_clause_value)
17960 (M1 == OMPC_SCHEDULE_MODIFIER_monotonic &&
17961 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) ||
17962 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic &&
17963 M2 == OMPC_SCHEDULE_MODIFIER_monotonic)) {
17964 Diag(M2Loc, diag::err_omp_unexpected_schedule_modifier)
17970 std::string Values;
17980 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
17989 (M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ||
17990 M2 == OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
17991 Kind != OMPC_SCHEDULE_dynamic && Kind != OMPC_SCHEDULE_guided) {
17992 Diag(M1 == OMPC_SCHEDULE_MODIFIER_nonmonotonic ? M1Loc : M2Loc,
17993 diag::err_omp_schedule_nonmonotonic_static);
17996 Expr *ValExpr = ChunkSize;
17997 Stmt *HelperValStmt =
nullptr;
18008 ValExpr = Val.
get();
18013 if (std::optional<llvm::APSInt>
Result =
18015 if (
Result->isSigned() && !
Result->isStrictlyPositive()) {
18016 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
18021 DSAStack->getCurrentDirective(), OMPC_schedule,
18023 !
SemaRef.CurContext->isDependentContext()) {
18024 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
18025 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
18026 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
18034 ValExpr, HelperValStmt, M1, M1Loc, M2, M2Loc);
18053 case OMPC_mergeable:
18101 case OMPC_unified_address:
18104 case OMPC_unified_shared_memory:
18107 case OMPC_reverse_offload:
18110 case OMPC_dynamic_allocators:
18113 case OMPC_self_maps:
18127 case OMPC_ompx_bare:
18132 case OMPC_num_threads:
18136 case OMPC_allocator:
18137 case OMPC_collapse:
18138 case OMPC_schedule:
18140 case OMPC_firstprivate:
18141 case OMPC_lastprivate:
18143 case OMPC_reduction:
18144 case OMPC_task_reduction:
18145 case OMPC_in_reduction:
18149 case OMPC_copyprivate:
18151 case OMPC_proc_bind:
18152 case OMPC_threadprivate:
18153 case OMPC_groupprivate:
18154 case OMPC_allocate:
18160 case OMPC_num_teams:
18161 case OMPC_thread_limit:
18162 case OMPC_priority:
18163 case OMPC_grainsize:
18164 case OMPC_num_tasks:
18166 case OMPC_dist_schedule:
18167 case OMPC_defaultmap:
18172 case OMPC_use_device_ptr:
18173 case OMPC_use_device_addr:
18174 case OMPC_is_device_ptr:
18175 case OMPC_has_device_addr:
18176 case OMPC_atomic_default_mem_order:
18177 case OMPC_device_type:
18179 case OMPC_nontemporal:
18182 case OMPC_severity:
18184 case OMPC_novariants:
18185 case OMPC_nocontext:
18187 case OMPC_inclusive:
18188 case OMPC_exclusive:
18189 case OMPC_uses_allocators:
18190 case OMPC_affinity:
18192 case OMPC_ompx_dyn_cgroup_mem:
18193 case OMPC_dyn_groupprivate:
18195 llvm_unreachable(
"Clause is not allowed.");
18207 !
Condition->isInstantiationDependent() &&
18208 !
Condition->containsUnexpandedParameterPack()) {
18213 ValExpr = Val.
get();
18269 Diag(KindLoc, diag::err_omp_atomic_fail_wrong_or_no_clauses);
18355 if (!
hasClauses(Clauses, OMPC_init, OMPC_use, OMPC_destroy, OMPC_nowait)) {
18357 StringRef
Expected =
"'init', 'use', 'destroy', or 'nowait'";
18358 Diag(StartLoc, diag::err_omp_no_clause_for_directive)
18359 <<
Expected << getOpenMPDirectiveName(OMPD_interop, OMPVersion);
18372 bool HasInitClause =
false;
18373 bool IsTargetSync =
false;
18377 if (
const auto *InitClause = dyn_cast<OMPInitClause>(
C)) {
18378 HasInitClause =
true;
18379 if (InitClause->getIsTargetSync())
18380 IsTargetSync =
true;
18381 }
else if (
const auto *DC = dyn_cast<OMPDependClause>(
C)) {
18385 if (DependClause && HasInitClause && !IsTargetSync) {
18386 Diag(DependClause->
getBeginLoc(), diag::err_omp_interop_bad_depend_clause);
18400 if (ClauseKind == OMPC_init) {
18403 }
else if (ClauseKind == OMPC_use) {
18406 }
else if (ClauseKind == OMPC_destroy) {
18412 if (!InteropVars.insert(
DeclResult.first).second) {
18413 Diag(ELoc, diag::err_omp_interop_var_multiple_actions)
18429 Expr *RefExpr = InteropVarExpr;
18444 bool HasError =
false;
18450 if (
const auto *TD = dyn_cast<TypeDecl>(ND)) {
18451 InteropType =
QualType(TD->getTypeForDecl(), 0);
18460 SemaRef.
Diag(VarLoc, diag::err_omp_implied_type_not_found)
18461 <<
"omp_interop_t";
18467 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_wrong_type);
18473 if ((Kind == OMPC_init || Kind == OMPC_destroy) &&
18475 SemaRef.
Diag(VarLoc, diag::err_omp_interop_variable_expected)
18504 StartLoc, LParenLoc, VarLoc, EndLoc);
18517 OMPUseClause(InteropVar, StartLoc, LParenLoc, VarLoc, EndLoc);
18526 DSAStack->getCurrentDirective() == OMPD_depobj) {
18528 Diag(StartLoc, diag::err_omp_expected_clause_argument)
18530 << getOpenMPDirectiveName(OMPD_depobj, OMPVersion);
18546 Stmt *HelperValStmt =
nullptr;
18549 !
Condition->isInstantiationDependent() &&
18550 !
Condition->containsUnexpandedParameterPack()) {
18555 ValExpr =
SemaRef.MakeFullExpr(Val.
get()).get();
18560 if (CaptureRegion != OMPD_unknown &&
18561 !
SemaRef.CurContext->isDependentContext()) {
18562 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
18563 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
18564 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
18570 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
18578 Stmt *HelperValStmt =
nullptr;
18581 !
Condition->isInstantiationDependent() &&
18582 !
Condition->containsUnexpandedParameterPack()) {
18587 ValExpr =
SemaRef.MakeFullExpr(Val.
get()).get();
18592 if (CaptureRegion != OMPD_unknown &&
18593 !
SemaRef.CurContext->isDependentContext()) {
18594 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
18595 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
18596 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
18602 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
18609 Expr *ValExpr = ThreadID;
18610 Stmt *HelperValStmt =
nullptr;
18615 if (CaptureRegion != OMPD_unknown &&
18616 !
SemaRef.CurContext->isDependentContext()) {
18617 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
18618 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
18619 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
18624 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
18635 int ExtraModifier =
Data.ExtraModifier;
18636 int OriginalSharingModifier =
Data.OriginalSharingModifier;
18643 case OMPC_firstprivate:
18646 case OMPC_lastprivate:
18648 "Unexpected lastprivate modifier.");
18651 ExtraModifierLoc, ColonLoc, StartLoc, LParenLoc, EndLoc);
18656 case OMPC_reduction:
18658 "Unexpected lastprivate modifier.");
18662 ExtraModifier, OriginalSharingModifier),
18663 StartLoc, LParenLoc, ExtraModifierLoc, ColonLoc, EndLoc,
18664 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
18666 case OMPC_task_reduction:
18668 VarList, StartLoc, LParenLoc, ColonLoc, EndLoc,
18669 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
18671 case OMPC_in_reduction:
18673 VarList, StartLoc, LParenLoc, ColonLoc, EndLoc,
18674 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId);
18678 "Unexpected linear modifier.");
18680 VarList,
Data.DepModOrTailExpr, StartLoc, LParenLoc,
18682 ColonLoc,
Data.StepModifierLoc, EndLoc);
18686 LParenLoc, ColonLoc, EndLoc);
18691 case OMPC_copyprivate:
18699 "Unexpected depend modifier.");
18702 ColonLoc,
Data.OmpAllMemoryLoc},
18703 Data.DepModOrTailExpr, VarList, StartLoc, LParenLoc, EndLoc);
18707 "Unexpected map modifier.");
18709 Data.IteratorExpr,
Data.MapTypeModifiers,
Data.MapTypeModifiersLoc,
18710 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId,
18712 ExtraModifierLoc, ColonLoc, VarList, Locs);
18716 Data.MotionModifiers,
Data.MotionModifiersLoc,
Data.IteratorExpr,
18717 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId, ColonLoc,
18722 Data.MotionModifiers,
Data.MotionModifiersLoc,
Data.IteratorExpr,
18723 Data.ReductionOrMapperIdScopeSpec,
Data.ReductionOrMapperId, ColonLoc,
18726 case OMPC_use_device_ptr:
18729 case OMPC_use_device_addr:
18732 case OMPC_is_device_ptr:
18735 case OMPC_has_device_addr:
18738 case OMPC_allocate: {
18742 if (!
Data.AllocClauseModifiers.empty()) {
18743 assert(
Data.AllocClauseModifiers.size() <= 2 &&
18744 "More allocate modifiers than expected");
18745 Modifier1 =
Data.AllocClauseModifiers[0];
18746 Modifier1Loc =
Data.AllocClauseModifiersLoc[0];
18747 if (
Data.AllocClauseModifiers.size() == 2) {
18748 Modifier2 =
Data.AllocClauseModifiers[1];
18749 Modifier2Loc =
Data.AllocClauseModifiersLoc[1];
18753 Data.DepModOrTailExpr,
Data.AllocateAlignment, Modifier1, Modifier1Loc,
18754 Modifier2, Modifier2Loc, VarList, StartLoc, LParenLoc, ColonLoc,
18758 case OMPC_nontemporal:
18761 case OMPC_inclusive:
18764 case OMPC_exclusive:
18767 case OMPC_affinity:
18769 Data.DepModOrTailExpr, VarList);
18771 case OMPC_doacross:
18774 ExtraModifierLoc, ColonLoc, VarList, StartLoc, LParenLoc, EndLoc);
18776 case OMPC_num_teams:
18779 case OMPC_thread_limit:
18785 case OMPC_num_threads:
18789 case OMPC_allocator:
18790 case OMPC_collapse:
18792 case OMPC_proc_bind:
18793 case OMPC_schedule:
18797 case OMPC_mergeable:
18798 case OMPC_threadprivate:
18799 case OMPC_groupprivate:
18813 case OMPC_priority:
18814 case OMPC_grainsize:
18816 case OMPC_num_tasks:
18818 case OMPC_dist_schedule:
18819 case OMPC_defaultmap:
18822 case OMPC_unified_address:
18823 case OMPC_unified_shared_memory:
18824 case OMPC_reverse_offload:
18825 case OMPC_dynamic_allocators:
18826 case OMPC_atomic_default_mem_order:
18827 case OMPC_self_maps:
18828 case OMPC_device_type:
18832 case OMPC_severity:
18835 case OMPC_novariants:
18836 case OMPC_nocontext:
18838 case OMPC_uses_allocators:
18842 llvm_unreachable(
"Clause is not allowed.");
18855 Res =
SemaRef.CreateBuiltinUnaryOp(Loc, UO_Deref, Res.
get());
18860 Res =
SemaRef.DefaultLvalueConversion(Res.
get());
18874 bool IsImplicitClause =
18876 for (
Expr *RefExpr : VarList) {
18877 assert(RefExpr &&
"NULL expr in OpenMP private clause.");
18880 Expr *SimpleRefExpr = RefExpr;
18884 Vars.push_back(RefExpr);
18885 PrivateCopies.push_back(
nullptr);
18892 auto *VD = dyn_cast<VarDecl>(D);
18898 diag::err_omp_private_incomplete_type))
18900 Type =
Type.getNonReferenceType();
18920 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
18921 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_private) {
18922 Diag(ELoc, diag::err_omp_wrong_dsa)
18933 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
18935 << getOpenMPDirectiveName(CurrDir, OMPVersion);
18936 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
18939 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
18954 CurrDir == OMPD_target) {
18956 if (
DSAStack->checkMappableExprComponentListsForDecl(
18960 ConflictKind = WhereFoundClauseKind;
18963 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
18966 << getOpenMPDirectiveName(CurrDir, OMPVersion);
18986 SemaRef.ActOnUninitializedDecl(VDPrivate);
18993 if (!VD && !
SemaRef.CurContext->isDependentContext()) {
18994 auto *FD = dyn_cast<FieldDecl>(D);
18998 RefExpr->getExprLoc());
19002 if (!IsImplicitClause)
19003 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_private, Ref);
19004 Vars.push_back((VD ||
SemaRef.CurContext->isDependentContext())
19005 ? RefExpr->IgnoreParens()
19007 PrivateCopies.push_back(VDPrivateRefExpr);
19014 Vars, PrivateCopies);
19025 bool IsImplicitClause =
19030 for (
Expr *RefExpr : VarList) {
19031 assert(RefExpr &&
"NULL expr in OpenMP firstprivate clause.");
19034 Expr *SimpleRefExpr = RefExpr;
19038 Vars.push_back(RefExpr);
19039 PrivateCopies.push_back(
nullptr);
19040 Inits.push_back(
nullptr);
19046 ELoc = IsImplicitClause ? ImplicitClauseLoc : ELoc;
19048 auto *VD = dyn_cast<VarDecl>(D);
19054 diag::err_omp_firstprivate_incomplete_type))
19056 Type =
Type.getNonReferenceType();
19066 DSAStackTy::DSAVarData TopDVar;
19067 if (!IsImplicitClause) {
19068 DSAStackTy::DSAVarData DVar =
19080 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
19082 DVar.CKind != OMPC_lastprivate) &&
19084 Diag(ELoc, diag::err_omp_wrong_dsa)
19102 if (!(IsConstant || (VD && VD->isStaticDataMember())) && !DVar.RefExpr &&
19103 DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared) {
19104 Diag(ELoc, diag::err_omp_wrong_dsa)
19130 DVar =
DSAStack->getImplicitDSA(D,
true);
19131 if (DVar.CKind != OMPC_shared &&
19134 DVar.DKind == OMPD_unknown)) {
19135 Diag(ELoc, diag::err_omp_required_access)
19157 return C == OMPC_reduction && !AppliedToPointee;
19165 if (DVar.CKind == OMPC_reduction &&
19169 Diag(ELoc, diag::err_omp_parallel_reduction_in_task_firstprivate)
19170 << getOpenMPDirectiveName(DVar.DKind, OMPVersion);
19186 CurrDir == OMPD_target) {
19188 if (
DSAStack->checkMappableExprComponentListsForDecl(
19193 ConflictKind = WhereFoundClauseKind;
19196 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
19199 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(),
19210 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
19212 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(),
19214 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
19217 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19232 Expr *VDInitRefExpr =
nullptr;
19242 ElemType,
".firstprivate.temp");
19257 ".firstprivate.temp");
19259 RefExpr->getExprLoc());
19260 SemaRef.AddInitializerToDecl(
19261 VDPrivate,
SemaRef.DefaultLvalueConversion(VDInitRefExpr).get(),
19265 if (IsImplicitClause) {
19266 Diag(RefExpr->getExprLoc(),
19267 diag::note_omp_task_predetermined_firstprivate_here);
19271 SemaRef.CurContext->addDecl(VDPrivate);
19274 RefExpr->getExprLoc());
19276 if (!VD && !
SemaRef.CurContext->isDependentContext()) {
19277 if (TopDVar.CKind == OMPC_lastprivate) {
19278 Ref = TopDVar.PrivateCopy;
19280 auto *FD = dyn_cast<FieldDecl>(D);
19285 RefExpr->getExprLoc());
19289 ExprCaptures.push_back(Ref->
getDecl());
19292 if (!IsImplicitClause)
19293 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
19294 Vars.push_back((VD ||
SemaRef.CurContext->isDependentContext())
19295 ? RefExpr->IgnoreParens()
19297 PrivateCopies.push_back(VDPrivateRefExpr);
19298 Inits.push_back(VDInitRefExpr);
19305 getASTContext(), StartLoc, LParenLoc, EndLoc, Vars, PrivateCopies, Inits,
19314 assert(ColonLoc.
isValid() &&
"Colon location must be valid.");
19315 Diag(LPKindLoc, diag::err_omp_unexpected_clause_value)
19328 for (
Expr *RefExpr : VarList) {
19329 assert(RefExpr &&
"NULL expr in OpenMP lastprivate clause.");
19332 Expr *SimpleRefExpr = RefExpr;
19336 Vars.push_back(RefExpr);
19337 SrcExprs.push_back(
nullptr);
19338 DstExprs.push_back(
nullptr);
19339 AssignmentOps.push_back(
nullptr);
19346 auto *VD = dyn_cast<VarDecl>(D);
19352 diag::err_omp_lastprivate_incomplete_type))
19354 Type =
Type.getNonReferenceType();
19371 Diag(ELoc, diag::err_omp_lastprivate_conditional_non_scalar);
19372 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
19375 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
19389 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
19390 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_lastprivate &&
19392 DVar.CKind != OMPC_firstprivate) &&
19393 (DVar.CKind != OMPC_private || DVar.RefExpr !=
nullptr)) {
19394 Diag(ELoc, diag::err_omp_wrong_dsa)
19407 DSAStackTy::DSAVarData TopDVar = DVar;
19411 DVar =
DSAStack->getImplicitDSA(D,
true);
19412 if (DVar.CKind != OMPC_shared) {
19413 Diag(ELoc, diag::err_omp_required_access)
19431 Type.getUnqualifiedType(),
".lastprivate.src",
19442 PseudoDstExpr, PseudoSrcExpr);
19445 AssignmentOp =
SemaRef.ActOnFinishFullExpr(AssignmentOp.
get(), ELoc,
19451 if (!VD && !
SemaRef.CurContext->isDependentContext()) {
19452 if (TopDVar.CKind == OMPC_firstprivate) {
19453 Ref = TopDVar.PrivateCopy;
19457 ExprCaptures.push_back(Ref->
getDecl());
19459 if ((TopDVar.CKind == OMPC_firstprivate && !TopDVar.PrivateCopy) ||
19467 SimpleRefExpr, RefRes.
get());
19470 ExprPostUpdates.push_back(
19471 SemaRef.IgnoredValueConversions(PostUpdateRes.
get()).get());
19474 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_lastprivate, Ref);
19475 Vars.push_back((VD ||
SemaRef.CurContext->isDependentContext())
19476 ? RefExpr->IgnoreParens()
19478 SrcExprs.push_back(PseudoSrcExpr);
19479 DstExprs.push_back(PseudoDstExpr);
19480 AssignmentOps.push_back(AssignmentOp.
get());
19487 getASTContext(), StartLoc, LParenLoc, EndLoc, Vars, SrcExprs, DstExprs,
19488 AssignmentOps, LPKind, LPKindLoc, ColonLoc,
19498 for (
Expr *RefExpr : VarList) {
19499 assert(RefExpr &&
"NULL expr in OpenMP shared clause.");
19502 Expr *SimpleRefExpr = RefExpr;
19506 Vars.push_back(RefExpr);
19512 auto *VD = dyn_cast<VarDecl>(D);
19520 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
19521 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_shared &&
19523 Diag(ELoc, diag::err_omp_wrong_dsa)
19532 !
SemaRef.CurContext->isDependentContext())
19534 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_shared, Ref);
19535 Vars.push_back((VD || !Ref ||
SemaRef.CurContext->isDependentContext())
19536 ? RefExpr->IgnoreParens()
19548class DSARefChecker :
public StmtVisitor<DSARefChecker, bool> {
19553 if (
auto *VD = dyn_cast<VarDecl>(E->
getDecl())) {
19554 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
19555 if (DVar.CKind == OMPC_shared && !DVar.RefExpr)
19557 if (DVar.CKind != OMPC_unknown)
19559 DSAStackTy::DSAVarData DVarPrivate = Stack->hasDSA(
19566 return DVarPrivate.CKind != OMPC_unknown;
19570 bool VisitStmt(Stmt *S) {
19571 for (Stmt *Child : S->
children()) {
19572 if (Child && Visit(Child))
19577 explicit DSARefChecker(DSAStackTy *S) : Stack(S) {}
19584class TransformExprToCaptures :
public TreeTransform<TransformExprToCaptures> {
19585 typedef TreeTransform<TransformExprToCaptures> BaseTransform;
19586 ValueDecl *
Field =
nullptr;
19587 DeclRefExpr *CapturedExpr =
nullptr;
19590 TransformExprToCaptures(Sema &SemaRef, ValueDecl *FieldDecl)
19591 : BaseTransform(SemaRef),
Field(FieldDecl), CapturedExpr(
nullptr) {}
19593 ExprResult TransformMemberExpr(MemberExpr *E) {
19596 CapturedExpr =
buildCapture(SemaRef, Field, E,
false);
19597 return CapturedExpr;
19599 return BaseTransform::TransformMemberExpr(E);
19601 DeclRefExpr *getCapturedExpr() {
return CapturedExpr; }
19605template <
typename T,
typename U>
19608 for (
U &
Set : Lookups) {
19609 for (
auto *D :
Set) {
19620 for (
auto *RD : D->
redecls()) {
19643 AssociatedClasses);
19656 for (
auto *NS : AssociatedNamespaces) {
19669 for (
auto *D : R) {
19670 auto *Underlying = D;
19671 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
19672 Underlying = USD->getTargetDecl();
19682 if (
auto *USD = dyn_cast<UsingShadowDecl>(D))
19683 Underlying = USD->getTargetDecl();
19685 Lookups.emplace_back();
19686 Lookups.back().addDecl(Underlying);
19710 Lookups.emplace_back();
19711 Lookups.back().append(Lookup.
begin(), Lookup.
end());
19714 }
else if (
auto *ULE =
19715 cast_or_null<UnresolvedLookupExpr>(UnresolvedReduction)) {
19717 Decl *PrevD =
nullptr;
19721 else if (
auto *DRD = dyn_cast<OMPDeclareReductionDecl>(D))
19722 Lookups.back().addDecl(DRD);
19730 return !D->isInvalidDecl() &&
19731 (D->getType()->isDependentType() ||
19732 D->getType()->isInstantiationDependentType() ||
19733 D->getType()->containsUnexpandedParameterPack());
19746 true, ResSet.
begin(), ResSet.
end(),
false,
19769 if (IsComplete || RD->isBeingDefined()) {
19772 if (Lookup.
empty()) {
19773 Lookups.emplace_back();
19774 Lookups.back().append(Lookup.
begin(), Lookup.
end());
19816 if (ReductionIdScopeSpec.
isSet()) {
19817 SemaRef.
Diag(Loc, diag::err_omp_not_resolved_reduction_identifier)
19826struct ReductionData {
19828 SmallVector<Expr *, 8> Vars;
19830 SmallVector<Expr *, 8> Privates;
19832 SmallVector<Expr *, 8> LHSs;
19834 SmallVector<Expr *, 8> RHSs;
19836 SmallVector<Expr *, 8> ReductionOps;
19838 SmallVector<Expr *, 8> InscanCopyOps;
19840 SmallVector<Expr *, 8> InscanCopyArrayTemps;
19842 SmallVector<Expr *, 8> InscanCopyArrayElems;
19845 SmallVector<Expr *, 8> TaskgroupDescriptors;
19847 SmallVector<Decl *, 4> ExprCaptures;
19849 SmallVector<Expr *, 4> ExprPostUpdates;
19851 unsigned RedModifier = 0;
19853 unsigned OrigSharingModifier = 0;
19855 SmallVector<bool, 8> IsPrivateVarReduction;
19856 ReductionData() =
delete;
19858 ReductionData(
unsigned Size,
unsigned Modifier = 0,
unsigned OrgModifier = 0)
19859 : RedModifier(Modifier), OrigSharingModifier(OrgModifier) {
19860 Vars.reserve(Size);
19861 Privates.reserve(Size);
19862 LHSs.reserve(Size);
19863 RHSs.reserve(Size);
19864 ReductionOps.reserve(Size);
19865 IsPrivateVarReduction.reserve(Size);
19866 if (RedModifier == OMPC_REDUCTION_inscan) {
19867 InscanCopyOps.reserve(Size);
19868 InscanCopyArrayTemps.reserve(Size);
19869 InscanCopyArrayElems.reserve(Size);
19871 TaskgroupDescriptors.reserve(Size);
19872 ExprCaptures.reserve(Size);
19873 ExprPostUpdates.reserve(Size);
19877 void push(Expr *Item, Expr *ReductionOp) {
19878 Vars.emplace_back(Item);
19879 Privates.emplace_back(
nullptr);
19880 LHSs.emplace_back(
nullptr);
19881 RHSs.emplace_back(
nullptr);
19882 ReductionOps.emplace_back(ReductionOp);
19883 IsPrivateVarReduction.emplace_back(
false);
19884 TaskgroupDescriptors.emplace_back(
nullptr);
19885 if (RedModifier == OMPC_REDUCTION_inscan) {
19886 InscanCopyOps.push_back(
nullptr);
19887 InscanCopyArrayTemps.push_back(
nullptr);
19888 InscanCopyArrayElems.push_back(
nullptr);
19892 void push(Expr *Item, Expr *
Private, Expr *LHS, Expr *RHS, Expr *ReductionOp,
19893 Expr *TaskgroupDescriptor, Expr *CopyOp, Expr *CopyArrayTemp,
19894 Expr *CopyArrayElem,
bool IsPrivate) {
19895 Vars.emplace_back(Item);
19896 Privates.emplace_back(
Private);
19897 LHSs.emplace_back(LHS);
19898 RHSs.emplace_back(RHS);
19899 ReductionOps.emplace_back(ReductionOp);
19900 TaskgroupDescriptors.emplace_back(TaskgroupDescriptor);
19901 if (RedModifier == OMPC_REDUCTION_inscan) {
19902 InscanCopyOps.push_back(CopyOp);
19903 InscanCopyArrayTemps.push_back(CopyArrayTemp);
19904 InscanCopyArrayElems.push_back(CopyArrayElem);
19906 assert(CopyOp ==
nullptr && CopyArrayTemp ==
nullptr &&
19907 CopyArrayElem ==
nullptr &&
19908 "Copy operation must be used for inscan reductions only.");
19910 IsPrivateVarReduction.emplace_back(IsPrivate);
19919 if (Length ==
nullptr) {
19926 SingleElement =
true;
19927 ArraySizes.push_back(llvm::APSInt::get(1));
19930 if (!Length->EvaluateAsInt(Result, Context))
19933 llvm::APSInt ConstantLengthValue = Result.
Val.
getInt();
19934 SingleElement = (ConstantLengthValue.getSExtValue() == 1);
19935 ArraySizes.push_back(ConstantLengthValue);
19943 while (
const auto *TempOASE = dyn_cast<ArraySectionExpr>(
Base)) {
19944 Length = TempOASE->getLength();
19945 if (Length ==
nullptr) {
19952 llvm::APSInt ConstantOne = llvm::APSInt::get(1);
19953 ArraySizes.push_back(ConstantOne);
19956 if (!Length->EvaluateAsInt(Result, Context))
19959 llvm::APSInt ConstantLengthValue = Result.
Val.
getInt();
19960 if (ConstantLengthValue.getSExtValue() != 1)
19963 ArraySizes.push_back(ConstantLengthValue);
19965 Base = TempOASE->getBase()->IgnoreParenImpCasts();
19969 if (!SingleElement) {
19970 while (
const auto *TempASE = dyn_cast<ArraySubscriptExpr>(
Base)) {
19972 llvm::APSInt ConstantOne = llvm::APSInt::get(1);
19973 ArraySizes.push_back(ConstantOne);
19974 Base = TempASE->getBase()->IgnoreParenImpCasts();
19986 return BO_AddAssign;
19988 return BO_MulAssign;
19990 return BO_AndAssign;
19992 return BO_OrAssign;
19994 return BO_XorAssign;
20050 case OO_Array_Delete:
20059 case OO_GreaterEqual:
20061 case OO_MinusEqual:
20063 case OO_SlashEqual:
20064 case OO_PercentEqual:
20065 case OO_CaretEqual:
20069 case OO_GreaterGreater:
20070 case OO_LessLessEqual:
20071 case OO_GreaterGreaterEqual:
20072 case OO_EqualEqual:
20073 case OO_ExclaimEqual:
20076 case OO_MinusMinus:
20082 case OO_Conditional:
20085 llvm_unreachable(
"Unexpected reduction identifier");
20088 if (II->isStr(
"max"))
20090 else if (II->isStr(
"min"))
20098 if (OOK == OO_Minus && S.
LangOpts.OpenMP == 52)
20099 S.
Diag(ReductionId.
getLoc(), diag::warn_omp_minus_in_reduction_deprecated);
20102 if (ReductionIdScopeSpec.
isValid())
20108 auto IR = UnresolvedReductions.begin(), ER = UnresolvedReductions.end();
20109 bool FirstIter =
true;
20110 for (
Expr *RefExpr : VarList) {
20111 assert(RefExpr &&
"nullptr expr in OpenMP reduction clause.");
20119 if (!FirstIter && IR != ER)
20124 bool IsPrivate =
false;
20125 Expr *SimpleRefExpr = RefExpr;
20134 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
20135 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
20136 Expr *ReductionOp =
nullptr;
20138 (DeclareReductionRef.
isUnset() ||
20140 ReductionOp = DeclareReductionRef.
get();
20142 RD.push(RefExpr, ReductionOp);
20148 Expr *TaskgroupDescriptor =
nullptr;
20150 auto *ASE = dyn_cast<ArraySubscriptExpr>(RefExpr->IgnoreParens());
20151 auto *OASE = dyn_cast<ArraySectionExpr>(RefExpr->IgnoreParens());
20153 Type = ASE->getType().getNonReferenceType();
20157 if (
const auto *ATy = BaseType->getAsArrayTypeUnsafe())
20158 Type = ATy->getElementType();
20161 Type =
Type.getNonReferenceType();
20165 auto *VD = dyn_cast<VarDecl>(D);
20171 diag::err_omp_reduction_incomplete_type))
20177 false, ASE || OASE))
20184 if (!ASE && !OASE) {
20186 VarDecl *VDDef = VD->getDefinition();
20188 DSARefChecker Check(Stack);
20189 if (Check.Visit(VDDef->
getInit())) {
20190 S.
Diag(ELoc, diag::err_omp_reduction_ref_type_arg)
20209 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
20210 if (DVar.CKind == OMPC_reduction) {
20211 S.
Diag(ELoc, diag::err_omp_once_referenced)
20217 if (DVar.CKind != OMPC_unknown) {
20218 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
20234 DVar = Stack->getImplicitDSA(D,
true);
20235 if (DVar.CKind != OMPC_shared) {
20236 S.
Diag(ELoc, diag::err_omp_required_access)
20250 DVar = Stack->getImplicitDSA(D,
true);
20252 IsPrivate = DVar.CKind != OMPC_shared;
20253 bool IsOrphaned =
false;
20255 IsOrphaned = ParentDir == OMPD_unknown;
20257 RD.OrigSharingModifier == OMPC_ORIGINAL_SHARING_private))
20263 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(D,
false);
20264 if (DVar.CKind == OMPC_threadprivate) {
20265 S.
Diag(ELoc, diag::err_omp_wrong_dsa)
20277 S, ELoc, ERange, Stack->getCurScope(), ReductionIdScopeSpec,
20278 ReductionId,
Type, BasePath, IR == ER ?
nullptr : *IR);
20282 (DeclareReductionRef.
isUnset() ||
20284 RD.push(RefExpr, DeclareReductionRef.
get());
20287 if (BOK == BO_Comma && DeclareReductionRef.
isUnset()) {
20291 diag::err_omp_unknown_reduction_identifier_since_omp_6_0)
20292 <<
Type << ReductionIdRange;
20295 diag::err_omp_unknown_reduction_identifier_prior_omp_6_0)
20296 <<
Type << ReductionIdRange;
20308 if (DeclareReductionRef.
isUnset()) {
20309 if ((BOK == BO_GT || BOK == BO_LT) &&
20312 S.
Diag(ELoc, diag::err_omp_clause_not_arithmetic_type_arg)
20315 if (!ASE && !OASE) {
20316 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
20319 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20324 if ((BOK == BO_OrAssign || BOK == BO_AndAssign || BOK == BO_XorAssign) &&
20326 S.
Diag(ELoc, diag::err_omp_clause_floating_type_arg)
20328 if (!ASE && !OASE) {
20329 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
20332 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20339 Type =
Type.getNonLValueExprType(Context).getUnqualifiedType();
20348 bool ConstantLengthOASE =
false;
20350 bool SingleElement;
20353 Context, OASE, SingleElement, ArraySizes);
20356 if (ConstantLengthOASE && !SingleElement) {
20357 for (llvm::APSInt &Size : ArraySizes)
20358 PrivateTy = Context.getConstantArrayType(PrivateTy, Size,
nullptr,
20364 if ((OASE && !ConstantLengthOASE) ||
20367 if (!Context.getTargetInfo().isVLASupported()) {
20369 S.
Diag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
20370 S.
Diag(ELoc, diag::note_vla_unsupported);
20373 S.
targetDiag(ELoc, diag::err_omp_reduction_vla_unsupported) << !!OASE;
20374 S.
targetDiag(ELoc, diag::note_vla_unsupported);
20382 PrivateTy = Context.getVariableArrayType(
20387 }
else if (!ASE && !OASE &&
20400 if (DeclareReductionRef.
isUsable()) {
20403 if (DRD->getInitializer()) {
20429 Type = ComplexTy->getElementType();
20431 llvm::APFloat InitValue = llvm::APFloat::getAllOnesValue(
20432 Context.getFloatTypeSemantics(
Type));
20436 uint64_t Size = Context.getTypeSize(
Type);
20437 QualType IntTy = Context.getIntTypeForBitwidth(Size, 0);
20438 llvm::APInt InitValue = llvm::APInt::getAllOnes(Size);
20457 uint64_t Size = Context.getTypeSize(
Type);
20459 Context.getIntTypeForBitwidth(Size, IsSigned);
20460 llvm::APInt InitValue =
20461 (BOK != BO_LT) ? IsSigned ? llvm::APInt::getSignedMinValue(Size)
20462 : llvm::APInt::getMinValue(Size)
20463 : IsSigned ? llvm::APInt::getSignedMaxValue(Size)
20464 : llvm::APInt::getMaxValue(Size);
20469 ELoc, Context.getTrivialTypeSourceInfo(
Type, ELoc), ELoc,
Init);
20475 llvm::APFloat InitValue = llvm::APFloat::getLargest(
20476 Context.getFloatTypeSemantics(
Type), BOK != BO_LT);
20506 llvm_unreachable(
"Unexpected reduction operation");
20515 }
else if (!
Init) {
20525 S.
Diag(ELoc, diag::err_omp_reduction_id_not_compatible)
20526 <<
Type << ReductionIdRange;
20527 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
20530 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20536 if (DeclareReductionRef.
isUsable()) {
20538 QualType PtrRedTy = Context.getPointerType(RedTy);
20541 if (!BasePath.empty()) {
20545 Context, PtrRedTy, CK_UncheckedDerivedToBase, LHS.
get(), &BasePath,
20548 Context, PtrRedTy, CK_UncheckedDerivedToBase, RHS.
get(), &BasePath,
20552 QualType Params[] = {PtrRedTy, PtrRedTy};
20553 QualType FnTy = Context.getFunctionType(Context.VoidTy, Params, EPI);
20567 CombBOK, LHSDRE, RHSDRE);
20574 if (BOK != BO_LT && BOK != BO_GT) {
20577 BO_Assign, LHSDRE, ReductionOp.
get());
20579 auto *ConditionalOp =
new (Context)
20584 BO_Assign, LHSDRE, ConditionalOp);
20597 ExprResult CopyOpRes, TempArrayRes, TempArrayElem;
20598 if (ClauseKind == OMPC_reduction &&
20599 RD.RedModifier == OMPC_REDUCTION_inscan) {
20601 CopyOpRes = S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, LHSDRE,
20611 if (Stack->getCurrentDirective() == OMPD_simd ||
20649 if (ClauseKind == OMPC_in_reduction) {
20652 const Expr *ParentReductionOp =
nullptr;
20653 Expr *ParentBOKTD =
nullptr, *ParentReductionOpTD =
nullptr;
20654 DSAStackTy::DSAVarData ParentBOKDSA =
20655 Stack->getTopMostTaskgroupReductionData(D, ParentSR, ParentBOK,
20657 DSAStackTy::DSAVarData ParentReductionOpDSA =
20658 Stack->getTopMostTaskgroupReductionData(
20659 D, ParentSR, ParentReductionOp, ParentReductionOpTD);
20660 bool IsParentBOK = ParentBOKDSA.DKind != OMPD_unknown;
20661 bool IsParentReductionOp = ParentReductionOpDSA.DKind != OMPD_unknown;
20662 if ((DeclareReductionRef.
isUnset() && IsParentReductionOp) ||
20663 (DeclareReductionRef.
isUsable() && IsParentBOK) ||
20664 (IsParentBOK && BOK != ParentBOK) || IsParentReductionOp) {
20665 bool EmitError =
true;
20666 if (IsParentReductionOp && DeclareReductionRef.
isUsable()) {
20667 llvm::FoldingSetNodeID RedId, ParentRedId;
20668 ParentReductionOp->
Profile(ParentRedId, Context,
true);
20669 DeclareReductionRef.
get()->
Profile(RedId, Context,
20671 EmitError = RedId != ParentRedId;
20675 diag::err_omp_reduction_identifier_mismatch)
20676 << ReductionIdRange << RefExpr->getSourceRange();
20678 diag::note_omp_previous_reduction_identifier)
20680 << (IsParentBOK ? ParentBOKDSA.RefExpr
20681 : ParentReductionOpDSA.RefExpr)
20682 ->getSourceRange();
20686 TaskgroupDescriptor = IsParentBOK ? ParentBOKTD : ParentReductionOpTD;
20693 TransformExprToCaptures RebuildToCapture(S, D);
20695 RebuildToCapture.TransformExpr(RefExpr->IgnoreParens()).get();
20696 Ref = RebuildToCapture.getCapturedExpr();
20698 VarsExpr = Ref =
buildCapture(S, D, SimpleRefExpr,
false);
20701 RD.ExprCaptures.emplace_back(Ref->
getDecl());
20707 S.
BuildBinOp(Stack->getCurScope(), ELoc, BO_Assign, SimpleRefExpr,
20712 Stack->getCurrentDirective() == OMPD_taskgroup) {
20713 S.
Diag(RefExpr->getExprLoc(),
20714 diag::err_omp_reduction_non_addressable_expression)
20715 << RefExpr->getSourceRange();
20718 RD.ExprPostUpdates.emplace_back(
20725 unsigned Modifier = RD.RedModifier;
20728 if (CurrDir == OMPD_taskgroup && ClauseKind == OMPC_task_reduction)
20729 Modifier = OMPC_REDUCTION_task;
20730 Stack->addDSA(D, RefExpr->IgnoreParens(), OMPC_reduction, Ref, Modifier,
20732 if (Modifier == OMPC_REDUCTION_task &&
20733 (CurrDir == OMPD_taskgroup ||
20737 if (DeclareReductionRef.
isUsable())
20738 Stack->addTaskgroupReductionData(D, ReductionIdRange,
20739 DeclareReductionRef.
get());
20741 Stack->addTaskgroupReductionData(D, ReductionIdRange, BOK);
20743 RD.push(VarsExpr, PrivateDRE, LHSDRE, RHSDRE, ReductionOp.
get(),
20744 TaskgroupDescriptor, CopyOpRes.
get(), TempArrayRes.
get(),
20745 TempArrayElem.
get(), IsPrivate);
20747 return RD.Vars.empty();
20763 Diag(LParenLoc, diag::err_omp_unexpected_clause_value)
20774 if (Modifier == OMPC_REDUCTION_inscan &&
20775 (
DSAStack->getCurrentDirective() != OMPD_for &&
20776 DSAStack->getCurrentDirective() != OMPD_for_simd &&
20777 DSAStack->getCurrentDirective() != OMPD_simd &&
20778 DSAStack->getCurrentDirective() != OMPD_parallel_for &&
20779 DSAStack->getCurrentDirective() != OMPD_parallel_for_simd)) {
20780 Diag(ModifierLoc, diag::err_omp_wrong_inscan_reduction);
20783 ReductionData RD(VarList.size(), Modifier, OriginalSharingModifier);
20785 StartLoc, LParenLoc, ColonLoc, EndLoc,
20786 ReductionIdScopeSpec, ReductionId,
20787 UnresolvedReductions, RD))
20791 getASTContext(), StartLoc, LParenLoc, ModifierLoc, ColonLoc, EndLoc,
20794 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.InscanCopyOps,
20795 RD.InscanCopyArrayTemps, RD.InscanCopyArrayElems,
20798 OriginalSharingModifier);
20806 ReductionData RD(VarList.size());
20808 VarList, StartLoc, LParenLoc, ColonLoc,
20809 EndLoc, ReductionIdScopeSpec, ReductionId,
20810 UnresolvedReductions, RD))
20814 getASTContext(), StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
20816 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps,
20826 ReductionData RD(VarList.size());
20828 StartLoc, LParenLoc, ColonLoc, EndLoc,
20829 ReductionIdScopeSpec, ReductionId,
20830 UnresolvedReductions, RD))
20834 getASTContext(), StartLoc, LParenLoc, ColonLoc, EndLoc, RD.Vars,
20836 RD.Privates, RD.LHSs, RD.RHSs, RD.ReductionOps, RD.TaskgroupDescriptors,
20845 Diag(LinLoc, diag::err_omp_wrong_linear_modifier)
20855 const auto *VD = dyn_cast_or_null<VarDecl>(D);
20858 diag::err_omp_linear_incomplete_type))
20860 if ((LinKind == OMPC_LINEAR_uval || LinKind == OMPC_LINEAR_ref) &&
20862 Diag(ELoc, diag::err_omp_wrong_linear_modifier_non_reference)
20866 Type =
Type.getNonReferenceType();
20873 if (!IsDeclareSimd &&
20878 Type =
Type.getUnqualifiedType().getCanonicalType();
20879 const auto *Ty =
Type.getTypePtrOrNull();
20880 if (!Ty || (LinKind != OMPC_LINEAR_ref && !Ty->isDependentType() &&
20881 !Ty->isIntegralType(
getASTContext()) && !Ty->isPointerType())) {
20882 Diag(ELoc, diag::err_omp_linear_expected_int_or_ptr) <<
Type;
20884 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
20887 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
20910 Diag(Step->
getBeginLoc(), diag::err_omp_step_simple_modifier_exclusive);
20912 LinKind = OMPC_LINEAR_val;
20913 for (
Expr *RefExpr : VarList) {
20914 assert(RefExpr &&
"NULL expr in OpenMP linear clause.");
20917 Expr *SimpleRefExpr = RefExpr;
20921 Vars.push_back(RefExpr);
20922 Privates.push_back(
nullptr);
20923 Inits.push_back(
nullptr);
20930 auto *VD = dyn_cast<VarDecl>(D);
20936 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
20937 if (DVar.RefExpr) {
20938 Diag(ELoc, diag::err_omp_wrong_dsa)
20947 Type =
Type.getNonReferenceType().getUnqualifiedType().getCanonicalType();
20959 if (!VD && !
SemaRef.CurContext->isDependentContext()) {
20962 ExprCaptures.push_back(Ref->
getDecl());
20969 SimpleRefExpr, RefRes.
get());
20972 ExprPostUpdates.push_back(
20973 SemaRef.IgnoredValueConversions(PostUpdateRes.
get()).get());
20977 if (LinKind == OMPC_LINEAR_uval)
20978 InitExpr = VD ? VD->getInit() : SimpleRefExpr;
20980 InitExpr = VD ? SimpleRefExpr : Ref;
20981 SemaRef.AddInitializerToDecl(
20982 Init,
SemaRef.DefaultLvalueConversion(InitExpr).get(),
20986 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_linear, Ref);
20987 Vars.push_back((VD ||
SemaRef.CurContext->isDependentContext())
20988 ? RefExpr->IgnoreParens()
20990 Privates.push_back(PrivateRef);
20991 Inits.push_back(InitRef);
20997 Expr *StepExpr = Step;
20998 Expr *CalcStepExpr =
nullptr;
21006 StepExpr = Val.
get();
21014 SemaRef.getCurScope(), StepLoc, BO_Assign, SaveRef.
get(), StepExpr);
21016 SemaRef.ActOnFinishFullExpr(CalcStep.
get(),
false);
21020 if (std::optional<llvm::APSInt>
Result =
21022 if (!
Result->isNegative() && !
Result->isStrictlyPositive())
21023 Diag(StepLoc, diag::warn_omp_linear_step_zero)
21024 << Vars[0] << (Vars.size() > 1);
21028 CalcStepExpr = CalcStep.
get();
21033 LinLoc, ColonLoc, StepModifierLoc, EndLoc,
21034 Vars, Privates, Inits, StepExpr, CalcStepExpr,
21040 Expr *NumIterations,
Sema &SemaRef,
21041 Scope *S, DSAStackTy *Stack) {
21046 Expr *Step = Clause.getStep();
21047 Expr *CalcStep = Clause.getCalcStep();
21054 bool HasErrors =
false;
21055 auto CurInit = Clause.inits().begin();
21056 auto CurPrivate = Clause.privates().begin();
21058 for (
Expr *RefExpr : Clause.varlist()) {
21061 Expr *SimpleRefExpr = RefExpr;
21062 auto Res =
getPrivateItem(SemaRef, SimpleRefExpr, ELoc, ERange);
21064 if (Res.second || !D) {
21065 Updates.push_back(
nullptr);
21066 Finals.push_back(
nullptr);
21070 auto &&Info = Stack->isLoopControlVariable(D);
21077 diag::err_omp_linear_distribute_var_non_loop_iteration);
21078 Updates.push_back(
nullptr);
21079 Finals.push_back(
nullptr);
21083 Expr *InitExpr = *CurInit;
21088 if (LinKind == OMPC_LINEAR_uval)
21093 DE->getType().getUnqualifiedType(), DE->getExprLoc(),
21100 SemaRef, S, RefExpr->getExprLoc(), *CurPrivate, InitExpr, IV, Step,
21111 S, RefExpr->getExprLoc(), BO_Assign, CapturedRef,
21114 Final = *CurPrivate;
21118 if (!
Update.isUsable() || !Final.isUsable()) {
21119 Updates.push_back(
nullptr);
21120 Finals.push_back(
nullptr);
21121 UsedExprs.push_back(
nullptr);
21124 Updates.push_back(
Update.get());
21125 Finals.push_back(Final.get());
21127 UsedExprs.push_back(SimpleRefExpr);
21132 if (
Expr *S = Clause.getStep())
21133 UsedExprs.push_back(S);
21135 UsedExprs.append(Clause.varlist_size() + 1 - UsedExprs.size(),
nullptr);
21136 Clause.setUpdates(Updates);
21137 Clause.setFinals(Finals);
21138 Clause.setUsedExprs(UsedExprs);
21146 for (
Expr *RefExpr : VarList) {
21147 assert(RefExpr &&
"NULL expr in OpenMP aligned clause.");
21150 Expr *SimpleRefExpr = RefExpr;
21154 Vars.push_back(RefExpr);
21161 auto *VD = dyn_cast<VarDecl>(D);
21169 Diag(ELoc, diag::err_omp_aligned_expected_array_or_ptr)
21171 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
21174 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
21181 if (
const Expr *PrevRef =
DSAStack->addUniqueAligned(D, SimpleRefExpr)) {
21182 Diag(ELoc, diag::err_omp_used_in_clause_twice)
21184 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
21193 .DefaultFunctionArrayConversion(
21194 (VD || !Ref) ? RefExpr->IgnoreParens() : Ref)
21203 if (Alignment !=
nullptr) {
21208 Alignment = AlignResult.
get();
21214 ColonLoc, EndLoc, Vars, Alignment);
21225 for (
Expr *RefExpr : VarList) {
21226 assert(RefExpr &&
"NULL expr in OpenMP copyin clause.");
21229 Vars.push_back(RefExpr);
21230 SrcExprs.push_back(
nullptr);
21231 DstExprs.push_back(
nullptr);
21232 AssignmentOps.push_back(
nullptr);
21241 auto *DE = dyn_cast<DeclRefExpr>(RefExpr);
21243 Diag(ELoc, diag::err_omp_expected_var_name_member_expr)
21244 << 0 << RefExpr->getSourceRange();
21248 Decl *D = DE->getDecl();
21254 Vars.push_back(DE);
21255 SrcExprs.push_back(
nullptr);
21256 DstExprs.push_back(
nullptr);
21257 AssignmentOps.push_back(
nullptr);
21263 if (!
DSAStack->isThreadPrivate(VD)) {
21265 Diag(ELoc, diag::err_omp_required_access)
21267 << getOpenMPDirectiveName(OMPD_threadprivate, OMPVersion);
21290 SemaRef.BuildBinOp(
nullptr, DE->getExprLoc(), BO_Assign,
21291 PseudoDstExpr, PseudoSrcExpr);
21300 DSAStack->addDSA(VD, DE, OMPC_copyin);
21301 Vars.push_back(DE);
21302 SrcExprs.push_back(PseudoSrcExpr);
21303 DstExprs.push_back(PseudoDstExpr);
21304 AssignmentOps.push_back(AssignmentOp.
get());
21311 Vars, SrcExprs, DstExprs, AssignmentOps);
21322 for (
Expr *RefExpr : VarList) {
21323 assert(RefExpr &&
"NULL expr in OpenMP copyprivate clause.");
21326 Expr *SimpleRefExpr = RefExpr;
21330 Vars.push_back(RefExpr);
21331 SrcExprs.push_back(
nullptr);
21332 DstExprs.push_back(
nullptr);
21333 AssignmentOps.push_back(
nullptr);
21340 auto *VD = dyn_cast<VarDecl>(D);
21345 if (!VD || !
DSAStack->isThreadPrivate(VD)) {
21346 DSAStackTy::DSAVarData DVar =
21348 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_copyprivate &&
21350 Diag(ELoc, diag::err_omp_wrong_dsa)
21360 if (DVar.CKind == OMPC_unknown) {
21361 DVar =
DSAStack->getImplicitDSA(D,
false);
21362 if (DVar.CKind == OMPC_shared) {
21363 Diag(ELoc, diag::err_omp_required_access)
21365 <<
"threadprivate or private in the enclosing context";
21375 Diag(ELoc, diag::err_omp_variably_modified_type_not_supported)
21377 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(),
21379 bool IsDecl = !VD || VD->isThisDeclarationADefinition(
getASTContext()) ==
21382 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
21403 DSAStack->getCurScope(), ELoc, BO_Assign, PseudoDstExpr, PseudoSrcExpr);
21406 AssignmentOp =
SemaRef.ActOnFinishFullExpr(AssignmentOp.
get(), ELoc,
21415 VD ? RefExpr->IgnoreParens()
21417 SrcExprs.push_back(PseudoSrcExpr);
21418 DstExprs.push_back(PseudoDstExpr);
21419 AssignmentOps.push_back(AssignmentOp.
get());
21426 EndLoc, Vars, SrcExprs, DstExprs,
21434 if (VarList.empty())
21443 bool Diagnose =
true) {
21444 QualType OMPDependT = Stack->getOMPDependT();
21445 if (!OMPDependT.
isNull())
21451 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_depend_t";
21454 Stack->setOMPDependT(PT.
get());
21475 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
21480 Diag(Depobj->
getExprLoc(), diag::err_omp_expected_omp_depend_t_lvalue)
21490struct DoacrossDataInfoTy {
21494 DSAStackTy::OperatorOffsetTy OpsOffs;
21496 llvm::APSInt TotalDepCount;
21499static DoacrossDataInfoTy
21505 DSAStackTy::OperatorOffsetTy OpsOffs;
21506 llvm::APSInt DepCounter(32);
21507 llvm::APSInt TotalDepCount(32);
21509 if (
const Expr *OrderedCountExpr =
21510 Stack->getParentOrderedRegionParam().first) {
21511 TotalDepCount = OrderedCountExpr->EvaluateKnownConstInt(SemaRef.
Context);
21512 TotalDepCount.setIsUnsigned(
true);
21515 for (
Expr *RefExpr : VarList) {
21516 assert(RefExpr &&
"NULL expr in OpenMP doacross clause.");
21519 Vars.push_back(RefExpr);
21526 if (Stack->getParentOrderedRegionParam().first &&
21527 DepCounter >= TotalDepCount) {
21528 SemaRef.
Diag(ELoc, diag::err_omp_depend_sink_unexpected_expr);
21542 Vars.push_back(RefExpr);
21549 Expr *RHS =
nullptr;
21550 if (
auto *BO = dyn_cast<BinaryOperator>(
SimpleExpr)) {
21552 OOLoc = BO->getOperatorLoc();
21555 }
else if (
auto *OCE = dyn_cast<CXXOperatorCallExpr>(
SimpleExpr)) {
21556 OOK = OCE->getOperator();
21557 OOLoc = OCE->getOperatorLoc();
21560 }
else if (
auto *MCE = dyn_cast<CXXMemberCallExpr>(
SimpleExpr)) {
21561 OOK = MCE->getMethodDecl()
21564 .getCXXOverloadedOperator();
21565 OOLoc = MCE->getCallee()->getExprLoc();
21574 Vars.push_back(RefExpr);
21580 if (OOK != OO_Plus && OOK != OO_Minus && (RHS || OOK !=
OO_None)) {
21581 SemaRef.
Diag(OOLoc, diag::err_omp_depend_sink_expected_plus_minus);
21587 RHS, OMPC_depend,
false);
21592 Stack->getParentOrderedRegionParam().first &&
21593 DepCounter != Stack->isParentLoopControlVariable(D).first) {
21595 Stack->getParentLoopControlVariable(DepCounter.getZExtValue());
21597 SemaRef.
Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration)
21600 SemaRef.
Diag(ELoc, diag::err_omp_depend_sink_expected_loop_iteration)
21604 OpsOffs.emplace_back(RHS, OOK);
21606 Vars.push_back(RefExpr->IgnoreParenImpCasts());
21609 TotalDepCount > VarList.size() &&
21610 Stack->getParentOrderedRegionParam().first &&
21611 Stack->getParentLoopControlVariable(VarList.size() + 1)) {
21612 SemaRef.
Diag(EndLoc, diag::err_omp_depend_sink_expected_loop_iteration)
21613 << 1 << Stack->getParentLoopControlVariable(VarList.size() + 1);
21615 return {Vars, OpsOffs, TotalDepCount};
21624 if (
DSAStack->getCurrentDirective() == OMPD_ordered &&
21625 DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink) {
21626 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
21630 if (
DSAStack->getCurrentDirective() == OMPD_taskwait &&
21631 DepKind == OMPC_DEPEND_mutexinoutset) {
21632 Diag(DepLoc, diag::err_omp_taskwait_depend_mutexinoutset_not_allowed);
21635 if ((
DSAStack->getCurrentDirective() != OMPD_ordered ||
21636 DSAStack->getCurrentDirective() == OMPD_depobj) &&
21638 DepKind == OMPC_DEPEND_sink ||
21640 DSAStack->getCurrentDirective() == OMPD_depobj) &&
21641 DepKind == OMPC_DEPEND_depobj))) {
21643 OMPC_DEPEND_outallmemory,
21644 OMPC_DEPEND_inoutallmemory};
21646 DSAStack->getCurrentDirective() == OMPD_depobj)
21647 Except.push_back(OMPC_DEPEND_depobj);
21649 Except.push_back(OMPC_DEPEND_inoutset);
21651 ?
"depend modifier(iterator) or "
21653 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
21661 (DepKind == OMPC_DEPEND_source || DepKind == OMPC_DEPEND_sink)) {
21663 diag::err_omp_depend_sink_source_with_modifier);
21668 Diag(DepModifier->
getExprLoc(), diag::err_omp_depend_modifier_not_iterator);
21671 DSAStackTy::OperatorOffsetTy OpsOffs;
21672 llvm::APSInt TotalDepCount(32);
21674 if (DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) {
21677 Vars = VarOffset.Vars;
21678 OpsOffs = VarOffset.OpsOffs;
21679 TotalDepCount = VarOffset.TotalDepCount;
21681 for (
Expr *RefExpr : VarList) {
21682 assert(RefExpr &&
"NULL expr in OpenMP depend clause.");
21685 Vars.push_back(RefExpr);
21691 if (DepKind != OMPC_DEPEND_sink && DepKind != OMPC_DEPEND_source) {
21692 bool OMPDependTFound =
getLangOpts().OpenMP >= 50;
21693 if (OMPDependTFound)
21695 DepKind == OMPC_DEPEND_depobj);
21696 if (DepKind == OMPC_DEPEND_depobj) {
21700 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
21701 !RefExpr->isInstantiationDependent() &&
21702 !RefExpr->containsUnexpandedParameterPack() &&
21703 (OMPDependTFound &&
21705 DSAStack->getOMPDependT(), RefExpr->getType()))) {
21706 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
21707 << 0 << RefExpr->getType() << RefExpr->getSourceRange();
21710 if (!RefExpr->isLValue()) {
21711 Diag(ELoc, diag::err_omp_expected_omp_depend_t_lvalue)
21712 << 1 << RefExpr->getType() << RefExpr->getSourceRange();
21719 QualType ExprTy = RefExpr->getType().getNonReferenceType();
21720 const auto *OASE = dyn_cast<ArraySectionExpr>(
SimpleExpr);
21724 if (BaseType.isNull())
21726 if (
const auto *ATy = BaseType->getAsArrayTypeUnsafe())
21727 ExprTy = ATy->getElementType();
21730 if (BaseType.isNull() || ExprTy.
isNull())
21733 const Expr *Length = OASE->getLength();
21735 if (Length && !Length->isValueDependent() &&
21737 Result.Val.getInt().isZero()) {
21739 diag::err_omp_depend_zero_length_array_section_not_allowed)
21749 if (!RefExpr->isValueDependent() && !RefExpr->isTypeDependent() &&
21750 !RefExpr->isInstantiationDependent() &&
21751 !RefExpr->containsUnexpandedParameterPack() &&
21752 (!RefExpr->IgnoreParenImpCasts()->isLValue() ||
21753 (OMPDependTFound &&
DSAStack->getOMPDependT().getTypePtr() ==
21755 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
21758 << RefExpr->getSourceRange();
21762 auto *ASE = dyn_cast<ArraySubscriptExpr>(
SimpleExpr);
21763 if (ASE && !ASE->getBase()->isTypeDependent() &&
21766 .getNonReferenceType()
21767 ->isPointerType() &&
21768 !ASE->getBase()->getType().getNonReferenceType()->isArrayType()) {
21769 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
21772 << RefExpr->getSourceRange();
21779 Res =
SemaRef.CreateBuiltinUnaryOp(ELoc, UO_AddrOf,
21780 RefExpr->IgnoreParenImpCasts());
21784 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
21787 << RefExpr->getSourceRange();
21792 Vars.push_back(RefExpr->IgnoreParenImpCasts());
21796 if (DepKind != OMPC_DEPEND_source && DepKind != OMPC_DEPEND_sink &&
21797 DepKind != OMPC_DEPEND_outallmemory &&
21798 DepKind != OMPC_DEPEND_inoutallmemory && Vars.empty())
21803 {DepKind, DepLoc,
Data.ColonLoc,
Data.OmpAllMemoryLoc}, DepModifier, Vars,
21804 TotalDepCount.getZExtValue());
21805 if ((DepKind == OMPC_DEPEND_sink || DepKind == OMPC_DEPEND_source) &&
21806 DSAStack->isParentOrderedRegion())
21807 DSAStack->addDoacrossDependClause(
C, OpsOffs);
21816 "Unexpected device modifier in OpenMP < 50.");
21818 bool ErrorFound =
false;
21820 std::string Values =
21822 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
21828 Stmt *HelperValStmt =
nullptr;
21841 if (Modifier == OMPC_DEVICE_ancestor) {
21845 diag::err_omp_device_ancestor_without_requires_reverse_offload);
21853 if (CaptureRegion != OMPD_unknown &&
21854 !
SemaRef.CurContext->isDependentContext()) {
21855 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
21856 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
21857 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
21862 OMPDeviceClause(Modifier, ValExpr, HelperValStmt, CaptureRegion, StartLoc,
21863 LParenLoc, ModifierLoc, EndLoc);
21868 bool FullCheck =
true) {
21873 SemaRef.
Diag(SL, diag::warn_omp_non_trivial_type_mapped) << QTy << SR;
21883 const auto *OASE = dyn_cast<ArraySectionExpr>(E);
21889 (OASE && OASE->getColonLocFirst().isInvalid())) {
21890 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
21891 return ATy->getSExtSize() != 1;
21896 assert(OASE &&
"Expecting array section if not an array subscript.");
21897 const Expr *LowerBound = OASE->getLowerBound();
21898 const Expr *Length = OASE->getLength();
21907 llvm::APSInt ConstLowerBound = Result.
Val.
getInt();
21908 if (ConstLowerBound.getSExtValue())
21923 const auto *CATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr());
21928 if (!Length->EvaluateAsInt(Result, SemaRef.
getASTContext()))
21931 llvm::APSInt ConstLength = Result.
Val.
getInt();
21932 return CATy->getSExtSize() != ConstLength.getSExtValue();
21941 const auto *OASE = dyn_cast<ArraySectionExpr>(E);
21946 (OASE && OASE->getColonLocFirst().isInvalid()))
21949 assert(OASE &&
"Expecting array section if not an array subscript.");
21950 const Expr *Length = OASE->getLength();
21956 if (
const auto *ATy = dyn_cast<ConstantArrayType>(BaseQTy.
getTypePtr()))
21957 return ATy->getSExtSize() != 1;
21964 if (!Length->EvaluateAsInt(Result, SemaRef.
getASTContext()))
21967 llvm::APSInt ConstLength = Result.
Val.
getInt();
21968 return ConstLength.getSExtValue() != 1;
22007class MapBaseChecker final :
public StmtVisitor<MapBaseChecker, bool> {
22012 bool IsNonContiguous =
false;
22013 bool NoDiagnose =
false;
22014 const Expr *RelevantExpr =
nullptr;
22015 bool AllowUnitySizeArraySection =
true;
22016 bool AllowWholeSizeArraySection =
true;
22017 bool AllowAnotherPtr =
true;
22018 SourceLocation ELoc;
22019 SourceRange ERange;
22021 void emitErrorMsg() {
22023 if (SemaRef.getLangOpts().OpenMP < 50) {
22025 diag::err_omp_expected_named_var_member_or_array_expression)
22028 SemaRef.Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
22034 bool VisitDeclRefExpr(DeclRefExpr *DRE) {
22039 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22040 RelevantExpr = DRE;
22042 Components.emplace_back(DRE, DRE->
getDecl(), IsNonContiguous);
22046 bool VisitMemberExpr(MemberExpr *ME) {
22051 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22060 SemaRef.Diag(ELoc, diag::err_omp_expected_access_to_data_field)
22076 SemaRef.Diag(ELoc, diag::err_omp_bit_fields_forbidden_in_clause)
22096 SemaRef.Diag(ELoc, diag::err_omp_union_type_not_allowed)
22100 return RelevantExpr || Visit(E);
22110 AllowUnitySizeArraySection =
false;
22111 AllowWholeSizeArraySection =
false;
22114 Components.emplace_back(ME, FD, IsNonContiguous);
22115 return RelevantExpr || Visit(E);
22118 bool VisitArraySubscriptExpr(ArraySubscriptExpr *AE) {
22123 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
22127 return RelevantExpr || Visit(E);
22134 AllowWholeSizeArraySection =
false;
22137 Expr::EvalResult
Result;
22140 !
Result.Val.getInt().isZero()) {
22142 diag::err_omp_invalid_map_this_expr);
22144 diag::note_omp_invalid_subscript_on_this_ptr_map);
22146 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22151 Components.emplace_back(AE,
nullptr, IsNonContiguous);
22153 return RelevantExpr || Visit(E);
22156 bool VisitArraySectionExpr(ArraySectionExpr *OASE) {
22159 assert(!(SemaRef.getLangOpts().OpenMP < 50 && NoDiagnose) &&
22160 "Array sections cannot be implicitly mapped.");
22174 SemaRef.Diag(ELoc, diag::err_omp_expected_base_var_name)
22184 if (AllowWholeSizeArraySection) {
22191 if (NotWhole || IsPointer)
22192 AllowWholeSizeArraySection =
false;
22193 }
else if (DKind == OMPD_target_update &&
22194 SemaRef.getLangOpts().OpenMP >= 50) {
22195 if (IsPointer && !AllowAnotherPtr)
22196 SemaRef.Diag(ELoc, diag::err_omp_section_length_undefined)
22199 IsNonContiguous =
true;
22200 }
else if (AllowUnitySizeArraySection && NotUnity) {
22206 diag::err_array_section_does_not_specify_contiguous_storage)
22212 AllowAnotherPtr =
false;
22214 if (
const auto *TE = dyn_cast<CXXThisExpr>(E)) {
22215 Expr::EvalResult ResultR;
22216 Expr::EvalResult ResultL;
22221 diag::err_omp_invalid_map_this_expr);
22223 diag::note_omp_invalid_length_on_this_ptr_mapping);
22227 SemaRef.getASTContext()) &&
22230 diag::err_omp_invalid_map_this_expr);
22232 diag::note_omp_invalid_lower_bound_on_this_ptr_mapping);
22234 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22239 Components.emplace_back(OASE,
nullptr,
false);
22240 return RelevantExpr || Visit(E);
22242 bool VisitOMPArrayShapingExpr(OMPArrayShapingExpr *E) {
22246 Components.emplace_back(E,
nullptr, IsNonContiguous);
22248 return Visit(
Base->IgnoreParenImpCasts());
22251 bool VisitUnaryOperator(UnaryOperator *UO) {
22252 if (SemaRef.getLangOpts().OpenMP < 50 || !UO->
isLValue() ||
22257 if (!RelevantExpr) {
22259 Components.emplace_back(UO,
nullptr,
false);
22263 bool VisitBinaryOperator(BinaryOperator *BO) {
22275 Components.emplace_back(BO,
nullptr,
false);
22278 "Either LHS or RHS have base decl inside");
22280 return RelevantExpr || Visit(LE);
22281 return RelevantExpr || Visit(RE);
22283 bool VisitCXXThisExpr(CXXThisExpr *CTE) {
22284 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22285 RelevantExpr = CTE;
22286 Components.emplace_back(CTE,
nullptr, IsNonContiguous);
22289 bool VisitCXXOperatorCallExpr(CXXOperatorCallExpr *COCE) {
22290 assert(!RelevantExpr &&
"RelevantExpr is expected to be nullptr");
22291 Components.emplace_back(COCE,
nullptr, IsNonContiguous);
22294 bool VisitOpaqueValueExpr(OpaqueValueExpr *E) {
22300 return Visit(Source);
22302 bool VisitStmt(Stmt *) {
22306 const Expr *getFoundBase()
const {
return RelevantExpr; }
22307 explicit MapBaseChecker(
22310 bool NoDiagnose, SourceLocation &ELoc, SourceRange &ERange)
22311 : SemaRef(SemaRef), CKind(CKind), DKind(DKind), Components(Components),
22312 NoDiagnose(NoDiagnose), ELoc(ELoc), ERange(ERange) {}
22326 MapBaseChecker Checker(SemaRef, CKind, DKind, CurComponents, NoDiagnose, ELoc,
22330 if (SemaRef.
getLangOpts().OpenMP >= 50 && !CurComponents.empty() &&
22331 (CKind == OMPC_to || CKind == OMPC_from)) {
22332 auto CI = CurComponents.rbegin();
22333 auto CE = CurComponents.rend();
22334 for (; CI != CE; ++CI) {
22336 dyn_cast<ArraySectionExpr>(CI->getAssociatedExpression());
22341 SemaRef.
Diag(ELoc, diag::err_array_section_does_not_specify_length)
22345 return Checker.getFoundBase();
22354 bool CurrentRegionOnly,
22365 assert(!CurComponents.empty() &&
"Map clause expression with no components!");
22366 assert(CurComponents.back().getAssociatedDeclaration() == VD &&
22367 "Map clause expression with unexpected base!");
22370 bool IsEnclosedByDataEnvironmentExpr =
false;
22371 const Expr *EnclosingExpr =
nullptr;
22373 bool FoundError = DSAS->checkMappableExprComponentListsForDecl(
22374 VD, CurrentRegionOnly,
22375 [&IsEnclosedByDataEnvironmentExpr, &SemaRef, VD, CurrentRegionOnly, ELoc,
22376 ERange, CKind, &EnclosingExpr,
22380 if (CKind == Kind && SemaRef.
LangOpts.OpenMP >= 50)
22382 assert(!StackComponents.empty() &&
22383 "Map clause expression with no components!");
22384 assert(StackComponents.back().getAssociatedDeclaration() == VD &&
22385 "Map clause expression with unexpected base!");
22389 const Expr *RE = StackComponents.front().getAssociatedExpression();
22395 auto CI = CurComponents.rbegin();
22396 auto CE = CurComponents.rend();
22397 auto SI = StackComponents.rbegin();
22398 auto SE = StackComponents.rend();
22399 for (; CI != CE && SI != SE; ++CI, ++SI) {
22404 if (CurrentRegionOnly &&
22411 SemaRef.
Diag(CI->getAssociatedExpression()->getExprLoc(),
22412 diag::err_omp_multiple_array_items_in_map_clause)
22413 << CI->getAssociatedExpression()->getSourceRange();
22414 SemaRef.
Diag(SI->getAssociatedExpression()->getExprLoc(),
22415 diag::note_used_here)
22416 << SI->getAssociatedExpression()->getSourceRange();
22421 if (CI->getAssociatedExpression()->getStmtClass() !=
22422 SI->getAssociatedExpression()->getStmtClass())
22426 if (CI->getAssociatedDeclaration() != SI->getAssociatedDeclaration())
22432 for (; SI != SE; ++SI) {
22434 if (
const auto *ASE =
22435 dyn_cast<ArraySubscriptExpr>(SI->getAssociatedExpression())) {
22436 Type = ASE->getBase()->IgnoreParenImpCasts()->getType();
22437 }
else if (
const auto *OASE = dyn_cast<ArraySectionExpr>(
22438 SI->getAssociatedExpression())) {
22441 }
else if (
const auto *OASE = dyn_cast<OMPArrayShapingExpr>(
22442 SI->getAssociatedExpression())) {
22447 SemaRef, SI->getAssociatedExpression(),
Type))
22457 if (CI == CE && SI == SE) {
22458 if (CurrentRegionOnly) {
22459 if (CKind == OMPC_map) {
22460 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
22462 assert(CKind == OMPC_to || CKind == OMPC_from);
22463 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
22472 IsEnclosedByDataEnvironmentExpr =
true;
22477 std::prev(CI)->getAssociatedDeclaration()->getType();
22479 std::prev(CI)->getAssociatedExpression()->getExprLoc();
22498 if (CI == CE || SI == SE) {
22501 diag::err_omp_pointer_mapped_along_with_derived_section)
22507 if (CI->getAssociatedExpression()->getStmtClass() !=
22508 SI->getAssociatedExpression()->getStmtClass() ||
22509 CI->getAssociatedDeclaration()->getCanonicalDecl() ==
22510 SI->getAssociatedDeclaration()->getCanonicalDecl()) {
22511 assert(CI != CE && SI != SE);
22512 SemaRef.
Diag(DerivedLoc, diag::err_omp_same_pointer_dereferenced)
22525 if (CurrentRegionOnly && (CI == CE || SI == SE)) {
22526 if (CKind == OMPC_map) {
22527 if (CI != CE || SI != SE) {
22531 CI != CE ? CurComponents.begin() : StackComponents.begin();
22532 auto End = CI != CE ? CurComponents.end() : StackComponents.end();
22534 while (It != End && !It->getAssociatedDeclaration())
22535 std::advance(It, 1);
22536 assert(It != End &&
22537 "Expected at least one component with the declaration.");
22538 if (It != Begin && It->getAssociatedDeclaration()
22540 .getCanonicalType()
22541 ->isAnyPointerType()) {
22542 IsEnclosedByDataEnvironmentExpr =
false;
22543 EnclosingExpr =
nullptr;
22547 SemaRef.
Diag(ELoc, diag::err_omp_map_shared_storage) << ERange;
22549 assert(CKind == OMPC_to || CKind == OMPC_from);
22550 SemaRef.
Diag(ELoc, diag::err_omp_once_referenced_in_target_update)
22560 if (!CurrentRegionOnly && SI != SE)
22561 EnclosingExpr = RE;
22565 IsEnclosedByDataEnvironmentExpr |=
22566 (!CurrentRegionOnly && CI != CE && SI == SE);
22571 if (CurrentRegionOnly)
22585 if (EnclosingExpr && !IsEnclosedByDataEnvironmentExpr) {
22587 diag::err_omp_original_storage_is_shared_and_does_not_contain)
22603 Expr *UnresolvedMapper) {
22623 Lookups.emplace_back();
22624 Lookups.back().append(Lookup.
begin(), Lookup.
end());
22627 }
else if (
auto *ULE = cast_or_null<UnresolvedLookupExpr>(UnresolvedMapper)) {
22632 assert(DMD &&
"Expect valid OMPDeclareMapperDecl during instantiation.");
22633 Lookups.back().addDecl(DMD);
22642 return !D->isInvalidDecl() &&
22643 (D->getType()->isDependentType() ||
22644 D->getType()->isInstantiationDependentType() ||
22645 D->getType()->containsUnexpandedParameterPack());
22656 false, URS.
begin(), URS.
end(),
false,
22664 SemaRef.
Diag(Loc, diag::err_omp_mapper_wrong_type);
22705 SemaRef.
Diag(Loc, diag::err_omp_invalid_mapper)
22715struct MappableVarListInfo {
22717 ArrayRef<Expr *> VarList;
22719 SmallVector<Expr *, 16> ProcessedVarList;
22723 SmallVector<ValueDecl *, 16> VarBaseDeclarations;
22725 SmallVector<Expr *, 16> UDMapperList;
22727 MappableVarListInfo(ArrayRef<Expr *> VarList) : VarList(VarList) {
22730 VarComponents.reserve(VarList.size());
22731 VarBaseDeclarations.reserve(VarList.size());
22740 const RecordDecl *RD = BaseType->getAsRecordDecl();
22750 for (
auto *FD : RD->
fields()) {
22752 MapperVarRef,
false, Range.getBegin(),
22758 SExprs.push_back(BE);
22766 MapperId, DKind == OMPD_target_enter_data ? OMPC_MAP_to : OMPC_MAP_tofrom,
22769 Maps.push_back(MapClause);
22770 return MapperVarRef;
22774 DSAStackTy *Stack) {
22780 const RecordDecl *RD = BaseType->getAsRecordDecl();
22789 MapperId = DeclNames.getIdentifier(&Ctx.
Idents.
get(
"default"));
22791 BaseType, MapperId, Maps,
nullptr);
22801 DMD->setMapperVarRef(MapperVarRef);
22825 Lookups.emplace_back();
22826 Lookups.back().append(Lookup.
begin(), Lookup.
end());
22833 return !D->isInvalidDecl() &&
22834 (D->getType()->isDependentType() ||
22835 D->getType()->isInstantiationDependentType() ||
22836 D->getType()->containsUnexpandedParameterPack());
22882 {CanonType,
nullptr});
22883 llvm::DenseMap<const Type *, bool> Visited;
22885 while (!Types.empty()) {
22886 auto [BaseType, CurFD] = Types.pop_back_val();
22887 while (ParentChain.back().second == 0)
22888 ParentChain.pop_back();
22889 --ParentChain.back().second;
22890 if (BaseType.isNull())
22893 const RecordDecl *RD = BaseType.getCanonicalType()->getAsRecordDecl();
22896 auto It = Visited.find(BaseType.getTypePtr());
22897 if (It == Visited.end()) {
22906 DefaultMapperId, BaseType);
22907 It = Visited.try_emplace(BaseType.getTypePtr(), HasUDMapper).first;
22913 bool FirstIter =
true;
22923 ParentChain.emplace_back(CurFD, 1);
22925 ++ParentChain.back().second;
22927 Types.emplace_back(FieldTy, FD);
22946 bool IsMapTypeImplicit =
false,
bool NoDiagnose =
false) {
22948 assert((CKind == OMPC_map || CKind == OMPC_to || CKind == OMPC_from) &&
22949 "Unexpected clause kind with mappable expressions!");
22950 unsigned OMPVersion = SemaRef.
getLangOpts().OpenMP;
22958 MapperId.
setName(DeclNames.getIdentifier(
22960 MapperId.
setLoc(StartLoc);
22964 auto UMIt = UnresolvedMappers.begin(), UMEnd = UnresolvedMappers.end();
22965 bool UpdateUMIt =
false;
22966 Expr *UnresolvedMapper =
nullptr;
22968 bool HasHoldModifier =
22969 llvm::is_contained(Modifiers, OMPC_MAP_MODIFIER_ompx_hold);
22977 for (
Expr *RE : MVLI.VarList) {
22978 assert(RE &&
"Null expr in omp to/from/map clause");
22982 if (UpdateUMIt && UMIt != UMEnd) {
22986 "Expect the size of UnresolvedMappers to match with that of VarList");
22990 UnresolvedMapper = *UMIt;
22994 if (
VE->isValueDependent() ||
VE->isTypeDependent() ||
22995 VE->isInstantiationDependent() ||
22996 VE->containsUnexpandedParameterPack()) {
22999 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
23000 VE->getType().getCanonicalType(), UnresolvedMapper);
23003 MVLI.UDMapperList.push_back(ER.
get());
23006 MVLI.ProcessedVarList.push_back(RE);
23015 ELoc, diag::err_omp_expected_named_var_member_or_array_expression)
23018 SemaRef.
Diag(ELoc, diag::err_omp_non_lvalue_in_map_or_motion_clauses)
23031 DSAS->getCurrentDirective(), NoDiagnose);
23035 assert(!CurComponents.empty() &&
23036 "Invalid mappable expression information.");
23038 if (
const auto *TE = dyn_cast<CXXThisExpr>(BE)) {
23040 DSAS->addMappedClassesQualTypes(TE->getType());
23043 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
23044 VE->getType().getCanonicalType(), UnresolvedMapper);
23047 MVLI.UDMapperList.push_back(ER.
get());
23049 MVLI.ProcessedVarList.push_back(RE);
23050 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
23051 MVLI.VarComponents.back().append(CurComponents.begin(),
23052 CurComponents.end());
23053 MVLI.VarBaseDeclarations.push_back(
nullptr);
23060 CurDeclaration = CurComponents.back().getAssociatedDeclaration();
23061 assert(CurDeclaration &&
"Null decl on map clause.");
23064 "Expecting components to have associated only canonical declarations.");
23066 auto *VD = dyn_cast<VarDecl>(CurDeclaration);
23067 const auto *FD = dyn_cast<FieldDecl>(CurDeclaration);
23069 assert((VD || FD) &&
"Only variables or fields are expected here!");
23076 if (VD && DSAS->isThreadPrivate(VD)) {
23079 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
23080 SemaRef.
Diag(ELoc, diag::err_omp_threadprivate_in_clause)
23095 true, CurComponents, CKind))
23097 if (CKind == OMPC_map &&
23100 false, CurComponents, CKind))
23107 auto I = llvm::find_if(
23112 assert(I != CurComponents.end() &&
"Null decl on map clause.");
23115 auto *ASE = dyn_cast<ArraySubscriptExpr>(
VE->IgnoreParens());
23116 auto *OASE = dyn_cast<ArraySectionExpr>(
VE->IgnoreParens());
23117 auto *OAShE = dyn_cast<OMPArrayShapingExpr>(
VE->IgnoreParens());
23119 Type = ASE->getType().getNonReferenceType();
23124 Type = ATy->getElementType();
23127 Type =
Type.getNonReferenceType();
23128 }
else if (OAShE) {
23142 if (CKind == OMPC_map) {
23149 if (DKind == OMPD_target_enter_data &&
23150 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_alloc ||
23152 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
23153 << (IsMapTypeImplicit ? 1 : 0)
23155 << getOpenMPDirectiveName(DKind, OMPVersion);
23164 if (DKind == OMPD_target_exit_data &&
23165 !(MapType == OMPC_MAP_from || MapType == OMPC_MAP_release ||
23166 MapType == OMPC_MAP_delete || SemaRef.
getLangOpts().OpenMP >= 52)) {
23167 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
23168 << (IsMapTypeImplicit ? 1 : 0)
23170 << getOpenMPDirectiveName(DKind, OMPVersion);
23179 if ((DKind == OMPD_target_enter_data || DKind == OMPD_target_exit_data) &&
23181 SemaRef.
Diag(StartLoc,
23182 diag::err_omp_invalid_map_type_modifier_for_directive)
23184 OMPC_MAP_MODIFIER_ompx_hold)
23185 << getOpenMPDirectiveName(DKind, OMPVersion);
23193 if ((DKind == OMPD_target_data ||
23195 !(MapType == OMPC_MAP_to || MapType == OMPC_MAP_from ||
23196 MapType == OMPC_MAP_tofrom || MapType == OMPC_MAP_alloc)) {
23197 SemaRef.
Diag(StartLoc, diag::err_omp_invalid_map_type_for_directive)
23198 << (IsMapTypeImplicit ? 1 : 0)
23200 << getOpenMPDirectiveName(DKind, OMPVersion);
23212 if (VD && ((SemaRef.
LangOpts.OpenMP <= 45 &&
23214 DKind == OMPD_target)) {
23215 DSAStackTy::DSAVarData DVar = DSAS->getTopDSA(VD,
false);
23217 SemaRef.
Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
23220 << getOpenMPDirectiveName(DSAS->getCurrentDirective(),
23230 SemaRef, DSAS->getCurScope(), MapperIdScopeSpec, MapperId,
23231 Type.getCanonicalType(), UnresolvedMapper);
23243 BaseType =
VE->getType().getCanonicalType();
23250 ElemType = ATy->getElementType();
23255 }
else if (
VE->getType()->isArrayType()) {
23256 const ArrayType *AT =
VE->getType()->getAsArrayTypeUnsafe();
23266 MVLI.UDMapperList.push_back(ER.
get());
23269 MVLI.ProcessedVarList.push_back(RE);
23273 DSAS->addMappableExpressionComponents(CurDeclaration, CurComponents,
23279 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
23280 MVLI.VarComponents.back().append(CurComponents.begin(),
23281 CurComponents.end());
23303 BuiltinType::OMPIterator))
23305 diag::err_omp_map_modifier_not_iterator);
23308 unsigned Count = 0;
23309 for (
unsigned I = 0, E = MapTypeModifiers.size(); I < E; ++I) {
23311 llvm::is_contained(Modifiers, MapTypeModifiers[I])) {
23312 Diag(MapTypeModifiersLoc[I], diag::err_omp_duplicate_map_type_modifier);
23316 "Modifiers exceed the allowed number of map type modifiers");
23317 Modifiers[Count] = MapTypeModifiers[I];
23318 ModifiersLoc[Count] = MapTypeModifiersLoc[I];
23322 MappableVarListInfo MVLI(VarList);
23324 MapperIdScopeSpec, MapperId, UnresolvedMappers,
23325 MapType, Modifiers, IsMapTypeImplicit,
23331 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
23332 MVLI.VarComponents, MVLI.UDMapperList, IteratorModifier, Modifiers,
23334 MapperId, MapType, IsMapTypeImplicit, MapLoc);
23342 if (ReductionType.
isNull())
23350 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 0;
23355 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 1;
23359 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 2;
23363 Diag(TyLoc, diag::err_omp_reduction_wrong_type) << 3;
23366 return ReductionType;
23372 ArrayRef<std::pair<QualType, SourceLocation>> ReductionTypes,
23375 Decls.reserve(ReductionTypes.size());
23379 SemaRef.forRedeclarationInCurContext());
23384 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
23386 bool InCompoundScope =
true;
23387 if (S !=
nullptr) {
23393 SemaRef.LookupName(Lookup, S);
23394 SemaRef.FilterLookupForScope(Lookup, DC, S,
false,
23396 llvm::DenseMap<OMPDeclareReductionDecl *, bool> UsedAsPrevious;
23398 while (Filter.hasNext()) {
23400 if (InCompoundScope) {
23401 UsedAsPrevious.try_emplace(PrevDecl,
false);
23403 UsedAsPrevious[D] =
true;
23405 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
23406 PrevDecl->getLocation();
23409 if (InCompoundScope) {
23410 for (
const auto &PrevData : UsedAsPrevious) {
23411 if (!PrevData.second) {
23412 PrevDRD = PrevData.first;
23417 }
else if (PrevDeclInScope !=
nullptr) {
23418 auto *PrevDRDInScope = PrevDRD =
23421 PreviousRedeclTypes[PrevDRDInScope->getType().getCanonicalType()] =
23422 PrevDRDInScope->getLocation();
23423 PrevDRDInScope = PrevDRDInScope->getPrevDeclInScope();
23424 }
while (PrevDRDInScope !=
nullptr);
23426 for (
const auto &TyData : ReductionTypes) {
23427 const auto I = PreviousRedeclTypes.find(TyData.first.getCanonicalType());
23429 if (I != PreviousRedeclTypes.end()) {
23430 Diag(TyData.second, diag::err_omp_declare_reduction_redefinition)
23432 Diag(I->second, diag::note_previous_definition);
23435 PreviousRedeclTypes[TyData.first.getCanonicalType()] = TyData.second;
23437 getASTContext(), DC, TyData.second, Name, TyData.first, PrevDRD);
23439 DRD->setAccess(AS);
23440 Decls.push_back(DRD);
23442 DRD->setInvalidDecl();
23456 SemaRef.setFunctionHasBranchProtectedScope();
23457 SemaRef.getCurFunction()->setHasOMPDeclareReductionCombiner();
23460 SemaRef.PushDeclContext(S, DRD);
23464 SemaRef.PushExpressionEvaluationContext(
23467 QualType ReductionType = DRD->getType();
23484 if (S !=
nullptr) {
23485 SemaRef.PushOnScopeChains(OmpInParm, S);
23486 SemaRef.PushOnScopeChains(OmpOutParm, S);
23488 DRD->addDecl(OmpInParm);
23489 DRD->addDecl(OmpOutParm);
23495 DRD->setCombinerData(InE, OutE);
23501 SemaRef.DiscardCleanupsInEvaluationContext();
23502 SemaRef.PopExpressionEvaluationContext();
23505 SemaRef.PopFunctionScopeInfo();
23507 if (Combiner !=
nullptr)
23508 DRD->setCombiner(Combiner);
23510 DRD->setInvalidDecl();
23519 SemaRef.setFunctionHasBranchProtectedScope();
23522 SemaRef.PushDeclContext(S, DRD);
23526 SemaRef.PushExpressionEvaluationContext(
23529 QualType ReductionType = DRD->getType();
23546 if (S !=
nullptr) {
23547 SemaRef.PushOnScopeChains(OmpPrivParm, S);
23548 SemaRef.PushOnScopeChains(OmpOrigParm, S);
23550 DRD->addDecl(OmpPrivParm);
23551 DRD->addDecl(OmpOrigParm);
23557 DRD->setInitializerData(OrigE, PrivE);
23558 return OmpPrivParm;
23564 SemaRef.DiscardCleanupsInEvaluationContext();
23565 SemaRef.PopExpressionEvaluationContext();
23568 SemaRef.PopFunctionScopeInfo();
23572 }
else if (OmpPrivParm->
hasInit()) {
23573 DRD->setInitializer(OmpPrivParm->
getInit(),
23578 DRD->setInvalidDecl();
23584 for (
Decl *D : DeclReductions.
get()) {
23590 D->setInvalidDecl();
23593 return DeclReductions;
23605 SemaRef.CheckExtraCXXDefaultArguments(D);
23608 return SemaRef.CreateParsedType(
T, TInfo);
23613 assert(
ParsedType.isUsable() &&
"Expect usable parsed mapper type");
23616 assert(!MapperType.
isNull() &&
"Expect valid mapper type");
23621 Diag(TyLoc, diag::err_omp_mapper_wrong_type);
23633 SemaRef.forRedeclarationInCurContext());
23638 llvm::DenseMap<QualType, SourceLocation> PreviousRedeclTypes;
23640 bool InCompoundScope =
true;
23641 if (S !=
nullptr) {
23647 SemaRef.LookupName(Lookup, S);
23648 SemaRef.FilterLookupForScope(Lookup, DC, S,
false,
23650 llvm::DenseMap<OMPDeclareMapperDecl *, bool> UsedAsPrevious;
23652 while (Filter.hasNext()) {
23654 if (InCompoundScope) {
23655 UsedAsPrevious.try_emplace(PrevDecl,
false);
23657 UsedAsPrevious[D] =
true;
23659 PreviousRedeclTypes[PrevDecl->getType().getCanonicalType()] =
23660 PrevDecl->getLocation();
23663 if (InCompoundScope) {
23664 for (
const auto &PrevData : UsedAsPrevious) {
23665 if (!PrevData.second) {
23666 PrevDMD = PrevData.first;
23671 }
else if (PrevDeclInScope) {
23672 auto *PrevDMDInScope = PrevDMD =
23675 PreviousRedeclTypes[PrevDMDInScope->getType().getCanonicalType()] =
23676 PrevDMDInScope->getLocation();
23677 PrevDMDInScope = PrevDMDInScope->getPrevDeclInScope();
23678 }
while (PrevDMDInScope !=
nullptr);
23682 if (I != PreviousRedeclTypes.end()) {
23683 Diag(StartLoc, diag::err_omp_declare_mapper_redefinition)
23684 << MapperType << Name;
23685 Diag(I->second, diag::note_previous_definition);
23693 ClausesWithImplicit);
23695 MapperType, VN, ClausesWithImplicit,
23698 SemaRef.PushOnScopeChains(DMD, S);
23701 DMD->setAccess(AS);
23703 DMD->setInvalidDecl();
23709 DMD->setMapperVarRef(MapperVarRef);
23723 SemaRef.PushOnScopeChains(VD, S,
false);
23725 DSAStack->addDeclareMapperVarRef(E);
23732 if (
DSAStack->getDeclareMapperVarRef()) {
23738 assert(!IsGlobalVar &&
"Only declare mapper handles TU-scope iterators.");
23743 assert(
getLangOpts().OpenMP &&
"Expected OpenMP mode.");
23745 if (
const auto *DRE = cast_or_null<DeclRefExpr>(Ref)) {
23758 assert(
getLangOpts().OpenMP &&
"Expected OpenMP mode.");
23766 if (VarList.empty())
23769 for (
Expr *ValExpr : VarList) {
23780 if (CaptureRegion == OMPD_unknown ||
SemaRef.CurContext->isDependentContext())
23782 LParenLoc, EndLoc, VarList,
23785 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
23787 for (
Expr *ValExpr : VarList) {
23788 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
23789 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
23790 Vars.push_back(ValExpr);
23795 LParenLoc, EndLoc, Vars, PreInit);
23802 if (VarList.empty())
23805 for (
Expr *ValExpr : VarList) {
23816 if (CaptureRegion == OMPD_unknown ||
SemaRef.CurContext->isDependentContext())
23818 StartLoc, LParenLoc, EndLoc, VarList,
23821 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
23823 for (
Expr *ValExpr : VarList) {
23824 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
23825 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
23826 Vars.push_back(ValExpr);
23831 LParenLoc, EndLoc, Vars, PreInit);
23838 Expr *ValExpr = Priority;
23839 Stmt *HelperValStmt =
nullptr;
23845 ValExpr,
SemaRef, OMPC_priority,
23847 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
23851 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
23859 "Unexpected grainsize modifier in OpenMP < 51.");
23864 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
23869 Expr *ValExpr = Grainsize;
23870 Stmt *HelperValStmt =
nullptr;
23880 &CaptureRegion, &HelperValStmt))
23885 StartLoc, LParenLoc, ModifierLoc, EndLoc);
23893 "Unexpected num_tasks modifier in OpenMP < 51.");
23898 Diag(ModifierLoc, diag::err_omp_unexpected_clause_value)
23903 Expr *ValExpr = NumTasks;
23904 Stmt *HelperValStmt =
nullptr;
23911 ValExpr,
SemaRef, OMPC_num_tasks,
23913 DSAStack->getCurrentDirective(), &CaptureRegion, &HelperValStmt))
23918 StartLoc, LParenLoc, ModifierLoc, EndLoc);
23938 DSAStackTy *Stack) {
23939 QualType OMPEventHandleT = Stack->getOMPEventHandleT();
23940 if (!OMPEventHandleT.
isNull())
23945 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_event_handle_t";
23948 Stack->setOMPEventHandleT(PT.
get());
23969 auto *VD = dyn_cast_or_null<VarDecl>(Ref->
getDecl());
23979 <<
"omp_event_handle_t" << 1 << VD->
getType()
23986 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(VD,
false);
23987 if (DVar.CKind != OMPC_unknown && DVar.CKind != OMPC_firstprivate &&
24006 std::string Values;
24010 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
24014 Expr *ValExpr = ChunkSize;
24015 Stmt *HelperValStmt =
nullptr;
24026 ValExpr = Val.
get();
24031 if (std::optional<llvm::APSInt>
Result =
24033 if (
Result->isSigned() && !
Result->isStrictlyPositive()) {
24034 Diag(ChunkSizeLoc, diag::err_omp_negative_expression_in_clause)
24035 <<
"dist_schedule" << 1
24040 DSAStack->getCurrentDirective(), OMPC_dist_schedule,
24042 !
SemaRef.CurContext->isDependentContext()) {
24043 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
24044 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
24045 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
24053 Kind, ValExpr, HelperValStmt);
24061 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom ||
24062 Kind != OMPC_DEFAULTMAP_scalar) {
24066 if (M != OMPC_DEFAULTMAP_MODIFIER_tofrom) {
24068 OMPC_DEFAULTMAP_MODIFIER_tofrom);
24072 OMPC_DEFAULTMAP_scalar);
24076 Diag(Loc, diag::err_omp_unexpected_clause_value)
24084 if (!isDefaultmapKind || !isDefaultmapModifier) {
24086 ?
"'scalar', 'aggregate', 'pointer'"
24087 :
"'scalar', 'aggregate', 'pointer', 'all'";
24089 StringRef ModifierValue =
"'alloc', 'from', 'to', 'tofrom', "
24090 "'firstprivate', 'none', 'default'";
24091 if (!isDefaultmapKind && isDefaultmapModifier) {
24092 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
24094 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
24095 Diag(MLoc, diag::err_omp_unexpected_clause_value)
24098 Diag(MLoc, diag::err_omp_unexpected_clause_value)
24100 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
24104 StringRef ModifierValue =
24106 ?
"'alloc', 'from', 'to', 'tofrom', "
24107 "'firstprivate', 'none', 'default', 'present'"
24108 :
"'storage', 'from', 'to', 'tofrom', "
24109 "'firstprivate', 'private', 'none', 'default', 'present'";
24110 if (!isDefaultmapKind && isDefaultmapModifier) {
24111 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
24113 }
else if (isDefaultmapKind && !isDefaultmapModifier) {
24114 Diag(MLoc, diag::err_omp_unexpected_clause_value)
24117 Diag(MLoc, diag::err_omp_unexpected_clause_value)
24119 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
24129 if (
DSAStack->checkDefaultmapCategory(Kind)) {
24130 Diag(StartLoc, diag::err_omp_one_defaultmap_each_category);
24136 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_aggregate, StartLoc);
24137 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_scalar, StartLoc);
24138 DSAStack->setDefaultDMAAttr(M, OMPC_DEFAULTMAP_pointer, StartLoc);
24140 DSAStack->setDefaultDMAAttr(M, Kind, StartLoc);
24157 Diag(DTCI.
Loc, diag::err_omp_region_not_file_context);
24163 Diag(DTCI.
Loc, diag::warn_hip_omp_target_directives);
24165 DeclareTargetNesting.push_back(DTCI);
24171 assert(!DeclareTargetNesting.empty() &&
24172 "check isInOpenMPDeclareTargetContext() first!");
24173 return DeclareTargetNesting.pop_back_val();
24183 if (DeclareTargetNesting.empty())
24187 Diag(DTCI.
Loc, diag::warn_omp_unterminated_declare_target)
24188 << getOpenMPDirectiveName(DTCI.
Kind, OMPVersion);
24194 SemaRef.LookupParsedName(Lookup, CurScope, &ScopeSpec,
24203 VarOrFuncDeclFilterCCC CCC(
SemaRef);
24207 SemaRef.diagnoseTypo(Corrected,
24208 SemaRef.PDiag(diag::err_undeclared_var_use_suggest)
24232 "Expected variable, function or function template.");
24234 if (
auto *VD = dyn_cast<VarDecl>(ND)) {
24236 if (!VD->isFileVarDecl() && !VD->isStaticLocal() &&
24237 !VD->isStaticDataMember()) {
24238 Diag(Loc, diag::err_omp_declare_target_has_local_vars)
24247 Diag(Loc, diag::warn_omp_declare_target_after_first_use);
24251 Diag(Loc, diag::warn_hip_omp_target_directives);
24254 const unsigned Level = -1;
24257 std::optional<OMPDeclareTargetDeclAttr *> ActiveAttr =
24258 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
24259 if (ActiveAttr && (*ActiveAttr)->getDevType() != DTCI.
DT &&
24260 (*ActiveAttr)->getLevel() == Level) {
24261 Diag(Loc, diag::err_omp_device_type_mismatch)
24262 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(DTCI.
DT)
24263 << OMPDeclareTargetDeclAttr::ConvertDevTypeTyToStr(
24264 (*ActiveAttr)->getDevType());
24267 if (ActiveAttr && (*ActiveAttr)->getMapType() != MT &&
24268 (*ActiveAttr)->getLevel() == Level) {
24269 Diag(Loc, diag::err_omp_declare_target_to_and_link) << ND;
24273 if (ActiveAttr && (*ActiveAttr)->getLevel() == Level)
24276 Expr *IndirectE =
nullptr;
24277 bool IsIndirect =
false;
24283 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
24288 ML->DeclarationMarkedOpenMPDeclareTarget(ND, A);
24290 if (
auto *VD = dyn_cast<VarDecl>(ND);
24292 VD->hasGlobalStorage())
24301 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy>
MapTy =
24302 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
24303 if (SemaRef.
LangOpts.OpenMP >= 50 &&
24306 VD->hasGlobalStorage()) {
24307 if (!
MapTy || (*
MapTy != OMPDeclareTargetDeclAttr::MT_To &&
24308 *
MapTy != OMPDeclareTargetDeclAttr::MT_Enter)) {
24315 diag::err_omp_lambda_capture_in_declare_target_not_to);
24316 SemaRef.
Diag(SL, diag::note_var_explicitly_captured_here)
24323 SemaRef.
Diag(VD->
getLocation(), diag::warn_omp_not_in_target_context);
24324 SemaRef.
Diag(SL, diag::note_used_here) << SR;
24328 Sema &SemaRef, DSAStackTy *Stack,
24330 return OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD) ||
24341 if (
auto *VD = dyn_cast<VarDecl>(D)) {
24343 if (!VD->isFileVarDecl() && !VD->isStaticLocal() &&
24344 !VD->isStaticDataMember())
24348 if (
DSAStack->isThreadPrivate(VD)) {
24349 Diag(SL, diag::err_omp_threadprivate_in_target);
24354 if (
const auto *FTD = dyn_cast<FunctionTemplateDecl>(D))
24355 D = FTD->getTemplatedDecl();
24356 if (
auto *FD = dyn_cast<FunctionDecl>(D)) {
24357 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
24358 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(FD);
24359 if (IdLoc.
isValid() && Res && *Res == OMPDeclareTargetDeclAttr::MT_Link) {
24360 Diag(IdLoc, diag::err_omp_function_in_link_clause);
24365 if (
auto *VD = dyn_cast<ValueDecl>(D)) {
24375 std::optional<OMPDeclareTargetDeclAttr *> ActiveAttr =
24376 OMPDeclareTargetDeclAttr::getActiveAttr(VD);
24377 unsigned Level = DeclareTargetNesting.size();
24378 if (ActiveAttr && (*ActiveAttr)->getLevel() >= Level)
24381 Expr *IndirectE =
nullptr;
24382 bool IsIndirect =
false;
24388 auto *A = OMPDeclareTargetDeclAttr::CreateImplicit(
24390 getLangOpts().OpenMP >= 52 ? OMPDeclareTargetDeclAttr::MT_Enter
24391 : OMPDeclareTargetDeclAttr::MT_To,
24392 DTCI.
DT, IndirectE, IsIndirect, Level,
24396 ML->DeclarationMarkedOpenMPDeclareTarget(D, A);
24416 if (
auto *VD = dyn_cast<VarDecl>(Node->
getDecl())) {
24418 DeclVector.push_back(VD);
24423 for (
auto *Child : Ex->
children()) {
24432 A = TD->
getAttr<OMPDeclareTargetDeclAttr>();
24434 llvm::SmallDenseSet<Decl *> Visited;
24435 while (!DeclVector.empty()) {
24436 VarDecl *TargetVarDecl = DeclVector.pop_back_val();
24437 if (!Visited.insert(TargetVarDecl).second)
24440 if (TargetVarDecl->
hasAttr<OMPDeclareTargetDeclAttr>() &&
24470 unsigned Count = 0;
24471 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
24473 llvm::is_contained(Modifiers, MotionModifiers[I])) {
24474 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
24478 "Modifiers exceed the allowed number of motion modifiers");
24479 Modifiers[Count] = MotionModifiers[I];
24480 ModifiersLoc[Count] = MotionModifiersLoc[I];
24484 MappableVarListInfo MVLI(VarList);
24486 MapperIdScopeSpec, MapperId, UnresolvedMappers);
24487 if (MVLI.ProcessedVarList.empty())
24490 if (
auto *DRE = dyn_cast<DeclRefExpr>(IteratorExpr))
24491 if (
auto *VD = dyn_cast<VarDecl>(DRE->
getDecl()))
24494 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
24495 MVLI.VarComponents, MVLI.UDMapperList, IteratorExpr, Modifiers,
24512 unsigned Count = 0;
24513 for (
unsigned I = 0, E = MotionModifiers.size(); I < E; ++I) {
24515 llvm::is_contained(Modifiers, MotionModifiers[I])) {
24516 Diag(MotionModifiersLoc[I], diag::err_omp_duplicate_motion_modifier);
24520 "Modifiers exceed the allowed number of motion modifiers");
24521 Modifiers[Count] = MotionModifiers[I];
24522 ModifiersLoc[Count] = MotionModifiersLoc[I];
24526 MappableVarListInfo MVLI(VarList);
24528 MapperIdScopeSpec, MapperId, UnresolvedMappers);
24529 if (MVLI.ProcessedVarList.empty())
24532 if (
auto *DRE = dyn_cast<DeclRefExpr>(IteratorExpr))
24533 if (
auto *VD = dyn_cast<VarDecl>(DRE->
getDecl()))
24536 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
24537 MVLI.VarComponents, MVLI.UDMapperList, IteratorExpr, Modifiers,
24545 MappableVarListInfo MVLI(VarList);
24549 for (
Expr *RefExpr : VarList) {
24550 assert(RefExpr &&
"NULL expr in OpenMP use_device_ptr clause.");
24553 Expr *SimpleRefExpr = RefExpr;
24557 MVLI.ProcessedVarList.push_back(RefExpr);
24558 PrivateCopies.push_back(
nullptr);
24559 Inits.push_back(
nullptr);
24566 Type =
Type.getNonReferenceType().getUnqualifiedType();
24568 auto *VD = dyn_cast<VarDecl>(D);
24572 Diag(ELoc, diag::err_omp_usedeviceptr_not_a_pointer)
24573 << 0 << RefExpr->getSourceRange();
24582 if (VDPrivate->isInvalidDecl())
24585 SemaRef.CurContext->addDecl(VDPrivate);
24587 SemaRef, VDPrivate, RefExpr->getType().getUnqualifiedType(), ELoc);
24594 SemaRef.AddInitializerToDecl(
24595 VDPrivate,
SemaRef.DefaultLvalueConversion(VDInitRefExpr).get(),
24603 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
24604 PrivateCopies.push_back(VDPrivateRefExpr);
24605 Inits.push_back(VDInitRefExpr);
24610 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
24614 MVLI.VarBaseDeclarations.push_back(D);
24615 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
24616 MVLI.VarComponents.back().emplace_back(SimpleRefExpr, D,
24620 if (MVLI.ProcessedVarList.empty())
24624 getASTContext(), Locs, MVLI.ProcessedVarList, PrivateCopies, Inits,
24625 MVLI.VarBaseDeclarations, MVLI.VarComponents);
24631 MappableVarListInfo MVLI(VarList);
24633 for (
Expr *RefExpr : VarList) {
24634 assert(RefExpr &&
"NULL expr in OpenMP use_device_addr clause.");
24637 Expr *SimpleRefExpr = RefExpr;
24643 MVLI.ProcessedVarList.push_back(RefExpr);
24648 auto *VD = dyn_cast<VarDecl>(D);
24655 MVLI.ProcessedVarList.push_back(VD ? RefExpr->IgnoreParens() : Ref);
24660 DSAStack->addDSA(D, RefExpr->IgnoreParens(), OMPC_firstprivate, Ref);
24664 MVLI.VarBaseDeclarations.push_back(D);
24665 MVLI.VarComponents.emplace_back();
24666 Expr *Component = SimpleRefExpr;
24670 SemaRef.DefaultFunctionArrayLvalueConversion(SimpleRefExpr).get();
24671 MVLI.VarComponents.back().emplace_back(Component, D,
24675 if (MVLI.ProcessedVarList.empty())
24679 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
24680 MVLI.VarComponents);
24686 MappableVarListInfo MVLI(VarList);
24687 for (
Expr *RefExpr : VarList) {
24688 assert(RefExpr &&
"NULL expr in OpenMP is_device_ptr clause.");
24691 Expr *SimpleRefExpr = RefExpr;
24695 MVLI.ProcessedVarList.push_back(RefExpr);
24705 Diag(ELoc, diag::err_omp_argument_type_isdeviceptr)
24706 << 0 << RefExpr->getSourceRange();
24712 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
24715 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
24718 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(),
24724 const Expr *ConflictExpr;
24725 if (
DSAStack->checkMappableExprComponentListsForDecl(
24730 ConflictExpr = R.front().getAssociatedExpression();
24733 Diag(ELoc, diag::err_omp_map_shared_storage) << RefExpr->getSourceRange();
24742 SimpleRefExpr, D,
false);
24743 DSAStack->addMappableExpressionComponents(
24744 D, MC, OMPC_is_device_ptr);
24747 MVLI.ProcessedVarList.push_back(SimpleRefExpr);
24754 "Unexpected device pointer expression!");
24755 MVLI.VarBaseDeclarations.push_back(
24757 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
24758 MVLI.VarComponents.back().push_back(MC);
24761 if (MVLI.ProcessedVarList.empty())
24765 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
24766 MVLI.VarComponents);
24772 MappableVarListInfo MVLI(VarList);
24773 for (
Expr *RefExpr : VarList) {
24774 assert(RefExpr &&
"NULL expr in OpenMP has_device_addr clause.");
24777 Expr *SimpleRefExpr = RefExpr;
24782 MVLI.ProcessedVarList.push_back(RefExpr);
24790 DSAStackTy::DSAVarData DVar =
DSAStack->getTopDSA(D,
false);
24793 Diag(ELoc, diag::err_omp_variable_in_given_clause_and_dsa)
24796 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective(),
24802 const Expr *ConflictExpr;
24803 if (
DSAStack->checkMappableExprComponentListsForDecl(
24808 ConflictExpr = R.front().getAssociatedExpression();
24811 Diag(ELoc, diag::err_omp_map_shared_storage) << RefExpr->getSourceRange();
24819 Expr *Component = SimpleRefExpr;
24820 auto *VD = dyn_cast<VarDecl>(D);
24824 SemaRef.DefaultFunctionArrayLvalueConversion(SimpleRefExpr).get();
24826 Component, D,
false);
24827 DSAStack->addMappableExpressionComponents(
24828 D, MC, OMPC_has_device_addr);
24831 if (!VD && !
SemaRef.CurContext->isDependentContext()) {
24834 assert(Ref &&
"has_device_addr capture failed");
24835 MVLI.ProcessedVarList.push_back(Ref);
24837 MVLI.ProcessedVarList.push_back(RefExpr->IgnoreParens());
24844 "Unexpected device pointer expression!");
24845 MVLI.VarBaseDeclarations.push_back(
24847 MVLI.VarComponents.resize(MVLI.VarComponents.size() + 1);
24848 MVLI.VarComponents.back().push_back(MC);
24851 if (MVLI.ProcessedVarList.empty())
24855 getASTContext(), Locs, MVLI.ProcessedVarList, MVLI.VarBaseDeclarations,
24856 MVLI.VarComponents);
24870 bool AllocDependent =
24874 if (!AllocDependent) {
24883 AllocatorRes =
SemaRef.PerformImplicitConversion(
24884 AllocatorRes.
get(),
DSAStack->getOMPAllocatorHandleT(),
24889 Allocator = AllocatorRes.
isUsable() ? AllocatorRes.
get() :
nullptr;
24899 SemaRef.targetDiag(StartLoc, diag::err_expected_allocator_expression);
24906 if (!AlignmentDependent) {
24909 Alignment = AlignResult.
isUsable() ? AlignResult.
get() :
nullptr;
24914 for (
Expr *RefExpr : VarList) {
24915 assert(RefExpr &&
"NULL expr in OpenMP allocate clause.");
24918 Expr *SimpleRefExpr = RefExpr;
24922 Vars.push_back(RefExpr);
24928 auto *VD = dyn_cast<VarDecl>(D);
24930 if (!VD && !
SemaRef.CurContext->isDependentContext())
24932 Vars.push_back((VD ||
SemaRef.CurContext->isDependentContext())
24933 ? RefExpr->IgnoreParens()
24941 DSAStack->addInnerAllocatorExpr(Allocator);
24944 getASTContext(), StartLoc, LParenLoc, Allocator, Alignment, ColonLoc,
24945 FirstAllocateModifier, FirstAllocateModifierLoc, SecondAllocateModifier,
24946 SecondAllocateModifierLoc, EndLoc, Vars);
24954 for (
Expr *RefExpr : VarList) {
24955 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
24958 Expr *SimpleRefExpr = RefExpr;
24962 Vars.push_back(RefExpr);
24969 if (
const Expr *PrevRef =
24970 DSAStack->addUniqueNontemporal(D, SimpleRefExpr)) {
24971 Diag(ELoc, diag::err_omp_used_in_clause_twice)
24973 Diag(PrevRef->getExprLoc(), diag::note_omp_explicit_dsa)
24978 Vars.push_back(RefExpr);
24995 SemaRef.setFunctionHasBranchProtectedScope();
24997 return OMPScopeDirective::Create(
getASTContext(), StartLoc, EndLoc, Clauses,
25006 for (
Expr *RefExpr : VarList) {
25007 assert(RefExpr &&
"NULL expr in OpenMP inclusive clause.");
25010 Expr *SimpleRefExpr = RefExpr;
25015 Vars.push_back(RefExpr);
25020 const DSAStackTy::DSAVarData DVar =
25026 if (DVar.CKind != OMPC_reduction || DVar.Modifier != OMPC_REDUCTION_inscan)
25027 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
25028 << RefExpr->getSourceRange();
25030 if (
DSAStack->getParentDirective() != OMPD_unknown)
25031 DSAStack->markDeclAsUsedInScanDirective(D);
25032 Vars.push_back(RefExpr);
25047 for (
Expr *RefExpr : VarList) {
25048 assert(RefExpr &&
"NULL expr in OpenMP exclusive clause.");
25051 Expr *SimpleRefExpr = RefExpr;
25056 Vars.push_back(RefExpr);
25062 DSAStackTy::DSAVarData DVar;
25063 if (ParentDirective != OMPD_unknown)
25064 DVar =
DSAStack->getTopDSA(D,
true);
25069 if (ParentDirective == OMPD_unknown || DVar.CKind != OMPC_reduction ||
25070 DVar.Modifier != OMPC_REDUCTION_inscan) {
25071 Diag(ELoc, diag::err_omp_inclusive_exclusive_not_reduction)
25072 << RefExpr->getSourceRange();
25074 DSAStack->markDeclAsUsedInScanDirective(D);
25076 Vars.push_back(RefExpr);
25088 QualType OMPAlloctraitT = Stack->getOMPAlloctraitT();
25089 if (!OMPAlloctraitT.
isNull())
25094 S.
Diag(Loc, diag::err_omp_implied_type_not_found) <<
"omp_alloctrait_t";
25097 Stack->setOMPAlloctraitT(PT.
get());
25117 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
25118 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
25119 StringRef Allocator =
25120 OMPAllocateDeclAttr::ConvertAllocatorTypeTyToStr(AllocatorKind);
25122 PredefinedAllocators.insert(
SemaRef.LookupSingleName(
25128 Expr *AllocatorExpr =
nullptr;
25136 auto *DRE = dyn_cast<DeclRefExpr>(AllocatorExpr);
25137 bool IsPredefinedAllocator =
false;
25139 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorTy =
25141 IsPredefinedAllocator =
25143 OMPAllocateDeclAttr::AllocatorTypeTy::OMPUserDefinedMemAlloc;
25147 bool IsTypeCompatible = IsPredefinedAllocator;
25148 IsTypeCompatible = IsTypeCompatible ||
25149 Context.hasSameUnqualifiedType(AllocatorExprType,
25150 OMPAllocatorHandleT);
25152 IsTypeCompatible ||
25153 Context.typesAreCompatible(AllocatorExprType, OMPAllocatorHandleT);
25154 bool IsNonConstantLValue =
25156 if (!DRE || !IsTypeCompatible ||
25157 (!IsPredefinedAllocator && !IsNonConstantLValue)) {
25159 <<
"omp_allocator_handle_t" << (DRE ? 1 : 0)
25168 diag::err_omp_predefined_allocator_with_traits)
25181 diag::err_omp_nonpredefined_allocator_without_traits);
25187 AllocatorExpr =
SemaRef.DefaultLvalueConversion(AllocatorExpr).get();
25190 IsPredefinedAllocator
25191 ? DSAStackTy::UsesAllocatorsDeclKind::PredefinedAllocator
25192 : DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator);
25194 Expr *AllocatorTraitsExpr =
nullptr;
25208 if (
const auto *ConstArrayTy = dyn_cast<ConstantArrayType>(Ty))
25209 TraitTy = ConstArrayTy->getElementType();
25211 !(Context.hasSameUnqualifiedType(TraitTy,
25213 Context.typesAreCompatible(TraitTy,
DSAStack->getOMPAlloctraitT(),
25216 diag::err_omp_expected_array_alloctraits)
25217 << AllocatorTraitsExpr->
getType();
25222 if (
auto *DRE = dyn_cast<DeclRefExpr>(AllocatorTraitsExpr))
25225 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait);
25242 for (
Expr *RefExpr : Locators) {
25243 assert(RefExpr &&
"NULL expr in OpenMP affinity clause.");
25246 Vars.push_back(RefExpr);
25254 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
25255 << 1 << 0 << RefExpr->getSourceRange();
25266 Diag(ELoc, diag::err_omp_expected_addressable_lvalue_or_array_item)
25267 << 1 << 0 << RefExpr->getSourceRange();
25274 ColonLoc, EndLoc, Modifier, Vars);
25283 Diag(KindLoc, diag::err_omp_unexpected_clause_value)
25291 LParenLoc, EndLoc);
25298 Expr *ValExpr = Size;
25299 Stmt *HelperValStmt =
nullptr;
25310 DKind, OMPC_ompx_dyn_cgroup_mem,
getLangOpts().OpenMP);
25311 if (CaptureRegion != OMPD_unknown &&
25312 !
SemaRef.CurContext->isDependentContext()) {
25313 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
25314 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
25315 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
25320 ValExpr, HelperValStmt, CaptureRegion, StartLoc, LParenLoc, EndLoc);
25335 diag::err_omp_unexpected_clause_value)
25336 << Values << getOpenMPClauseName(OMPC_dyn_groupprivate);
25340 Expr *ValExpr = Size;
25341 Stmt *HelperValStmt =
nullptr;
25352 DKind, OMPC_dyn_groupprivate,
getLangOpts().OpenMP);
25353 if (CaptureRegion != OMPD_unknown &&
25354 !
SemaRef.CurContext->isDependentContext()) {
25355 ValExpr =
SemaRef.MakeFullExpr(ValExpr).get();
25356 llvm::MapVector<const Expr *, DeclRefExpr *> Captures;
25357 ValExpr = tryBuildCapture(
SemaRef, ValExpr, Captures).get();
25362 StartLoc, LParenLoc, EndLoc, ValExpr, HelperValStmt, CaptureRegion, M1,
25371 if (
DSAStack->getCurrentDirective() == OMPD_ordered &&
25372 DepType != OMPC_DOACROSS_source && DepType != OMPC_DOACROSS_sink &&
25373 DepType != OMPC_DOACROSS_sink_omp_cur_iteration &&
25374 DepType != OMPC_DOACROSS_source_omp_cur_iteration) {
25375 Diag(DepLoc, diag::err_omp_unexpected_clause_value)
25381 DSAStackTy::OperatorOffsetTy OpsOffs;
25382 llvm::APSInt TotalDepCount(32);
25385 DepType == OMPC_DOACROSS_source ||
25386 DepType == OMPC_DOACROSS_source_omp_cur_iteration ||
25387 DepType == OMPC_DOACROSS_sink_omp_cur_iteration,
25389 Vars = VarOffset.Vars;
25390 OpsOffs = VarOffset.OpsOffs;
25391 TotalDepCount = VarOffset.TotalDepCount;
25393 EndLoc, DepType, DepLoc, ColonLoc, Vars,
25394 TotalDepCount.getZExtValue());
25395 if (
DSAStack->isParentOrderedRegion())
25396 DSAStack->addDoacrossDependClause(
C, OpsOffs);
25425 case OMPC_contains:
25428 llvm_unreachable(
"Unexpected OpenMP clause");
25436 case OMPC_no_openmp:
25438 case OMPC_no_openmp_routines:
25440 case OMPC_no_parallelism:
25442 case OMPC_no_openmp_constructs:
25445 llvm_unreachable(
"Unexpected OpenMP clause");
25454 if (
Base->hasPlaceholderType() &&
25455 !
Base->hasPlaceholderType(BuiltinType::ArraySection)) {
25468 LowerBound =
Result.get();
25470 if (Length && Length->getType()->isNonOverloadPlaceholderType()) {
25490 if (
Base->isTypeDependent() ||
25493 (Length && (Length->isTypeDependent() || Length->isValueDependent())) ||
25496 Base, LowerBound, Length, Stride, Context.DependentTy,
VK_LValue,
25497 OK_Ordinary, ColonLocFirst, ColonLocSecond, RBLoc);
25509 Diag(
Base->getExprLoc(), diag::err_omp_typecheck_section_value)
25510 <<
Base->getSourceRange());
25516 if (Res.isInvalid())
25518 diag::err_omp_typecheck_section_not_integer)
25520 LowerBound = Res.get();
25530 if (Res.isInvalid())
25532 diag::err_omp_typecheck_section_not_integer)
25533 << 1 << Length->getSourceRange());
25534 Length = Res.get();
25536 if (Length->getType()->isSpecificBuiltinType(BuiltinType::Char_S) ||
25537 Length->getType()->isSpecificBuiltinType(BuiltinType::Char_U))
25538 Diag(Length->getExprLoc(), diag::warn_omp_section_is_char)
25539 << 1 << Length->getSourceRange();
25546 diag::err_omp_typecheck_section_not_integer)
25548 Stride = Res.
get();
25561 Diag(
Base->getExprLoc(), diag::err_omp_section_function_type)
25562 << ResultTy <<
Base->getSourceRange();
25566 if (
SemaRef.RequireCompleteType(
Base->getExprLoc(), ResultTy,
25567 diag::err_omp_section_incomplete_type,
Base))
25575 llvm::APSInt LowerBoundValue =
Result.Val.getInt();
25576 if (LowerBoundValue.isNegative()) {
25578 diag::err_omp_section_not_subset_of_array)
25587 if (Length->EvaluateAsInt(
Result, Context)) {
25590 llvm::APSInt LengthValue =
Result.Val.getInt();
25591 if (LengthValue.isNegative()) {
25592 Diag(Length->getExprLoc(), diag::err_omp_section_length_negative)
25593 <<
toString(LengthValue, 10,
true)
25594 << Length->getSourceRange();
25598 }
else if (
SemaRef.getLangOpts().OpenMP < 60 && ColonLocFirst.
isValid() &&
25604 Diag(ColonLocFirst, diag::err_omp_section_length_undefined)
25614 llvm::APSInt StrideValue =
Result.Val.getInt();
25615 if (!StrideValue.isStrictlyPositive()) {
25616 Diag(Stride->
getExprLoc(), diag::err_omp_section_stride_non_positive)
25617 <<
toString(StrideValue, 10,
true)
25624 if (!
Base->hasPlaceholderType(BuiltinType::ArraySection)) {
25631 Base, LowerBound, Length, Stride, Context.ArraySectionTy,
VK_LValue,
25632 OK_Ordinary, ColonLocFirst, ColonLocSecond, RBLoc);
25639 if (
Base->hasPlaceholderType()) {
25653 LParenLoc, RParenLoc, Dims, Brackets);
25655 (!
Base->isTypeDependent() &&
25658 diag::err_omp_non_pointer_type_array_shaping_base)
25659 <<
Base->getSourceRange());
25662 bool ErrorFound =
false;
25664 if (
Dim->hasPlaceholderType()) {
25666 if (
Result.isInvalid()) {
25671 if (
Result.isInvalid()) {
25677 if (!
Dim->isTypeDependent()) {
25680 if (
Result.isInvalid()) {
25682 Diag(
Dim->getExprLoc(), diag::err_omp_typecheck_shaping_not_integer)
25683 <<
Dim->getSourceRange();
25688 if (!
Dim->isValueDependent() &&
Dim->EvaluateAsInt(EvResult, Context)) {
25693 if (!
Value.isStrictlyPositive()) {
25694 Diag(
Dim->getExprLoc(), diag::err_omp_shaping_dimension_not_positive)
25696 <<
Dim->getSourceRange();
25702 NewDims.push_back(
Dim);
25707 LParenLoc, RParenLoc, NewDims, Brackets);
25717 bool IsCorrect =
true;
25722 if (!D.Type.getAsOpaquePtr()) {
25726 DeclTy = Context.IntTy;
25727 StartLoc = D.DeclIdentLoc;
25733 bool IsDeclTyDependent = DeclTy->isDependentType() ||
25734 DeclTy->containsUnexpandedParameterPack() ||
25735 DeclTy->isInstantiationDependentType();
25736 if (!IsDeclTyDependent) {
25737 if (!DeclTy->isIntegralType(Context) && !DeclTy->isAnyPointerType()) {
25740 Diag(StartLoc, diag::err_omp_iterator_not_integral_or_pointer)
25745 if (DeclTy.isConstant(Context)) {
25748 Diag(StartLoc, diag::err_omp_iterator_not_integral_or_pointer)
25756 assert(D.DeclIdent &&
"Identifier expected.");
25761 D.DeclIdent, DeclTy, TInfo,
SC_None);
25779 SemaRef.PushOnScopeChains(VD, S);
25782 SemaRef.CurContext->addDecl(VD);
25788 Expr *Begin = D.Range.Begin;
25792 Begin = BeginRes.
get();
25794 Expr *End = D.Range.End;
25798 End = EndRes.
get();
25800 Expr *Step = D.Range.Step;
25803 Diag(Step->
getExprLoc(), diag::err_omp_iterator_step_not_integral)
25808 std::optional<llvm::APSInt>
Result =
25814 Diag(Step->
getExprLoc(), diag::err_omp_iterator_step_constant_zero)
25820 if (!Begin || !End || !IsCorrect) {
25836 if (
Decl *ID = D.IteratorDecl)
25837 ID->setInvalidDecl();
25842 if (!
SemaRef.CurContext->isDependentContext()) {
25849 D.Range.End, D.Range.Begin);
25855 if (D.Range.Step) {
25858 Res =
SemaRef.CreateBuiltinBinOp(D.AssignmentLoc, BO_Add, Res.
get(),
25865 Res =
SemaRef.CreateBuiltinBinOp(
25866 D.AssignmentLoc, BO_Sub, Res.
get(),
25867 SemaRef.ActOnIntegerConstant(D.AssignmentLoc, 1).get());
25873 Res =
SemaRef.CreateBuiltinBinOp(D.AssignmentLoc, BO_Div, Res.
get(),
25879 St1 =
SemaRef.CreateBuiltinUnaryOp(D.AssignmentLoc, UO_Minus,
25883 D.AssignmentLoc, BO_Sub, D.Range.Begin, D.Range.End);
25889 Res1 =
SemaRef.CreateBuiltinBinOp(D.AssignmentLoc, BO_Add, Res1.
get(),
25896 Res1 =
SemaRef.CreateBuiltinBinOp(
25897 D.AssignmentLoc, BO_Sub, Res1.
get(),
25898 SemaRef.ActOnIntegerConstant(D.AssignmentLoc, 1).get());
25904 Res1 =
SemaRef.CreateBuiltinBinOp(D.AssignmentLoc, BO_Div, Res1.
get(),
25912 D.AssignmentLoc, BO_GT, D.Range.Step,
25913 SemaRef.ActOnIntegerConstant(D.AssignmentLoc, 0).get());
25918 Res =
SemaRef.ActOnConditionalOp(D.AssignmentLoc, D.AssignmentLoc,
25925 Res =
SemaRef.ActOnFinishFullExpr(Res.
get(),
false);