41#include "llvm/ADT/IndexedMap.h"
42#include "llvm/ADT/PointerEmbeddedInt.h"
43#include "llvm/ADT/STLExtras.h"
44#include "llvm/ADT/Sequence.h"
45#include "llvm/ADT/SmallSet.h"
46#include "llvm/ADT/StringExtras.h"
47#include "llvm/Frontend/OpenMP/OMPAssume.h"
48#include "llvm/Frontend/OpenMP/OMPConstants.h"
49#include "llvm/IR/Assumptions.h"
54using namespace llvm::omp;
67enum DefaultDataSharingAttributes {
72 DSA_firstprivate = 1 << 3,
82 unsigned Modifier = 0;
83 const Expr *RefExpr =
nullptr;
86 bool AppliedToPointee =
false;
87 DSAVarData() =
default;
91 bool AppliedToPointee)
92 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
93 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
94 AppliedToPointee(AppliedToPointee) {}
96 using OperatorOffsetTy =
98 using DoacrossClauseMapTy = llvm::DenseMap<OMPClause *, OperatorOffsetTy>;
100 enum class UsesAllocatorsDeclKind {
104 UserDefinedAllocator,
112 unsigned Modifier = 0;
115 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
119 bool AppliedToPointee =
false;
121 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
122 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
123 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
124 using LoopControlVariablesMapTy =
125 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
128 struct MappedExprComponentTy {
132 using MappedExprComponentsTy =
133 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
134 using CriticalsWithHintsTy =
135 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
136 struct ReductionData {
137 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
139 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
140 ReductionData() =
default;
147 ReductionOp = RefExpr;
150 using DeclReductionMapTy =
151 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
152 struct DefaultmapInfo {
156 DefaultmapInfo() =
default;
158 : ImplicitBehavior(M), SLoc(
Loc) {}
161 struct SharingMapTy {
162 DeclSAMapTy SharingMap;
163 DeclReductionMapTy ReductionMap;
164 UsedRefMapTy AlignedMap;
165 UsedRefMapTy NontemporalMap;
166 MappedExprComponentsTy MappedExprComponents;
167 LoopControlVariablesMapTy LCVMap;
168 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
173 Scope *CurScope =
nullptr;
179 DoacrossClauseMapTy DoacrossDepends;
183 std::optional<std::pair<const Expr *, OMPOrderedClause *>> OrderedRegion;
184 bool RegionHasOrderConcurrent =
false;
185 unsigned AssociatedLoops = 1;
186 bool HasMutipleLoops =
false;
187 const Decl *PossiblyLoopCounter =
nullptr;
188 bool NowaitRegion =
false;
189 bool UntiedRegion =
false;
190 bool CancelRegion =
false;
191 bool LoopStart =
false;
192 bool BodyComplete =
false;
197 Expr *TaskgroupReductionRef =
nullptr;
198 llvm::DenseSet<QualType> MappedClassesQualTypes;
200 llvm::DenseSet<CanonicalDeclPtr<Decl>> ImplicitTaskFirstprivates;
205 llvm::DenseSet<CanonicalDeclPtr<Decl>> UsedInScanDirective;
206 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
210 struct ImplicitDefaultFDInfoTy {
214 size_t StackLevel = 0;
217 ImplicitDefaultFDInfoTy(
const FieldDecl *FD,
size_t StackLevel,
219 : FD(FD), StackLevel(StackLevel), VD(VD) {}
223 ImplicitDefaultFirstprivateFDs;
224 Expr *DeclareMapperVar =
nullptr;
228 :
Directive(DKind), DirectiveName(Name), CurScope(CurScope),
230 SharingMapTy() =
default;
236 DeclSAMapTy Threadprivates;
243 bool ForceCapturing =
false;
246 bool ForceCaptureByReferenceInTargetExecutable =
false;
247 CriticalsWithHintsTy Criticals;
248 unsigned IgnoredStackElements = 0;
252 using const_iterator = StackTy::const_reverse_iterator;
253 const_iterator begin()
const {
254 return Stack.empty() ? const_iterator()
255 : Stack.back().first.rbegin() + IgnoredStackElements;
257 const_iterator end()
const {
258 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
260 using iterator = StackTy::reverse_iterator;
262 return Stack.empty() ? iterator()
263 : Stack.back().first.rbegin() + IgnoredStackElements;
266 return Stack.empty() ? iterator() : Stack.back().first.rend();
// True when there is no usable directive entry on the stack: nothing was
// pushed, the top entries belong to a different (non-capturing) function
// scope, or every remaining top entry is temporarily ignored via
// IgnoredStackElements.
271 bool isStackEmpty()
const {
272 return Stack.empty() ||
273 Stack.back().second != CurrentNonCapturingFunctionScope ||
274 Stack.back().first.size() <= IgnoredStackElements;
// Number of visible (non-ignored) entries on the current stack; 0 when the
// stack is considered empty per isStackEmpty().
276 size_t getStackSize()
const {
277 return isStackEmpty() ? 0
278 : Stack.back().first.size() - IgnoredStackElements;
// Innermost visible directive entry, or (presumably, when Size == 0 -- the
// guard lines are not shown here; confirm) nullptr.
281 SharingMapTy *getTopOfStackOrNull() {
282 size_t Size = getStackSize();
285 return &Stack.back().first[
Size - 1];
// Const overload; forwards to the non-const version via const_cast.
287 const SharingMapTy *getTopOfStackOrNull()
const {
288 return const_cast<DSAStackTy &
>(*this).getTopOfStackOrNull();
// Innermost directive entry; asserts that the stack is non-empty.
290 SharingMapTy &getTopOfStack() {
291 assert(!isStackEmpty() &&
"no current directive");
292 return *getTopOfStackOrNull();
// Const overload; forwards to the non-const version via const_cast.
294 const SharingMapTy &getTopOfStack()
const {
295 return const_cast<DSAStackTy &
>(*this).getTopOfStack();
298 SharingMapTy *getSecondOnStackOrNull() {
299 size_t Size = getStackSize();
302 return &Stack.back().first[
Size - 2];
304 const SharingMapTy *getSecondOnStackOrNull()
const {
305 return const_cast<DSAStackTy &
>(*this).getSecondOnStackOrNull();
314 SharingMapTy &getStackElemAtLevel(
unsigned Level) {
315 assert(Level < getStackSize() &&
"no such stack element");
316 return Stack.back().first[
Level];
318 const SharingMapTy &getStackElemAtLevel(
unsigned Level)
const {
319 return const_cast<DSAStackTy &
>(*this).getStackElemAtLevel(Level);
325 bool isOpenMPLocal(
VarDecl *
D, const_iterator
Iter)
const;
338 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
347 explicit DSAStackTy(
Sema &S) : SemaRef(S) {}
350 void setOMPAllocatorHandleT(
QualType Ty) { OMPAllocatorHandleT = Ty; }
352 QualType getOMPAllocatorHandleT()
const {
return OMPAllocatorHandleT; }
354 void setOMPAlloctraitT(
QualType Ty) { OMPAlloctraitT = Ty; }
356 QualType getOMPAlloctraitT()
const {
return OMPAlloctraitT; }
358 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
360 OMPPredefinedAllocators[AllocatorKind] = Allocator;
363 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind)
const {
364 return OMPPredefinedAllocators[AllocatorKind];
367 void setOMPDependT(
QualType Ty) { OMPDependT = Ty; }
369 QualType getOMPDependT()
const {
return OMPDependT; }
372 void setOMPEventHandleT(
QualType Ty) { OMPEventHandleT = Ty; }
374 QualType getOMPEventHandleT()
const {
return OMPEventHandleT; }
376 bool isClauseParsingMode()
const {
return ClauseKindMode != OMPC_unknown; }
378 assert(isClauseParsingMode() &&
"Must be in clause parsing mode.");
379 return ClauseKindMode;
// True if the body of the innermost directive region has been fully
// processed; safely returns false when the stack is empty.
383 bool isBodyComplete()
const {
384 const SharingMapTy *Top = getTopOfStackOrNull();
385 return Top && Top->BodyComplete;
387 void setBodyComplete() { getTopOfStack().BodyComplete =
true; }
389 bool isForceVarCapturing()
const {
return ForceCapturing; }
390 void setForceVarCapturing(
bool V) { ForceCapturing =
V; }
392 void setForceCaptureByReferenceInTargetExecutable(
bool V) {
393 ForceCaptureByReferenceInTargetExecutable =
V;
395 bool isForceCaptureByReferenceInTargetExecutable()
const {
396 return ForceCaptureByReferenceInTargetExecutable;
401 assert(!IgnoredStackElements &&
402 "cannot change stack while ignoring elements");
404 Stack.back().second != CurrentNonCapturingFunctionScope)
405 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
406 Stack.back().first.emplace_back(DKind, DirName, CurScope,
Loc);
407 Stack.back().first.back().DefaultAttrLoc =
Loc;
411 assert(!IgnoredStackElements &&
412 "cannot change stack while ignoring elements");
413 assert(!Stack.back().first.empty() &&
414 "Data-sharing attributes stack is empty!");
415 Stack.back().first.pop_back();
420 class ParentDirectiveScope {
425 ParentDirectiveScope(DSAStackTy &Self,
bool Activate)
426 : Self(Self), Active(
false) {
430 ~ParentDirectiveScope() { disable(); }
433 --Self.IgnoredStackElements;
439 ++Self.IgnoredStackElements;
448 "Expected loop-based directive.");
449 getTopOfStack().LoopStart =
true;
454 "Expected loop-based directive.");
455 getTopOfStack().LoopStart =
false;
458 bool isLoopStarted()
const {
460 "Expected loop-based directive.");
461 return !getTopOfStack().LoopStart;
464 void resetPossibleLoopCounter(
const Decl *
D =
nullptr) {
468 const Decl *getPossiblyLoopCounter()
const {
469 return getTopOfStack().PossiblyLoopCounter;
472 void pushFunction() {
473 assert(!IgnoredStackElements &&
474 "cannot change stack while ignoring elements");
476 assert(!isa<CapturingScopeInfo>(CurFnScope));
477 CurrentNonCapturingFunctionScope = CurFnScope;
481 assert(!IgnoredStackElements &&
482 "cannot change stack while ignoring elements");
483 if (!Stack.empty() && Stack.back().second == OldFSI) {
484 assert(Stack.back().first.empty());
487 CurrentNonCapturingFunctionScope =
nullptr;
489 if (!isa<CapturingScopeInfo>(FSI)) {
490 CurrentNonCapturingFunctionScope = FSI;
497 Criticals.try_emplace(
D->getDirectiveName().getAsString(),
D, Hint);
499 const std::pair<const OMPCriticalDirective *, llvm::APSInt>
501 auto I = Criticals.find(Name.getAsString());
502 if (I != Criticals.end())
504 return std::make_pair(
nullptr, llvm::APSInt());
521 const LCDeclInfo isLoopControlVariable(
const ValueDecl *
D)
const;
526 const LCDeclInfo isParentLoopControlVariable(
const ValueDecl *
D)
const;
531 const LCDeclInfo isLoopControlVariable(
const ValueDecl *
D,
532 unsigned Level)
const;
535 const ValueDecl *getParentLoopControlVariable(
unsigned I)
const;
538 void markDeclAsUsedInScanDirective(
ValueDecl *
D) {
539 if (SharingMapTy *Stack = getSecondOnStackOrNull())
540 Stack->UsedInScanDirective.insert(
D);
544 bool isUsedInScanDirective(
ValueDecl *
D)
const {
545 if (
const SharingMapTy *Stack = getTopOfStackOrNull())
546 return Stack->UsedInScanDirective.contains(
D);
552 DeclRefExpr *PrivateCopy =
nullptr,
unsigned Modifier = 0,
553 bool AppliedToPointee =
false);
562 const Expr *ReductionRef);
568 Expr *&TaskgroupDescriptor)
const;
573 const Expr *&ReductionRef,
574 Expr *&TaskgroupDescriptor)
const;
577 Expr *getTaskgroupReductionRef()
const {
578 assert((getTopOfStack().
Directive == OMPD_taskgroup ||
582 "taskgroup reference expression requested for non taskgroup or "
583 "parallel/worksharing directive.");
584 return getTopOfStack().TaskgroupReductionRef;
588 bool isTaskgroupReductionRef(
const ValueDecl *VD,
unsigned Level)
const {
589 return getStackElemAtLevel(Level).TaskgroupReductionRef &&
590 cast<DeclRefExpr>(getStackElemAtLevel(Level).TaskgroupReductionRef)
596 const DSAVarData getTopDSA(
ValueDecl *
D,
bool FromParent);
598 const DSAVarData getImplicitDSA(
ValueDecl *
D,
bool FromParent)
const;
600 const DSAVarData getImplicitDSA(
ValueDecl *
D,
unsigned Level)
const;
607 DefaultDataSharingAttributes)>
610 bool FromParent)
const;
618 bool FromParent)
const;
625 unsigned Level,
bool NotLastprivate =
false)
const;
629 bool hasExplicitDirective(
631 unsigned Level)
const;
635 const llvm::function_ref<
bool(
638 bool FromParent)
const;
642 const SharingMapTy *Top = getTopOfStackOrNull();
643 return Top ? Top->Directive : OMPD_unknown;
647 assert(!isStackEmpty() &&
"No directive at specified level.");
648 return getStackElemAtLevel(Level).Directive;
652 unsigned OpenMPCaptureLevel)
const {
655 return CaptureRegions[OpenMPCaptureLevel];
659 const SharingMapTy *
Parent = getSecondOnStackOrNull();
664 void addRequiresDecl(
OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
667 template <
typename ClauseType>
bool hasRequiresDeclWithClause()
const {
669 return llvm::any_of(
D->clauselists(), [](
const OMPClause *
C) {
670 return isa<ClauseType>(C);
678 bool IsDuplicate =
false;
681 for (
const OMPClause *CPrev :
D->clauselists()) {
682 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
683 SemaRef.
Diag(CNew->getBeginLoc(),
684 diag::err_omp_requires_clause_redeclaration)
685 << getOpenMPClauseName(CNew->getClauseKind());
686 SemaRef.
Diag(CPrev->getBeginLoc(),
687 diag::note_omp_requires_previous_clause)
688 << getOpenMPClauseName(CPrev->getClauseKind());
699 TargetLocations.push_back(LocStart);
705 AtomicLocation =
Loc;
710 SourceLocation getAtomicDirectiveLoc()
const {
return AtomicLocation; }
714 return TargetLocations;
719 getTopOfStack().DefaultAttr = DSA_none;
720 getTopOfStack().DefaultAttrLoc =
Loc;
724 getTopOfStack().DefaultAttr = DSA_shared;
725 getTopOfStack().DefaultAttrLoc =
Loc;
729 getTopOfStack().DefaultAttr = DSA_private;
730 getTopOfStack().DefaultAttrLoc =
Loc;
734 getTopOfStack().DefaultAttr = DSA_firstprivate;
735 getTopOfStack().DefaultAttrLoc =
Loc;
740 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[
Kind];
741 DMI.ImplicitBehavior = M;
747 return getTopOfStack()
748 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
751 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
754 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
756 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
761 return ConstructTraits;
766 ConstructTraits.append(Traits.begin(), Traits.end());
768 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
769 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
770 assert(Top == Trait &&
"Something left a trait on the stack!");
776 DefaultDataSharingAttributes getDefaultDSA(
unsigned Level)
const {
777 return getStackSize() <=
Level ? DSA_unspecified
778 : getStackElemAtLevel(Level).DefaultAttr;
780 DefaultDataSharingAttributes getDefaultDSA()
const {
781 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
784 return isStackEmpty() ?
SourceLocation() : getTopOfStack().DefaultAttrLoc;
788 return isStackEmpty()
790 : getTopOfStack().DefaultmapMap[
Kind].ImplicitBehavior;
793 getDefaultmapModifierAtLevel(
unsigned Level,
795 return getStackElemAtLevel(Level).DefaultmapMap[
Kind].ImplicitBehavior;
797 bool isDefaultmapCapturedByRef(
unsigned Level,
800 getDefaultmapModifierAtLevel(Level, Kind);
801 if (Kind == OMPC_DEFAULTMAP_scalar || Kind == OMPC_DEFAULTMAP_pointer) {
802 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
803 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
804 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
805 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom) ||
806 (M == OMPC_DEFAULTMAP_MODIFIER_present);
813 case OMPC_DEFAULTMAP_scalar:
814 case OMPC_DEFAULTMAP_pointer:
816 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
817 (M == OMPC_DEFAULTMAP_MODIFIER_default);
818 case OMPC_DEFAULTMAP_aggregate:
819 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
823 llvm_unreachable(
"Unexpected OpenMPDefaultmapClauseKind enum");
825 bool mustBeFirstprivateAtLevel(
unsigned Level,
828 getDefaultmapModifierAtLevel(Level, Kind);
829 return mustBeFirstprivateBase(M, Kind);
833 return mustBeFirstprivateBase(M, Kind);
838 const DSAVarData DVar = getTopDSA(
D,
false);
843 void setOrderedRegion(
bool IsOrdered,
const Expr *Param,
846 getTopOfStack().OrderedRegion.emplace(Param, Clause);
848 getTopOfStack().OrderedRegion.reset();
852 bool isOrderedRegion()
const {
853 if (
const SharingMapTy *Top = getTopOfStackOrNull())
854 return Top->OrderedRegion.has_value();
858 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam()
const {
859 if (
const SharingMapTy *Top = getTopOfStackOrNull())
860 if (Top->OrderedRegion)
861 return *Top->OrderedRegion;
862 return std::make_pair(
nullptr,
nullptr);
866 bool isParentOrderedRegion()
const {
867 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
868 return Parent->OrderedRegion.has_value();
872 std::pair<const Expr *, OMPOrderedClause *>
873 getParentOrderedRegionParam()
const {
874 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
875 if (
Parent->OrderedRegion)
876 return *
Parent->OrderedRegion;
877 return std::make_pair(
nullptr,
nullptr);
880 void setRegionHasOrderConcurrent(
bool HasOrderConcurrent) {
881 getTopOfStack().RegionHasOrderConcurrent = HasOrderConcurrent;
885 bool isParentOrderConcurrent()
const {
886 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
887 return Parent->RegionHasOrderConcurrent;
// Mark the innermost region as nowait (default) or clear the flag.
891 void setNowaitRegion(
bool IsNowait =
true) {
892 getTopOfStack().NowaitRegion = IsNowait;
// True if the parent (second-on-stack) region was marked nowait.
896 bool isParentNowaitRegion()
const {
897 if (
const SharingMapTy *
Parent = getSecondOnStackOrNull())
898 return Parent->NowaitRegion;
// Mark the innermost region as untied (default) or clear the flag.
902 void setUntiedRegion(
bool IsUntied =
true) {
903 getTopOfStack().UntiedRegion = IsUntied;
// True if the innermost region is untied; false on an empty stack.
906 bool isUntiedRegion()
const {
907 const SharingMapTy *Top = getTopOfStackOrNull();
908 return Top ? Top->UntiedRegion :
false;
911 void setParentCancelRegion(
bool Cancel =
true) {
912 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
913 Parent->CancelRegion |= Cancel;
916 bool isCancelRegion()
const {
917 const SharingMapTy *Top = getTopOfStackOrNull();
918 return Top ? Top->CancelRegion :
false;
923 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
927 bool doesParentHasScanDirective()
const {
928 const SharingMapTy *Top = getSecondOnStackOrNull();
929 return Top ? Top->PrevScanLocation.isValid() :
false;
933 const SharingMapTy *Top = getSecondOnStackOrNull();
938 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
942 bool doesParentHasOrderedDirective()
const {
943 const SharingMapTy *Top = getSecondOnStackOrNull();
944 return Top ? Top->PrevOrderedLocation.isValid() :
false;
948 const SharingMapTy *Top = getSecondOnStackOrNull();
953 void setAssociatedLoops(
unsigned Val) {
954 getTopOfStack().AssociatedLoops = Val;
956 getTopOfStack().HasMutipleLoops =
true;
959 unsigned getAssociatedLoops()
const {
960 const SharingMapTy *Top = getTopOfStackOrNull();
961 return Top ? Top->AssociatedLoops : 0;
964 bool hasMutipleLoops()
const {
965 const SharingMapTy *Top = getTopOfStackOrNull();
966 return Top ? Top->HasMutipleLoops :
false;
972 if (SharingMapTy *
Parent = getSecondOnStackOrNull())
973 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
976 bool hasInnerTeamsRegion()
const {
977 return getInnerTeamsRegionLoc().
isValid();
981 const SharingMapTy *Top = getTopOfStackOrNull();
985 Scope *getCurScope()
const {
986 const SharingMapTy *Top = getTopOfStackOrNull();
987 return Top ? Top->CurScope :
nullptr;
989 void setContext(
DeclContext *DC) { getTopOfStack().Context = DC; }
991 const SharingMapTy *Top = getTopOfStackOrNull();
997 bool checkMappableExprComponentListsForDecl(
998 const ValueDecl *VD,
bool CurrentRegionOnly,
999 const llvm::function_ref<
1011 if (CurrentRegionOnly)
1014 std::advance(SI, 1);
1016 for (; SI != SE; ++SI) {
1017 auto MI = SI->MappedExprComponents.find(VD);
1018 if (MI != SI->MappedExprComponents.end())
1020 MI->second.Components)
1021 if (Check(L, MI->second.Kind))
1029 bool checkMappableExprComponentListsForDeclAtLevel(
1031 const llvm::function_ref<
1035 if (getStackSize() <= Level)
1038 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1039 auto MI = StackElem.MappedExprComponents.find(VD);
1040 if (MI != StackElem.MappedExprComponents.end())
1042 MI->second.Components)
1043 if (Check(L, MI->second.Kind))
1050 void addMappableExpressionComponents(
1054 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1056 MEC.Components.resize(MEC.Components.size() + 1);
1057 MEC.Components.back().append(Components.begin(), Components.end());
1058 MEC.Kind = WhereFoundClauseKind;
1061 unsigned getNestingLevel()
const {
1062 assert(!isStackEmpty());
1063 return getStackSize() - 1;
1065 void addDoacrossDependClause(
OMPClause *
C,
const OperatorOffsetTy &OpsOffs) {
1066 SharingMapTy *
Parent = getSecondOnStackOrNull();
1068 Parent->DoacrossDepends.try_emplace(
C, OpsOffs);
1070 llvm::iterator_range<DoacrossClauseMapTy::const_iterator>
1071 getDoacrossDependClauses()
const {
1072 const SharingMapTy &StackElem = getTopOfStack();
1074 const DoacrossClauseMapTy &Ref = StackElem.DoacrossDepends;
1075 return llvm::make_range(Ref.begin(), Ref.end());
1077 return llvm::make_range(StackElem.DoacrossDepends.end(),
1078 StackElem.DoacrossDepends.end());
1082 void addMappedClassesQualTypes(
QualType QT) {
1083 SharingMapTy &StackElem = getTopOfStack();
1084 StackElem.MappedClassesQualTypes.insert(QT);
1088 bool isClassPreviouslyMapped(
QualType QT)
const {
1089 const SharingMapTy &StackElem = getTopOfStack();
1090 return StackElem.MappedClassesQualTypes.contains(QT);
1094 void addToParentTargetRegionLinkGlobals(
DeclRefExpr *
E) {
1095 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1096 E->getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1097 "Expected declare target link global.");
1098 for (
auto &Elem : *
this) {
1100 Elem.DeclareTargetLinkVarDecls.push_back(
E);
1110 "Expected target executable directive.");
1111 return getTopOfStack().DeclareTargetLinkVarDecls;
1115 void addInnerAllocatorExpr(
Expr *
E) {
1116 getTopOfStack().InnerUsedAllocators.push_back(
E);
1120 return getTopOfStack().InnerUsedAllocators;
1124 void addImplicitTaskFirstprivate(
unsigned Level,
Decl *
D) {
1125 getStackElemAtLevel(Level).ImplicitTaskFirstprivates.insert(
D);
1128 bool isImplicitTaskFirstprivate(
Decl *
D)
const {
1129 return getTopOfStack().ImplicitTaskFirstprivates.contains(
D);
1133 void addUsesAllocatorsDecl(
const Decl *
D, UsesAllocatorsDeclKind Kind) {
1134 getTopOfStack().UsesAllocatorsDecls.try_emplace(
D, Kind);
1138 std::optional<UsesAllocatorsDeclKind>
1139 isUsesAllocatorsDecl(
unsigned Level,
const Decl *
D)
const {
1140 const SharingMapTy &StackElem = getTopOfStack();
1141 auto I = StackElem.UsesAllocatorsDecls.find(
D);
1142 if (I == StackElem.UsesAllocatorsDecls.end())
1143 return std::nullopt;
1144 return I->getSecond();
1146 std::optional<UsesAllocatorsDeclKind>
1147 isUsesAllocatorsDecl(
const Decl *
D)
const {
1148 const SharingMapTy &StackElem = getTopOfStack();
1149 auto I = StackElem.UsesAllocatorsDecls.find(
D);
1150 if (I == StackElem.UsesAllocatorsDecls.end())
1151 return std::nullopt;
1152 return I->getSecond();
1155 void addDeclareMapperVarRef(
Expr *Ref) {
1156 SharingMapTy &StackElem = getTopOfStack();
1157 StackElem.DeclareMapperVar = Ref;
1159 const Expr *getDeclareMapperVarRef()
const {
1160 const SharingMapTy *Top = getTopOfStackOrNull();
1161 return Top ? Top->DeclareMapperVar :
nullptr;
1165 void addIteratorVarDecl(
VarDecl *VD) {
1166 SharingMapTy &StackElem = getTopOfStack();
1170 bool isIteratorVarDecl(
const VarDecl *VD)
const {
1171 const SharingMapTy *Top = getTopOfStackOrNull();
1179 const_iterator I = begin();
1180 const_iterator EndI = end();
1181 size_t StackLevel = getStackSize();
1182 for (; I != EndI; ++I) {
1183 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1187 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1190 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1191 if (IFD.FD == FD && IFD.StackLevel == StackLevel)
1196 bool isImplicitDefaultFirstprivateFD(
VarDecl *VD)
const {
1197 const_iterator I = begin();
1198 const_iterator EndI = end();
1199 for (; I != EndI; ++I)
1200 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1204 for (
const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1211 iterator I = begin();
1212 const_iterator EndI = end();
1213 size_t StackLevel = getStackSize();
1214 for (; I != EndI; ++I) {
1215 if (I->DefaultAttr == DSA_private || I->DefaultAttr == DSA_firstprivate) {
1216 I->ImplicitDefaultFirstprivateFDs.emplace_back(FD, StackLevel, VD);
1221 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1231 DKind == OMPD_unknown;
1237 if (
const auto *FE = dyn_cast<FullExpr>(
E))
1238 E = FE->getSubExpr();
1240 if (
const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(
E))
1241 E = MTE->getSubExpr();
1243 while (
const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(
E))
1244 E = Binder->getSubExpr();
1246 if (
const auto *ICE = dyn_cast<ImplicitCastExpr>(
E))
1247 E = ICE->getSubExprAsWritten();
1256 if (
const auto *CED = dyn_cast<OMPCapturedExprDecl>(
D))
1257 if (
const auto *ME = dyn_cast<MemberExpr>(
getExprAsWritten(CED->getInit())))
1258 D = ME->getMemberDecl();
1269DSAStackTy::DSAVarData DSAStackTy::getDSA(const_iterator &
Iter,
1272 auto *VD = dyn_cast<VarDecl>(
D);
1273 const auto *FD = dyn_cast<FieldDecl>(
D);
1275 if (
Iter == end()) {
1282 DVar.CKind = OMPC_shared;
1289 DVar.CKind = OMPC_shared;
1293 DVar.CKind = OMPC_shared;
1304 DVar.CKind = OMPC_private;
1308 DVar.DKind =
Iter->Directive;
1311 if (
Iter->SharingMap.count(
D)) {
1312 const DSAInfo &
Data =
Iter->SharingMap.lookup(
D);
1313 DVar.RefExpr =
Data.RefExpr.getPointer();
1314 DVar.PrivateCopy =
Data.PrivateCopy;
1315 DVar.CKind =
Data.Attributes;
1316 DVar.ImplicitDSALoc =
Iter->DefaultAttrLoc;
1317 DVar.Modifier =
Data.Modifier;
1318 DVar.AppliedToPointee =
Data.AppliedToPointee;
1326 switch (
Iter->DefaultAttr) {
1328 DVar.CKind = OMPC_shared;
1329 DVar.ImplicitDSALoc =
Iter->DefaultAttrLoc;
1333 case DSA_firstprivate:
1336 DVar.CKind = OMPC_unknown;
1338 DVar.CKind = OMPC_firstprivate;
1340 DVar.ImplicitDSALoc =
Iter->DefaultAttrLoc;
1348 DVar.CKind = OMPC_unknown;
1350 DVar.CKind = OMPC_private;
1352 DVar.ImplicitDSALoc =
Iter->DefaultAttrLoc;
1354 case DSA_unspecified:
1359 DVar.ImplicitDSALoc =
Iter->DefaultAttrLoc;
1363 DVar.CKind = OMPC_shared;
1373 DSAVarData DVarTemp;
1374 const_iterator I =
Iter,
E = end();
1382 DVarTemp = getDSA(I,
D);
1383 if (DVarTemp.CKind != OMPC_shared) {
1384 DVar.RefExpr =
nullptr;
1385 DVar.CKind = OMPC_firstprivate;
1388 }
while (I !=
E && !isImplicitTaskingRegion(I->Directive));
1390 (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
1399 return getDSA(++
Iter,
D);
1403 const Expr *NewDE) {
1404 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1406 SharingMapTy &StackElem = getTopOfStack();
1407 auto It = StackElem.AlignedMap.find(
D);
1408 if (It == StackElem.AlignedMap.end()) {
1409 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1410 StackElem.AlignedMap[
D] = NewDE;
1413 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1418 const Expr *NewDE) {
1419 assert(!isStackEmpty() &&
"Data sharing attributes stack is empty");
1421 SharingMapTy &StackElem = getTopOfStack();
1422 auto It = StackElem.NontemporalMap.find(
D);
1423 if (It == StackElem.NontemporalMap.end()) {
1424 assert(NewDE &&
"Unexpected nullptr expr to be added into aligned map");
1425 StackElem.NontemporalMap[
D] = NewDE;
1428 assert(It->second &&
"Unexpected nullptr expr in the aligned map");
1433 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1435 SharingMapTy &StackElem = getTopOfStack();
1436 StackElem.LCVMap.try_emplace(
1437 D, LCDeclInfo(StackElem.LCVMap.size() + 1,
Capture));
// Checks whether D is registered as a loop control variable of the innermost
// directive.  Returns {0, nullptr} when D is not in the top LCVMap;
// presumably returns the recorded LCDeclInfo (It->second) on a hit -- that
// return line is not visible here, confirm against the full source.
1440const DSAStackTy::LCDeclInfo
1441DSAStackTy::isLoopControlVariable(
const ValueDecl *
D)
const {
1442 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1444 const SharingMapTy &StackElem = getTopOfStack();
1445 auto It = StackElem.LCVMap.find(
D);
1446 if (It != StackElem.LCVMap.end())
1448 return {0,
nullptr};
1451const DSAStackTy::LCDeclInfo
1452DSAStackTy::isLoopControlVariable(
const ValueDecl *
D,
unsigned Level)
const {
1453 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1455 for (
unsigned I = Level + 1; I > 0; --I) {
1456 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1457 auto It = StackElem.LCVMap.find(
D);
1458 if (It != StackElem.LCVMap.end())
1461 return {0,
nullptr};
1464const DSAStackTy::LCDeclInfo
1465DSAStackTy::isParentLoopControlVariable(
const ValueDecl *
D)
const {
1466 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1467 assert(
Parent &&
"Data-sharing attributes stack is empty");
1469 auto It =
Parent->LCVMap.find(
D);
1470 if (It !=
Parent->LCVMap.end())
1472 return {0,
nullptr};
1475const ValueDecl *DSAStackTy::getParentLoopControlVariable(
unsigned I)
const {
1476 const SharingMapTy *
Parent = getSecondOnStackOrNull();
1477 assert(
Parent &&
"Data-sharing attributes stack is empty");
1478 if (
Parent->LCVMap.size() < I)
1480 for (
const auto &Pair :
Parent->LCVMap)
1481 if (Pair.second.first == I)
1488 bool AppliedToPointee) {
1490 if (A == OMPC_threadprivate) {
1491 DSAInfo &
Data = Threadprivates[
D];
1492 Data.Attributes = A;
1493 Data.RefExpr.setPointer(
E);
1494 Data.PrivateCopy =
nullptr;
1495 Data.Modifier = Modifier;
1497 DSAInfo &
Data = getTopOfStack().SharingMap[
D];
1498 assert(
Data.Attributes == OMPC_unknown || (A ==
Data.Attributes) ||
1499 (A == OMPC_firstprivate &&
Data.Attributes == OMPC_lastprivate) ||
1500 (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) ||
1501 (isLoopControlVariable(
D).first && A == OMPC_private));
1502 Data.Modifier = Modifier;
1503 if (A == OMPC_lastprivate &&
Data.Attributes == OMPC_firstprivate) {
1504 Data.RefExpr.setInt(
true);
1507 const bool IsLastprivate =
1508 A == OMPC_lastprivate ||
Data.Attributes == OMPC_lastprivate;
1509 Data.Attributes = A;
1510 Data.RefExpr.setPointerAndInt(
E, IsLastprivate);
1511 Data.PrivateCopy = PrivateCopy;
1512 Data.AppliedToPointee = AppliedToPointee;
1514 DSAInfo &
Data = getTopOfStack().SharingMap[PrivateCopy->
getDecl()];
1515 Data.Modifier = Modifier;
1516 Data.Attributes = A;
1517 Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
1518 Data.PrivateCopy =
nullptr;
1519 Data.AppliedToPointee = AppliedToPointee;
1526 StringRef Name,
const AttrVec *Attrs =
nullptr,
1541 OMPReferencedVarAttr::CreateImplicit(SemaRef.
Context, OrigRef));
1548 bool RefersToCapture =
false) {
1559 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1561 getTopOfStack().SharingMap[
D].Attributes == OMPC_reduction &&
1562 "Additional reduction info may be specified only for reduction items.");
1563 ReductionData &ReductionData = getTopOfStack().ReductionMap[
D];
1564 assert(ReductionData.ReductionRange.isInvalid() &&
1565 (getTopOfStack().
Directive == OMPD_taskgroup ||
1569 "Additional reduction info may be specified only once for reduction "
1571 ReductionData.set(BOK, SR);
1572 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1573 if (!TaskgroupReductionRef) {
1576 TaskgroupReductionRef =
1582 const Expr *ReductionRef) {
1584 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty");
1586 getTopOfStack().SharingMap[
D].Attributes == OMPC_reduction &&
1587 "Additional reduction info may be specified only for reduction items.");
1588 ReductionData &ReductionData = getTopOfStack().ReductionMap[
D];
1589 assert(ReductionData.ReductionRange.isInvalid() &&
1590 (getTopOfStack().
Directive == OMPD_taskgroup ||
1594 "Additional reduction info may be specified only once for reduction "
1596 ReductionData.set(ReductionRef, SR);
1597 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1598 if (!TaskgroupReductionRef) {
1601 TaskgroupReductionRef =
1606const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1608 Expr *&TaskgroupDescriptor)
const {
1610 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1611 for (const_iterator I = begin() + 1,
E = end(); I !=
E; ++I) {
1612 const DSAInfo &
Data = I->SharingMap.lookup(
D);
1613 if (
Data.Attributes != OMPC_reduction ||
1614 Data.Modifier != OMPC_REDUCTION_task)
1616 const ReductionData &ReductionData = I->ReductionMap.lookup(
D);
1617 if (!ReductionData.ReductionOp ||
1618 ReductionData.ReductionOp.is<
const Expr *>())
1619 return DSAVarData();
1620 SR = ReductionData.ReductionRange;
1621 BOK = ReductionData.ReductionOp.get<ReductionData::BOKPtrType>();
1622 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1623 "expression for the descriptor is not "
1625 TaskgroupDescriptor = I->TaskgroupReductionRef;
1626 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1627 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1630 return DSAVarData();
// Overload of the previous function for *user-defined* reduction operations:
// succeeds only when the stored ReductionOp is an Expr* (note the negated
// is<const Expr *>() test below, the mirror of the builtin-operator
// overload), returning the reduction reference expression via ReductionRef.
// NOTE(review): several interior source lines are elided in this excerpt;
// comments describe only the visible code.
1633const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1635 Expr *&TaskgroupDescriptor)
const {
1637 assert(!isStackEmpty() &&
"Data-sharing attributes stack is empty.");
1638 for (const_iterator I = begin() + 1,
E = end(); I !=
E; ++I) {
1639 const DSAInfo &
Data = I->SharingMap.lookup(
D);
// Only task-modified reduction items are of interest here.
1640 if (
Data.Attributes != OMPC_reduction ||
1641 Data.Modifier != OMPC_REDUCTION_task)
1643 const ReductionData &ReductionData = I->ReductionMap.lookup(
D);
// Bail out unless the stored reduction operation is a user-defined
// expression -- builtin operators are handled by the other overload.
1644 if (!ReductionData.ReductionOp ||
1645 !ReductionData.ReductionOp.is<
const Expr *>())
1646 return DSAVarData();
1647 SR = ReductionData.ReductionRange;
1648 ReductionRef = ReductionData.ReductionOp.get<
const Expr *>();
1649 assert(I->TaskgroupReductionRef &&
"taskgroup reduction reference "
1650 "expression for the descriptor is not "
1652 TaskgroupDescriptor = I->TaskgroupReductionRef;
1653 return DSAVarData(I->Directive, OMPC_reduction,
Data.RefExpr.getPointer(),
1654 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
// No matching task-reduction item found in any enclosing region.
1657 return DSAVarData();
// Determines whether variable D is local to an OpenMP region, walking the
// region stack from iterator I outward.  For tasking regions the enclosing
// Scope chain is walked up to TopScope looking for the declaring scope; the
// visible fallback compares the region's stored DeclContext against DC.
// NOTE(review): several interior source lines (including parts of the
// conditions and DC's initialization) are elided in this excerpt.
1660bool DSAStackTy::isOpenMPLocal(
VarDecl *
D, const_iterator I)
const {
1662 for (const_iterator
E = end(); I !=
E; ++I) {
1663 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
// Scope-based check: D is region-local iff its declaring scope is
// found strictly inside TopScope.
1667 Scope *CurScope = getCurScope();
1668 while (CurScope && CurScope != TopScope && !CurScope->
isDeclScope(
D))
1670 return CurScope != TopScope;
// DeclContext-based check for the remaining region kinds.
1673 if (I->Context == DC)
1682 bool AcceptIfMutable =
true,
1683 bool *IsClassType =
nullptr) {
1685 Type =
Type.getNonReferenceType().getCanonicalType();
1686 bool IsConstant =
Type.isConstant(Context);
1691 if (
const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1693 RD = CTD->getTemplatedDecl();
1696 return IsConstant && !(SemaRef.
getLangOpts().CPlusPlus && RD &&
1703 bool AcceptIfMutable =
true,
1704 bool ListItemNotVar =
false) {
1708 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1709 : IsClassType ? diag::err_omp_const_not_mutable_variable
1710 : diag::err_omp_const_variable;
1711 SemaRef.
Diag(ELoc,
Diag) << getOpenMPClauseName(CKind);
1712 if (!ListItemNotVar &&
D) {
1713 const VarDecl *VD = dyn_cast<VarDecl>(
D);
1717 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
// Returns the topmost data-sharing attribute recorded for declaration D.
// Visible resolution order: (1) explicit threadprivate registrations, (2) the
// OMPThreadPrivateDeclAttr attribute, (3) special handling around target
// regions (variables local to a target region are marked threadprivate),
// (4) explicit DSA recorded in the region SharingMap, with an OpenMP <= 3.1
// compatibility path consulting the innermost firstprivate/shared DSA.
// NOTE(review): many interior source lines are elided in this excerpt;
// comments below describe only the visible code.
1725const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(
ValueDecl *
D,
1730 auto *VD = dyn_cast<VarDecl>(
D);
// (1) Declarations explicitly registered as threadprivate.
1731 auto TI = Threadprivates.find(
D);
1732 if (TI != Threadprivates.end()) {
1733 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1734 DVar.CKind = OMPC_threadprivate;
1735 DVar.Modifier = TI->getSecond().Modifier;
// (2) Variables carrying the OMPThreadPrivateDeclAttr attribute; a DSA
// entry is recorded via addDSA() so later lookups hit case (1).
1738 if (VD && VD->
hasAttr<OMPThreadPrivateDeclAttr>()) {
1740 SemaRef, VD,
D->getType().getNonReferenceType(),
1741 VD->
getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1742 DVar.CKind = OMPC_threadprivate;
1743 addDSA(
D, DVar.RefExpr, OMPC_threadprivate);
1750 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
1756 SemaRef, VD,
D->getType().getNonReferenceType(),
D->
getLocation());
1757 DVar.CKind = OMPC_threadprivate;
1758 addDSA(
D, DVar.RefExpr, OMPC_threadprivate);
// (3) Target-region handling for non-loop-control variables: locate the
// relevant region (find_if predicate body elided) and decide whether the
// variable behaves as threadprivate there.
1763 !isLoopControlVariable(
D).first) {
1764 const_iterator IterTarget =
1765 std::find_if(begin(), end(), [](
const SharingMapTy &
Data) {
1768 if (IterTarget != end()) {
1769 const_iterator ParentIterTarget = IterTarget + 1;
1770 for (const_iterator
Iter = begin();
Iter != ParentIterTarget; ++
Iter) {
// Variables local to an inner OpenMP region are threadprivate here.
1771 if (isOpenMPLocal(VD,
Iter)) {
1775 DVar.CKind = OMPC_threadprivate;
1779 if (!isClauseParsingMode() || IterTarget != begin()) {
1780 auto DSAIter = IterTarget->SharingMap.find(
D);
1781 if (DSAIter != IterTarget->SharingMap.end() &&
1783 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1784 DVar.CKind = OMPC_threadprivate;
1787 const_iterator End = end();
1789 D, std::distance(ParentIterTarget, End),
1793 IterTarget->ConstructLoc);
1794 DVar.CKind = OMPC_threadprivate;
// (4) Explicit DSA lookup on the (possibly parent) top-of-stack region;
// on a hit every DVar field is populated from the stored DSAInfo.
1814 const_iterator I = begin();
1815 const_iterator EndI = end();
1816 if (FromParent && I != EndI)
1819 auto It = I->SharingMap.find(
D);
1820 if (It != I->SharingMap.end()) {
1821 const DSAInfo &
Data = It->getSecond();
1822 DVar.RefExpr =
Data.RefExpr.getPointer();
1823 DVar.PrivateCopy =
Data.PrivateCopy;
1824 DVar.CKind =
Data.Attributes;
1825 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1826 DVar.DKind = I->Directive;
1827 DVar.Modifier =
Data.Modifier;
1828 DVar.AppliedToPointee =
Data.AppliedToPointee;
1833 DVar.CKind = OMPC_shared;
// OpenMP <= 3.1 compatibility: consult the innermost
// firstprivate/shared DSA before defaulting to shared.
1840 if (SemaRef.
LangOpts.OpenMP <= 31) {
1848 DSAVarData DVarTemp = hasInnermostDSA(
1851 return C == OMPC_firstprivate ||
C == OMPC_shared;
1853 MatchesAlways, FromParent);
1854 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1857 DVar.CKind = OMPC_shared;
// Final SharingMap lookup (mirrors the block above) populating DVar.
1864 const_iterator I = begin();
1865 const_iterator EndI = end();
1866 if (FromParent && I != EndI)
1870 auto It = I->SharingMap.find(
D);
1871 if (It != I->SharingMap.end()) {
1872 const DSAInfo &
Data = It->getSecond();
1873 DVar.RefExpr =
Data.RefExpr.getPointer();
1874 DVar.PrivateCopy =
Data.PrivateCopy;
1875 DVar.CKind =
Data.Attributes;
1876 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1877 DVar.DKind = I->Directive;
1878 DVar.Modifier =
Data.Modifier;
1879 DVar.AppliedToPointee =
Data.AppliedToPointee;
// Computes the implicit data-sharing attribute for D, starting from the top
// of the region stack (or from the parent region when FromParent is true)
// and delegating the actual rules to getDSA().
// NOTE(review): a few interior source lines are elided in this excerpt.
1885const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *
D,
1886 bool FromParent)
const {
1887 if (isStackEmpty()) {
1889 return getDSA(I,
D);
1892 const_iterator StartI = begin();
1893 const_iterator EndI = end();
// FromParent: skip the current region and evaluate in the parent.
1894 if (FromParent && StartI != EndI)
1896 return getDSA(StartI,
D);
// Overload computing the implicit DSA for D at an explicit region nesting
// Level.  Levels count from the bottom of the stack, hence the
// getStackSize() - 1 - Level conversion to an iterator offset from begin().
1899const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(
ValueDecl *
D,
1900 unsigned Level)
const {
// Out-of-range level: no DSA information available.
1901 if (getStackSize() <= Level)
1902 return DSAVarData();
1904 const_iterator StartI = std::next(begin(), getStackSize() - 1 - Level);
1905 return getDSA(StartI,
D);
// Presumably DSAStackTy::hasDSA (the name/parameter lines are elided in this
// excerpt; call sites of the form DSAStack->hasDSA(...) appear elsewhere in
// the file).  Walks the region stack (optionally starting at the parent when
// FromParent is true) and returns the first DSA for D that satisfies both
// the directive predicate DPred and the clause predicate CPred.
1908const DSAStackTy::DSAVarData
1911 DefaultDataSharingAttributes)>
1914 bool FromParent)
const {
1918 const_iterator I = begin();
1919 const_iterator EndI = end();
1920 if (FromParent && I != EndI)
1922 for (; I != EndI; ++I) {
// Skip regions that match neither the caller's directive predicate nor
// an implicit/explicit tasking region.
1923 if (!DPred(I->Directive) &&
1924 !isImplicitOrExplicitTaskingRegion(I->Directive))
// getDSA may advance NewI; only accept a DSA found in this very region
// (I == NewI) that also satisfies the clause predicate.
1926 const_iterator NewI = I;
1927 DSAVarData DVar = getDSA(NewI,
D);
1928 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee, I->DefaultAttr))
// Like hasDSA, but inspects only the innermost region (or the parent region
// when FromParent is true) instead of walking the whole stack.
// NOTE(review): a few interior source lines are elided in this excerpt.
1934const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1937 bool FromParent)
const {
1941 const_iterator StartI = begin();
1942 const_iterator EndI = end();
1943 if (FromParent && StartI != EndI)
// The single candidate region must satisfy the directive predicate.
1945 if (StartI == EndI || !DPred(StartI->Directive))
// Accept only a DSA found in this region itself (NewI unchanged by
// getDSA) that satisfies the clause predicate.
1947 const_iterator NewI = StartI;
1948 DSAVarData DVar = getDSA(NewI,
D);
1949 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
// Returns true if D has an *explicitly specified* DSA at region nesting
// Level that satisfies CPred.  With NotLastprivate set, entries whose
// RefExpr int-flag is set (visible below as RefExpr.getInt()) are rejected.
// Loop-control variables (LCVMap) count as explicit private.
1954bool DSAStackTy::hasExplicitDSA(
1957 unsigned Level,
bool NotLastprivate)
const {
1958 if (getStackSize() <= Level)
1961 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1962 auto I = StackElem.SharingMap.find(
D);
// Explicit entries carry a non-null RefExpr pointer.
1963 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
1964 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
1965 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
// Loop-control variables at this level are treated as private.
1968 auto LI = StackElem.LCVMap.find(
D);
1969 if (LI != StackElem.LCVMap.end())
1970 return CPred(OMPC_private,
false);
// Returns true if the region at nesting Level exists and its directive kind
// satisfies the caller-supplied predicate DPred.
1974bool DSAStackTy::hasExplicitDirective(
1976 unsigned Level)
const {
1977 if (getStackSize() <= Level)
1979 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1980 return DPred(StackElem.Directive);
// Returns true if any enclosing region satisfies DPred (directive kind,
// directive name, construct location).  Skips the current region, and also
// the parent region when FromParent is true (Skip = 2), clamped to the
// stack size.
1983bool DSAStackTy::hasDirective(
1987 bool FromParent)
const {
1989 size_t Skip = FromParent ? 2 : 1;
1990 for (const_iterator I = begin() + std::min(Skip, getStackSize()),
E = end();
1992 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
1998void SemaOpenMP::InitDataSharingAttributesStack() {
1999 VarDataSharingAttributesStack =
new DSAStackTy(
SemaRef);
2002#define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
2004void SemaOpenMP::pushOpenMPFunctionRegion() {
DSAStack->pushFunction(); }
2012 "Expected OpenMP device compilation.");
2018enum class FunctionEmissionStatus {
2029 "Expected OpenMP device compilation.");
2031 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2036 Kind = SemaDiagnosticBuilder::K_Immediate;
2047 ? SemaDiagnosticBuilder::K_Deferred
2048 : SemaDiagnosticBuilder::K_Immediate;
2052 Kind = SemaDiagnosticBuilder::K_Nop;
2055 llvm_unreachable(
"CUDADiscarded unexpected in OpenMP device compilation");
2067 "Expected OpenMP host compilation.");
2069 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2074 Kind = SemaDiagnosticBuilder::K_Immediate;
2077 Kind = SemaDiagnosticBuilder::K_Deferred;
2082 Kind = SemaDiagnosticBuilder::K_Nop;
2092 if (LO.OpenMP <= 45) {
2094 return OMPC_DEFAULTMAP_scalar;
2095 return OMPC_DEFAULTMAP_aggregate;
2098 return OMPC_DEFAULTMAP_pointer;
2100 return OMPC_DEFAULTMAP_scalar;
2101 return OMPC_DEFAULTMAP_aggregate;
2105 unsigned OpenMPCaptureLevel)
const {
2106 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2109 bool IsByRef =
true;
2115 bool IsVariableUsedInMapClause =
false;
2177 bool IsVariableAssociatedWithSection =
false;
2179 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2181 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2188 if (WhereFoundClauseKind != OMPC_map &&
2189 WhereFoundClauseKind != OMPC_has_device_addr)
2192 auto EI = MapExprComponents.rbegin();
2193 auto EE = MapExprComponents.rend();
2195 assert(EI != EE &&
"Invalid map expression!");
2197 if (isa<DeclRefExpr>(EI->getAssociatedExpression()))
2198 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() ==
D;
2203 auto Last = std::prev(EE);
2205 dyn_cast<UnaryOperator>(
Last->getAssociatedExpression());
2206 if ((UO && UO->getOpcode() == UO_Deref) ||
2207 isa<ArraySubscriptExpr>(
Last->getAssociatedExpression()) ||
2208 isa<ArraySectionExpr>(
Last->getAssociatedExpression()) ||
2209 isa<MemberExpr>(EI->getAssociatedExpression()) ||
2210 isa<OMPArrayShapingExpr>(
Last->getAssociatedExpression())) {
2211 IsVariableAssociatedWithSection =
true;
2220 if (IsVariableUsedInMapClause) {
2223 IsByRef = !(Ty->
isPointerType() && IsVariableAssociatedWithSection);
2228 IsByRef = (
DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2231 DSAStack->isDefaultmapCapturedByRef(
2236 return K == OMPC_reduction && !AppliedToPointee;
2244 ((IsVariableUsedInMapClause &&
2245 DSAStack->getCaptureRegion(Level, OpenMPCaptureLevel) ==
2250 return K == OMPC_firstprivate ||
2251 (K == OMPC_reduction && AppliedToPointee);
2254 DSAStack->isUsesAllocatorsDecl(Level,
D))) &&
2257 !(isa<OMPCapturedExprDecl>(
D) && !
D->
hasAttr<OMPCaptureNoInitAttr>() &&
2258 !cast<OMPCapturedExprDecl>(
D)->getInit()->isGLValue()) &&
2261 !((
DSAStack->getDefaultDSA() == DSA_firstprivate ||
2262 DSAStack->getDefaultDSA() == DSA_private) &&
2266 !
DSAStack->isLoopControlVariable(
D, Level).first);
// Returns the current OpenMP region nesting level as tracked by the
// data-sharing attributes stack.
// NOTE(review): at least one interior source line is elided in this excerpt.
2283unsigned SemaOpenMP::getOpenMPNestingLevel()
const {
2285 return DSAStack->getNestingLevel();
2295 !
DSAStack->isClauseParsingMode()) ||
2306 if (!dyn_cast<FieldDecl>(
D))
2308 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2311 DefaultDataSharingAttributes DefaultAttr) {
2313 (DefaultAttr == DSA_firstprivate || DefaultAttr == DSA_private);
2317 if (DVarPrivate.CKind != OMPC_unknown)
2323 Expr *CaptureExpr,
bool WithInit,
2329 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2332 auto *VD = dyn_cast<VarDecl>(
D);
2341 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2351 DSAStackTy::DSAVarData DVarTop =
2353 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2358 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2365 if (!isa<CapturingScopeInfo>(FSI))
2367 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2373 assert(CSI &&
"Failed to find CapturedRegionScopeInfo");
2384 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2390 if (CheckScopeInfo) {
2391 bool OpenMPFound =
false;
2392 for (
unsigned I = StopAt + 1; I > 0; --I) {
2394 if (!isa<CapturingScopeInfo>(FSI))
2396 if (
auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2406 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2407 (!
DSAStack->isClauseParsingMode() ||
2408 DSAStack->getParentDirective() != OMPD_unknown)) {
2409 auto &&Info =
DSAStack->isLoopControlVariable(
D);
2412 isImplicitOrExplicitTaskingRegion(
DSAStack->getCurrentDirective())) ||
2413 (VD &&
DSAStack->isForceVarCapturing()))
2414 return VD ? VD : Info.second;
2415 DSAStackTy::DSAVarData DVarTop =
2417 if (DVarTop.CKind != OMPC_unknown &&
isOpenMPPrivate(DVarTop.CKind) &&
2419 return VD ? VD : cast<VarDecl>(DVarTop.PrivateCopy->getDecl());
2425 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2433 if (VD && !VD->
hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2434 ((
DSAStack->getDefaultDSA() != DSA_none &&
2435 DSAStack->getDefaultDSA() != DSA_private &&
2436 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2437 DVarTop.CKind == OMPC_shared))
2439 auto *FD = dyn_cast<FieldDecl>(
D);
2440 if (DVarPrivate.CKind != OMPC_unknown && !VD && FD &&
2441 !DVarPrivate.PrivateCopy) {
2442 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2445 DefaultDataSharingAttributes DefaultAttr) {
2447 (DefaultAttr == DSA_firstprivate ||
2448 DefaultAttr == DSA_private);
2452 if (DVarPrivate.CKind == OMPC_unknown)
2475 VD = cast<VarDecl>(VDPrivateRefExpr->
getDecl());
2476 DSAStack->addImplicitDefaultFirstprivateFD(FD, VD);
2479 if (DVarPrivate.CKind != OMPC_unknown ||
2480 (VD && (
DSAStack->getDefaultDSA() == DSA_none ||
2481 DSAStack->getDefaultDSA() == DSA_private ||
2482 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2483 return VD ? VD : cast<VarDecl>(DVarPrivate.PrivateCopy->getDecl());
2488void SemaOpenMP::adjustOpenMPTargetScopeIndex(
unsigned &FunctionScopesIndex,
2489 unsigned Level)
const {
2494 assert(
getLangOpts().OpenMP &&
"OpenMP must be enabled.");
2500 assert(
getLangOpts().OpenMP &&
"OpenMP must be enabled.");
2502 DSAStack->resetPossibleLoopCounter();
2508 unsigned CapLevel)
const {
2509 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2510 if (
DSAStack->getCurrentDirective() != OMPD_unknown &&
2511 (!
DSAStack->isClauseParsingMode() ||
2512 DSAStack->getParentDirective() != OMPD_unknown)) {
2513 DSAStackTy::DSAVarData DVarPrivate =
DSAStack->hasDSA(
2516 DefaultDataSharingAttributes DefaultAttr) {
2518 DefaultAttr == DSA_private;
2522 if (DVarPrivate.CKind == OMPC_private && isa<OMPCapturedExprDecl>(
D) &&
2523 DSAStack->isImplicitDefaultFirstprivateFD(cast<VarDecl>(
D)) &&
2524 !
DSAStack->isLoopControlVariable(
D).first)
2525 return OMPC_private;
2528 bool IsTriviallyCopyable =
2529 D->getType().getNonReferenceType().isTriviallyCopyableType(
2532 .getNonReferenceType()
2534 ->getAsCXXRecordDecl();
2539 (IsTriviallyCopyable ||
2545 return OMPC_firstprivate;
2546 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(
D, Level);
2547 if (DVar.CKind != OMPC_shared &&
2548 !
DSAStack->isLoopControlVariable(
D, Level).first && !DVar.RefExpr) {
2549 DSAStack->addImplicitTaskFirstprivate(Level,
D);
2550 return OMPC_firstprivate;
2559 return OMPC_private;
2562 DSAStack->isLoopControlVariable(
D).first) &&
2567 return OMPC_private;
2569 if (
const auto *VD = dyn_cast<VarDecl>(
D)) {
2575 return OMPC_private;
2580 DSAStack->isUsesAllocatorsDecl(Level,
D).value_or(
2581 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2582 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2583 return OMPC_private;
2587 (
DSAStack->isClauseParsingMode() &&
2588 DSAStack->getClauseParsingMode() == OMPC_private) ||
2593 return K == OMPD_taskgroup ||
2594 ((isOpenMPParallelDirective(K) ||
2595 isOpenMPWorksharingDirective(K)) &&
2596 !isOpenMPSimdDirective(K));
2599 DSAStack->isTaskgroupReductionRef(
D, Level)))
2606 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2609 for (
unsigned I =
DSAStack->getNestingLevel() + 1; I > Level; --I) {
2610 const unsigned NewLevel = I - 1;
2614 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2622 if (
DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2632 if (
DSAStack->mustBeFirstprivateAtLevel(
2634 OMPC = OMPC_firstprivate;
2638 if (OMPC != OMPC_unknown)
2640 OMPCaptureKindAttr::CreateImplicit(getASTContext(),
unsigned(OMPC)));
2644 unsigned CaptureLevel)
const {
2645 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2650 const auto *VD = dyn_cast<VarDecl>(
D);
2654 Regions[CaptureLevel] != OMPD_task;
2658 unsigned CaptureLevel)
const {
2659 assert(
getLangOpts().OpenMP &&
"OpenMP is not allowed");
2662 if (
const auto *VD = dyn_cast<VarDecl>(
D)) {
2666 DSAStackTy::DSAVarData TopDVar =
2668 unsigned NumLevels =
2673 return (NumLevels == CaptureLevel + 1 &&
2674 (TopDVar.CKind != OMPC_shared ||
2675 DSAStack->getDefaultDSA() == DSA_firstprivate));
2678 DSAStackTy::DSAVarData DVar =
DSAStack->getImplicitDSA(
D, Level);
2679 if (DVar.CKind != OMPC_shared)
2681 }
while (Level > 0);
2687void SemaOpenMP::DestroyDataSharingAttributesStack() {
delete DSAStack; }
2691 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2696 "Not in OpenMP declare variant scope!");
2698 OMPDeclareVariantScopes.pop_back();
2704 assert(
getLangOpts().OpenMP &&
"Expected OpenMP compilation mode.");
2705 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2709 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2712 if (!
getLangOpts().OpenMPIsTargetDevice && DevTy &&
2713 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2716 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2717 if (
getLangOpts().OpenMPIsTargetDevice && DevTy &&
2718 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2720 StringRef HostDevTy =
2722 Diag(
Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2723 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2724 diag::note_omp_marked_device_type_here)
2730 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2734 for (OMPDeclareVariantAttr *A :
2735 Callee->specific_attrs<OMPDeclareVariantAttr>()) {
2736 auto *DeclRefVariant = cast<DeclRefExpr>(A->getVariantFuncRef());
2737 auto *VariantFD = cast<FunctionDecl>(DeclRefVariant->getDecl());
2738 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2739 OMPDeclareTargetDeclAttr::getDeviceType(
2740 VariantFD->getMostRecentDecl());
2741 if (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host)
2747 Callee->hasAttr<OMPDeclareVariantAttr>() && HasHostAttr(Callee))
2751 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2752 Diag(
Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2753 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2754 diag::note_omp_marked_device_type_here)
2772 DSAStack->setClauseParsingMode(OMPC_unknown);
2776static std::pair<ValueDecl *, bool>
2778 SourceRange &ERange,
bool AllowArraySection =
false,
2779 StringRef DiagType =
"");
2784 bool InscanFound =
false;
2791 if (
C->getClauseKind() != OMPC_reduction)
2793 auto *RC = cast<OMPReductionClause>(
C);
2794 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2796 InscanLoc = RC->getModifierLoc();
2799 if (RC->getModifier() == OMPC_REDUCTION_task) {
2809 S.
Diag(RC->getModifierLoc(),
2810 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2816 if (
C->getClauseKind() != OMPC_reduction)
2818 auto *RC = cast<OMPReductionClause>(
C);
2819 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2822 : RC->getModifierLoc(),
2823 diag::err_omp_inscan_reduction_expected);
2824 S.
Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2827 for (
Expr *Ref : RC->varlist()) {
2828 assert(Ref &&
"NULL expr in OpenMP nontemporal clause.");
2831 Expr *SimpleRefExpr = Ref;
2838 S.
Diag(Ref->getExprLoc(),
2839 diag::err_omp_reduction_not_inclusive_exclusive)
2840 << Ref->getSourceRange();
2854 const DSAStackTy::DSAVarData &DVar,
2855 bool IsLoopIterVar =
false);
2863 if (
const auto *
D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
2865 if (
auto *Clause = dyn_cast<OMPLastprivateClause>(
C)) {
2867 for (
Expr *DE : Clause->varlist()) {
2868 if (DE->isValueDependent() || DE->isTypeDependent()) {
2869 PrivateCopies.push_back(
nullptr);
2872 auto *DRE = cast<DeclRefExpr>(DE->IgnoreParens());
2873 auto *VD = cast<VarDecl>(DRE->getDecl());
2875 const DSAStackTy::DSAVarData DVar =
2877 if (DVar.CKind == OMPC_lastprivate) {
2884 SemaRef, DE->getExprLoc(),
Type.getUnqualifiedType(),
2888 PrivateCopies.push_back(
nullptr);
2896 PrivateCopies.push_back(
nullptr);
2899 Clause->setPrivateCopies(PrivateCopies);
2903 if (
auto *Clause = dyn_cast<OMPNontemporalClause>(
C)) {
2905 for (
Expr *RefExpr : Clause->varlist()) {
2906 assert(RefExpr &&
"NULL expr in OpenMP nontemporal clause.");
2909 Expr *SimpleRefExpr = RefExpr;
2913 PrivateRefs.push_back(RefExpr);
2918 const DSAStackTy::DSAVarData DVar =
2920 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2923 Clause->setPrivateRefs(PrivateRefs);
2926 if (
auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(
C)) {
2927 for (
unsigned I = 0,
E = Clause->getNumberOfAllocators(); I <
E; ++I) {
2929 auto *DRE = dyn_cast<DeclRefExpr>(
D.Allocator->IgnoreParenImpCasts());
2933 if (!VD || !isa<VarDecl>(VD))
2935 DSAStackTy::DSAVarData DVar =
2941 Expr *MapExpr =
nullptr;
2943 DSAStack->checkMappableExprComponentListsForDecl(
2949 auto MI = MapExprComponents.rbegin();
2950 auto ME = MapExprComponents.rend();
2952 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
2953 VD->getCanonicalDecl()) {
2954 MapExpr = MI->getAssociatedExpression();
2959 Diag(
D.Allocator->getExprLoc(),
2960 diag::err_omp_allocator_used_in_clauses)
2965 Diag(MapExpr->getExprLoc(), diag::note_used_here)
2966 << MapExpr->getSourceRange();
2984 Expr *NumIterations,
Sema &SemaRef,
2985 Scope *S, DSAStackTy *Stack);
2989 DSAStackTy *Stack) {
2991 "loop exprs were not built");
2998 auto *LC = dyn_cast<OMPLinearClause>(
C);
3017 explicit VarDeclFilterCCC(
Sema &S) : SemaRef(S) {}
3018 bool ValidateCandidate(
const TypoCorrection &Candidate)
override {
3020 if (
const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
3028 std::unique_ptr<CorrectionCandidateCallback> clone()
override {
3029 return std::make_unique<VarDeclFilterCCC>(*
this);
3038 explicit VarOrFuncDeclFilterCCC(
Sema &S) : SemaRef(S) {}
3039 bool ValidateCandidate(
const TypoCorrection &Candidate)
override {
3041 if (ND && ((isa<VarDecl>(ND) && ND->
getKind() == Decl::Var) ||
3042 isa<FunctionDecl>(ND))) {
3049 std::unique_ptr<CorrectionCandidateCallback> clone()
override {
3050 return std::make_unique<VarOrFuncDeclFilterCCC>(*
this);
3071 VarDeclFilterCCC CCC(
SemaRef);
3078 : diag::err_omp_expected_var_arg_suggest)
3080 VD = Corrected.getCorrectionDeclAs<
VarDecl>();
3082 Diag(
Id.getLoc(), Lookup.
empty() ? diag::err_undeclared_var_use
3083 : diag::err_omp_expected_var_arg)
3088 Diag(
Id.getLoc(), diag::err_omp_expected_var_arg) <<
Id.getName();
3097 Diag(
Id.getLoc(), diag::err_omp_global_var_arg)
3102 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3114 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3115 << getOpenMPDirectiveName(Kind) << VD;
3119 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3129 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3130 << getOpenMPDirectiveName(Kind) << VD;
3134 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3146 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3147 << getOpenMPDirectiveName(Kind) << VD;
3151 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3160 Diag(
Id.getLoc(), diag::err_omp_var_scope)
3161 << getOpenMPDirectiveName(Kind) << VD;
3165 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3173 if (Kind == OMPD_threadprivate && VD->
isUsed() &&
3175 Diag(
Id.getLoc(), diag::err_omp_var_used)
3176 << getOpenMPDirectiveName(Kind) << VD;
3198class LocalVarRefChecker final
3204 if (
const auto *VD = dyn_cast<VarDecl>(
E->getDecl())) {
3207 diag::err_omp_local_var_in_threadprivate_init)
3209 SemaRef.Diag(VD->
getLocation(), diag::note_defined_here)
3216 bool VisitStmt(
const Stmt *S) {
3217 for (
const Stmt *Child : S->children()) {
3218 if (Child && Visit(Child))
3223 explicit LocalVarRefChecker(
Sema &SemaRef) : SemaRef(SemaRef) {}
3232 for (
Expr *RefExpr : VarList) {
3233 auto *DE = cast<DeclRefExpr>(RefExpr);
3234 auto *VD = cast<VarDecl>(DE->getDecl());
3251 ILoc, VD->
getType(), diag::err_omp_threadprivate_incomplete_type)) {
3258 Diag(ILoc, diag::err_omp_ref_type_arg)
3259 << getOpenMPDirectiveName(OMPD_threadprivate) << VD->
getType();
3263 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3271 !(VD->
hasAttr<OMPThreadPrivateDeclAttr>() &&
3276 Diag(ILoc, diag::err_omp_var_thread_local)
3281 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3289 LocalVarRefChecker Checker(
SemaRef);
3290 if (Checker.Visit(
Init))
3294 Vars.push_back(RefExpr);
3295 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3296 VD->
addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3299 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3302 if (!Vars.empty()) {
3310static OMPAllocateDeclAttr::AllocatorTypeTy
3313 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3314 if (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3315 Allocator->isInstantiationDependent() ||
3316 Allocator->containsUnexpandedParameterPack())
3317 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3318 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3319 llvm::FoldingSetNodeID AEId;
3320 const Expr *AE = Allocator->IgnoreParenImpCasts();
3322 for (
int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3323 auto AllocatorKind =
static_cast<OMPAllocateDeclAttr::AllocatorTypeTy
>(I);
3324 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3325 llvm::FoldingSetNodeID DAEId;
3328 if (AEId == DAEId) {
3329 AllocatorKindRes = AllocatorKind;
3333 return AllocatorKindRes;
3338 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
Expr *Allocator) {
3339 if (!VD->
hasAttr<OMPAllocateDeclAttr>())
3341 const auto *A = VD->
getAttr<OMPAllocateDeclAttr>();
3342 Expr *PrevAllocator = A->getAllocator();
3343 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3345 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
3346 if (AllocatorsMatch &&
3347 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3348 Allocator && PrevAllocator) {
3349 const Expr *AE = Allocator->IgnoreParenImpCasts();
3351 llvm::FoldingSetNodeID AEId, PAEId;
3354 AllocatorsMatch = AEId == PAEId;
3356 if (!AllocatorsMatch) {
3358 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3362 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3364 PrevAllocator->printPretty(PrevAllocatorStream,
nullptr,
3368 Allocator ? Allocator->getExprLoc() : RefExpr->
getExprLoc();
3370 Allocator ? Allocator->getSourceRange() : RefExpr->
getSourceRange();
3372 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3374 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3375 S.
Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3376 << (Allocator ? 1 : 0) << AllocatorStream.str()
3377 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3379 S.
Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3380 << PrevAllocatorRange;
3388 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3390 if (VD->
hasAttr<OMPAllocateDeclAttr>())
3399 (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3400 Allocator->isInstantiationDependent() ||
3401 Allocator->containsUnexpandedParameterPack()))
3403 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.
Context, AllocatorKind,
3404 Allocator, Alignment, SR);
3407 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3413 assert(Clauses.size() <= 2 &&
"Expected at most two clauses.");
3414 Expr *Alignment =
nullptr;
3415 Expr *Allocator =
nullptr;
3416 if (Clauses.empty()) {
3426 if (
const auto *AC = dyn_cast<OMPAllocatorClause>(
C))
3427 Allocator = AC->getAllocator();
3428 else if (
const auto *AC = dyn_cast<OMPAlignClause>(
C))
3429 Alignment = AC->getAlignment();
3431 llvm_unreachable(
"Unexpected clause on allocate directive");
3433 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3436 for (
Expr *RefExpr : VarList) {
3437 auto *DE = cast<DeclRefExpr>(RefExpr);
3438 auto *VD = cast<VarDecl>(DE->getDecl());
3442 VD->
hasAttr<OMPThreadPrivateDeclAttr>() ||
3450 AllocatorKind, Allocator))
3458 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3459 Diag(Allocator->getExprLoc(),
3460 diag::err_omp_expected_predefined_allocator)
3461 << Allocator->getSourceRange();
3465 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3471 Vars.push_back(RefExpr);
3490 Diag(
Loc, diag::err_omp_invalid_scope) <<
"requires";
3504 bool SkippedClauses) {
3505 if (!SkippedClauses && Assumptions.empty())
3506 Diag(
Loc, diag::err_omp_no_clause_for_directive)
3507 << llvm::omp::getAllAssumeClauseOptions()
3508 << llvm::omp::getOpenMPDirectiveName(DKind);
3512 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3513 OMPAssumeScoped.push_back(AA);
3518 if (Assumptions.empty())
3521 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3522 "Unexpected omp assumption directive!");
3523 OMPAssumeGlobal.push_back(AA);
3531 while (Ctx->getLexicalParent())
3533 DeclContexts.push_back(Ctx);
3534 while (!DeclContexts.empty()) {
3536 for (
auto *SubDC : DC->
decls()) {
3537 if (SubDC->isInvalidDecl())
3539 if (
auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3540 DeclContexts.push_back(CTD->getTemplatedDecl());
3541 llvm::append_range(DeclContexts, CTD->specializations());
3544 if (
auto *DC = dyn_cast<DeclContext>(SubDC))
3545 DeclContexts.push_back(DC);
3546 if (
auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3556 OMPAssumeScoped.pop_back();
3577 DSAStack->getEncounteredTargetLocs();
3579 if (!TargetLocations.empty() || !AtomicLoc.
isInvalid()) {
3580 for (
const OMPClause *CNew : ClauseList) {
3582 if (isa<OMPUnifiedSharedMemoryClause>(CNew) ||
3583 isa<OMPUnifiedAddressClause>(CNew) ||
3584 isa<OMPReverseOffloadClause>(CNew) ||
3585 isa<OMPDynamicAllocatorsClause>(CNew)) {
3586 Diag(
Loc, diag::err_omp_directive_before_requires)
3587 <<
"target" << getOpenMPClauseName(CNew->getClauseKind());
3589 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3593 isa<OMPAtomicDefaultMemOrderClause>(CNew)) {
3594 Diag(
Loc, diag::err_omp_directive_before_requires)
3595 <<
"atomic" << getOpenMPClauseName(CNew->getClauseKind());
3596 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3602 if (!
DSAStack->hasDuplicateRequiresClause(ClauseList))
3610 const DSAStackTy::DSAVarData &DVar,
3611 bool IsLoopIterVar) {
3613 SemaRef.
Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_explicit_dsa)
3614 << getOpenMPClauseName(DVar.CKind);
3618 PDSA_StaticMemberShared,
3619 PDSA_StaticLocalVarShared,
3620 PDSA_LoopIterVarPrivate,
3621 PDSA_LoopIterVarLinear,
3622 PDSA_LoopIterVarLastprivate,
3623 PDSA_ConstVarShared,
3624 PDSA_GlobalVarShared,
3625 PDSA_TaskVarFirstprivate,
3626 PDSA_LocalVarPrivate,
3628 } Reason = PDSA_Implicit;
3629 bool ReportHint =
false;
3631 auto *VD = dyn_cast<VarDecl>(
D);
3632 if (IsLoopIterVar) {
3633 if (DVar.CKind == OMPC_private)
3634 Reason = PDSA_LoopIterVarPrivate;
3635 else if (DVar.CKind == OMPC_lastprivate)
3636 Reason = PDSA_LoopIterVarLastprivate;
3638 Reason = PDSA_LoopIterVarLinear;
3640 DVar.CKind == OMPC_firstprivate) {
3641 Reason = PDSA_TaskVarFirstprivate;
3642 ReportLoc = DVar.ImplicitDSALoc;
3644 Reason = PDSA_StaticLocalVarShared;
3646 Reason = PDSA_StaticMemberShared;
3648 Reason = PDSA_GlobalVarShared;
3650 Reason = PDSA_ConstVarShared;
3651 else if (VD && VD->
isLocalVarDecl() && DVar.CKind == OMPC_private) {
3653 Reason = PDSA_LocalVarPrivate;
3655 if (Reason != PDSA_Implicit) {
3656 SemaRef.
Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3657 << Reason << ReportHint
3658 << getOpenMPDirectiveName(Stack->getCurrentDirective());
3659 }
else if (DVar.ImplicitDSALoc.isValid()) {
3660 SemaRef.
Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3661 << getOpenMPClauseName(DVar.CKind);
3667 bool IsAggregateOrDeclareTarget) {
3670 case OMPC_DEFAULTMAP_MODIFIER_alloc:
3671 Kind = OMPC_MAP_alloc;
3673 case OMPC_DEFAULTMAP_MODIFIER_to:
3676 case OMPC_DEFAULTMAP_MODIFIER_from:
3677 Kind = OMPC_MAP_from;
3679 case OMPC_DEFAULTMAP_MODIFIER_tofrom:
3680 Kind = OMPC_MAP_tofrom;
3682 case OMPC_DEFAULTMAP_MODIFIER_present:
3688 Kind = OMPC_MAP_alloc;
3690 case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
3692 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3693 case OMPC_DEFAULTMAP_MODIFIER_none:
3694 case OMPC_DEFAULTMAP_MODIFIER_default:
3699 if (IsAggregateOrDeclareTarget) {
3700 Kind = OMPC_MAP_tofrom;
3703 llvm_unreachable(
"Unexpected defaultmap implicit behavior");
3710class DSAAttrChecker final :
public StmtVisitor<DSAAttrChecker, void> {
3714 bool ErrorFound =
false;
3715 bool TryCaptureCXXThisMembers =
false;
3722 ImplicitMapModifier[DefaultmapKindNum];
3724 llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;
3728 if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
3730 if (S->getDirectiveKind() == OMPD_atomic ||
3731 S->getDirectiveKind() == OMPD_critical ||
3732 S->getDirectiveKind() == OMPD_section ||
3733 S->getDirectiveKind() == OMPD_master ||
3734 S->getDirectiveKind() == OMPD_masked ||
3735 S->getDirectiveKind() == OMPD_scope ||
3736 S->getDirectiveKind() == OMPD_assume ||
3738 Visit(S->getAssociatedStmt());
3741 visitSubCaptures(S->getInnermostCapturedStmt());
3744 if (TryCaptureCXXThisMembers ||
3746 llvm::any_of(S->getInnermostCapturedStmt()->captures(),
3748 return C.capturesThis();
3750 bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
3751 TryCaptureCXXThisMembers =
true;
3752 Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
3753 TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
3760 if (
auto *FC = dyn_cast<OMPFirstprivateClause>(
C)) {
3761 for (
Expr *Ref : FC->varlist())
3774 if (
auto *VD = dyn_cast<VarDecl>(
E->getDecl())) {
3777 !Stack->getTopDSA(VD,
false).RefExpr &&
3778 !Stack->isImplicitDefaultFirstprivateFD(VD))) {
3779 if (
auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
3780 if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
3781 Visit(CED->getInit());
3784 }
else if (VD->
isImplicit() || isa<OMPCapturedExprDecl>(VD))
3787 if (!Stack->isImplicitDefaultFirstprivateFD(VD))
3792 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3793 !Stack->isImplicitTaskFirstprivate(VD))
3796 if (Stack->isUsesAllocatorsDecl(VD))
3799 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD,
false);
3801 if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
3805 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
3806 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
3809 !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
3810 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3811 !Stack->isImplicitTaskFirstprivate(VD))
3819 if (DVar.CKind == OMPC_unknown &&
3820 (Stack->getDefaultDSA() == DSA_none ||
3821 Stack->getDefaultDSA() == DSA_private ||
3822 Stack->getDefaultDSA() == DSA_firstprivate) &&
3823 isImplicitOrExplicitTaskingRegion(DKind) &&
3824 VarsWithInheritedDSA.count(VD) == 0) {
3825 bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
3826 if (!InheritedDSA && (Stack->getDefaultDSA() == DSA_firstprivate ||
3827 Stack->getDefaultDSA() == DSA_private)) {
3828 DSAStackTy::DSAVarData DVar =
3829 Stack->getImplicitDSA(VD,
false);
3830 InheritedDSA = DVar.CKind == OMPC_unknown;
3833 VarsWithInheritedDSA[VD] =
E;
3834 if (Stack->getDefaultDSA() == DSA_none)
3849 bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
3850 OMPC_DEFAULTMAP_MODIFIER_none;
3851 if (DVar.CKind == OMPC_unknown && IsModifierNone &&
3852 VarsWithInheritedDSA.count(VD) == 0 && !Res) {
3856 if (!Stack->checkMappableExprComponentListsForDecl(
3861 auto MI = MapExprComponents.rbegin();
3862 auto ME = MapExprComponents.rend();
3863 return MI != ME && MI->getAssociatedDeclaration() == VD;
3865 VarsWithInheritedDSA[VD] =
E;
3871 bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
3872 OMPC_DEFAULTMAP_MODIFIER_present;
3873 if (IsModifierPresent) {
3874 if (!llvm::is_contained(ImplicitMapModifier[ClauseKind],
3875 OMPC_MAP_MODIFIER_present)) {
3876 ImplicitMapModifier[ClauseKind].push_back(
3877 OMPC_MAP_MODIFIER_present);
3883 !Stack->isLoopControlVariable(VD).first) {
3884 if (!Stack->checkMappableExprComponentListsForDecl(
3889 if (SemaRef.LangOpts.OpenMP >= 50)
3890 return !StackComponents.empty();
3893 return StackComponents.size() == 1 ||
3895 llvm::drop_begin(llvm::reverse(StackComponents)),
3896 [](const OMPClauseMappableExprCommon::
3897 MappableComponent &MC) {
3898 return MC.getAssociatedDeclaration() ==
3900 (isa<ArraySectionExpr>(
3901 MC.getAssociatedExpression()) ||
3902 isa<OMPArrayShapingExpr>(
3903 MC.getAssociatedExpression()) ||
3904 isa<ArraySubscriptExpr>(
3905 MC.getAssociatedExpression()));
3908 bool IsFirstprivate =
false;
3910 if (
const auto *RD =
3912 IsFirstprivate = RD->isLambda();
3914 IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
3915 if (IsFirstprivate) {
3916 ImplicitFirstprivate.emplace_back(
E);
3919 Stack->getDefaultmapModifier(ClauseKind);
3921 M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
3922 ImplicitMap[ClauseKind][
Kind].emplace_back(
E);
3932 DVar = Stack->hasInnermostDSA(
3935 return C == OMPC_reduction && !AppliedToPointee;
3944 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
3950 DVar = Stack->getImplicitDSA(VD,
false);
3952 (((Stack->getDefaultDSA() == DSA_firstprivate &&
3953 DVar.CKind == OMPC_firstprivate) ||
3954 (Stack->getDefaultDSA() == DSA_private &&
3955 DVar.CKind == OMPC_private)) &&
3957 !Stack->isLoopControlVariable(VD).first) {
3958 if (Stack->getDefaultDSA() == DSA_private)
3959 ImplicitPrivate.push_back(
E);
3961 ImplicitFirstprivate.push_back(
E);
3968 *Res == OMPDeclareTargetDeclAttr::MT_Link) {
3969 Stack->addToParentTargetRegionLinkGlobals(
E);
3978 auto *FD = dyn_cast<FieldDecl>(
E->getMemberDecl());
3982 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD,
false);
3985 if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
3989 !Stack->isLoopControlVariable(FD).first &&
3990 !Stack->checkMappableExprComponentListsForDecl(
3995 return isa<CXXThisExpr>(
3997 StackComponents.back().getAssociatedExpression())
4009 if (Stack->isClassPreviouslyMapped(TE->getType()))
4013 Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
4018 ImplicitMap[ClauseKind][
Kind].emplace_back(
E);
4027 DVar = Stack->hasInnermostDSA(
4030 return C == OMPC_reduction && !AppliedToPointee;
4039 SemaRef.
Diag(ELoc, diag::err_omp_reduction_in_task);
4045 DVar = Stack->getImplicitDSA(FD,
false);
4047 !Stack->isLoopControlVariable(FD).first) {
4052 if (DVar.CKind != OMPC_unknown)
4053 ImplicitFirstprivate.push_back(
E);
4062 const auto *VD = cast<ValueDecl>(
4063 CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
4064 if (!Stack->checkMappableExprComponentListsForDecl(
4070 auto CCI = CurComponents.rbegin();
4071 auto CCE = CurComponents.rend();
4072 for (const auto &SC : llvm::reverse(StackComponents)) {
4074 if (CCI->getAssociatedExpression()->getStmtClass() !=
4075 SC.getAssociatedExpression()->getStmtClass())
4076 if (!((isa<ArraySectionExpr>(
4077 SC.getAssociatedExpression()) ||
4078 isa<OMPArrayShapingExpr>(
4079 SC.getAssociatedExpression())) &&
4080 isa<ArraySubscriptExpr>(
4081 CCI->getAssociatedExpression())))
4084 const Decl *CCD = CCI->getAssociatedDeclaration();
4085 const Decl *SCD = SC.getAssociatedDeclaration();
4086 CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
4087 SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
4090 std::advance(CCI, 1);
4096 Visit(
E->getBase());
4098 }
else if (!TryCaptureCXXThisMembers) {
4099 Visit(
E->getBase());
4106 if (isa_and_nonnull<OMPPrivateClause>(
C))
4112 if (
C && !((isa<OMPFirstprivateClause>(
C) || isa<OMPMapClause>(
C)) &&
4114 for (
Stmt *CC :
C->children()) {
4121 VisitSubCaptures(S);
4130 for (
Stmt *
C : S->arguments()) {
4137 if (
Expr *Callee = S->getCallee()) {
4138 auto *CI =
Callee->IgnoreParenImpCasts();
4139 if (
auto *CE = dyn_cast<MemberExpr>(CI))
4140 Visit(CE->getBase());
4141 else if (
auto *CE = dyn_cast<DeclRefExpr>(CI))
4145 void VisitStmt(
Stmt *S) {
4146 for (
Stmt *
C : S->children()) {
4157 if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
4159 VarDecl *VD = Cap.getCapturedVar();
4163 Stack->checkMappableExprComponentListsForDecl(
4170 Cap.getLocation(),
true);
4174 bool isErrorFound()
const {
return ErrorFound; }
4176 return ImplicitFirstprivate;
4181 return ImplicitMap[DK][MK];
4185 return ImplicitMapModifier[
Kind];
4188 return VarsWithInheritedDSA;
4192 : Stack(S), SemaRef(SemaRef), ErrorFound(
false), CS(CS) {
4193 DKind = S->getCurrentDirective();
4208 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
4210 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
4212 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
4214 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
4216 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
4217 Stack->handleConstructTrait(Traits, ScopeEntry);
4228 std::make_pair(
".global_tid.", KmpInt32PtrTy),
4229 std::make_pair(
".bound_tid.", KmpInt32PtrTy),
4231 if (LoopBoundSharing) {
4233 Params.push_back(std::make_pair(
".previous.lb.", KmpSizeTy));
4234 Params.push_back(std::make_pair(
".previous.ub.", KmpSizeTy));
4238 Params.push_back(std::make_pair(StringRef(),
QualType()));
4259 std::make_pair(
".global_tid.", KmpInt32Ty),
4260 std::make_pair(
".part_id.", KmpInt32PtrTy),
4261 std::make_pair(
".privates.", VoidPtrTy),
4266 std::make_pair(StringRef(),
QualType())
4277 Params.push_back(std::make_pair(StringRef(
"dyn_ptr"), VoidPtrTy));
4280 Params.push_back(std::make_pair(StringRef(),
QualType()));
4287 std::make_pair(StringRef(),
QualType())
4309 std::make_pair(
".global_tid.", KmpInt32Ty),
4310 std::make_pair(
".part_id.", KmpInt32PtrTy),
4311 std::make_pair(
".privates.", VoidPtrTy),
4316 std::make_pair(
".lb.", KmpUInt64Ty),
4317 std::make_pair(
".ub.", KmpUInt64Ty),
4318 std::make_pair(
".st.", KmpInt64Ty),
4319 std::make_pair(
".liter.", KmpInt32Ty),
4320 std::make_pair(
".reductions.", VoidPtrTy),
4321 std::make_pair(StringRef(),
QualType())
4334 CSI->TheCapturedDecl->addAttr(AlwaysInlineAttr::CreateImplicit(
4335 SemaRef.
getASTContext(), {}, AlwaysInlineAttr::Keyword_forceinline));
4338 for (
auto [Level, RKind] : llvm::enumerate(Regions)) {
4373 case OMPD_metadirective:
4376 llvm_unreachable(
"Unexpected capture region");
4392 case OMPD_interchange:
4405int SemaOpenMP::getNumberOfConstructScopes(
unsigned Level)
const {
4412 return CaptureRegions.size();
4416 Expr *CaptureExpr,
bool WithInit,
4418 bool AsExpression) {
4419 assert(CaptureExpr);
4425 Ty =
C.getLValueReferenceType(Ty);
4427 Ty =
C.getPointerType(Ty);
4439 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(
C));
4450 CD = cast<OMPCapturedExprDecl>(VD);
4489class CaptureRegionUnwinderRAII {
4496 CaptureRegionUnwinderRAII(
Sema &S,
bool &ErrorFound,
4498 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
4499 ~CaptureRegionUnwinderRAII() {
4502 while (--ThisCaptureLevel >= 0)
4515 DSAStack->getCurrentDirective()))) {
4517 if (
const auto *RD =
Type.getCanonicalType()
4518 .getNonReferenceType()
4520 bool SavedForceCaptureByReferenceInTargetExecutable =
4521 DSAStack->isForceCaptureByReferenceInTargetExecutable();
4522 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4524 if (RD->isLambda()) {
4525 llvm::DenseMap<const ValueDecl *, FieldDecl *> Captures;
4527 RD->getCaptureFields(Captures, ThisCapture);
4530 VarDecl *VD = cast<VarDecl>(LC.getCapturedVar());
4535 }
else if (LC.getCaptureKind() ==
LCK_This) {
4538 ThisTy, ThisCapture->
getType()))
4543 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4544 SavedForceCaptureByReferenceInTargetExecutable);
4554 for (
const OMPClause *Clause : Clauses) {
4556 Ordered = cast<OMPOrderedClause>(Clause);
4558 Order = cast<OMPOrderClause>(Clause);
4559 if (Order->
getKind() != OMPC_ORDER_concurrent)
4562 if (Ordered && Order)
4566 if (Ordered && Order) {
4568 diag::err_omp_simple_clause_incompatible_with_ordered)
4569 << getOpenMPClauseName(OMPC_order)
4586 bool ErrorFound =
false;
4587 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
4589 if (!S.isUsable()) {
4604 DSAStack->getCurrentDirective() == OMPD_target) &&
4608 auto *IRC = cast<OMPInReductionClause>(Clause);
4609 for (
Expr *
E : IRC->taskgroup_descriptors())
4621 if (
auto *
E = cast_or_null<Expr>(VarRef)) {
4625 DSAStack->setForceVarCapturing(
false);
4626 }
else if (CaptureRegions.size() > 1 ||
4627 CaptureRegions.back() != OMPD_unknown) {
4631 if (
Expr *
E =
C->getPostUpdateExpr())
4636 SC = cast<OMPScheduleClause>(Clause);
4638 OC = cast<OMPOrderedClause>(Clause);
4640 LCs.push_back(cast<OMPLinearClause>(Clause));
4651 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
4656 diag::err_omp_simple_clause_incompatible_with_ordered)
4657 << getOpenMPClauseName(OMPC_schedule)
4659 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
4670 Diag(
C->getBeginLoc(), diag::err_omp_linear_ordered)
4679 << getOpenMPDirectiveName(
DSAStack->getCurrentDirective());
4686 unsigned CompletedRegions = 0;
4691 if (ThisCaptureRegion != OMPD_unknown) {
4699 if (CaptureRegion == ThisCaptureRegion ||
4700 CaptureRegion == OMPD_unknown) {
4701 if (
auto *DS = cast_or_null<DeclStmt>(
C->getPreInitStmt())) {
4702 for (
Decl *
D : DS->decls())
4709 if (ThisCaptureRegion == OMPD_target) {
4713 if (
const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(
C)) {
4714 for (
unsigned I = 0, End = UAC->getNumberOfAllocators(); I < End;
4717 if (
Expr *
E =
D.AllocatorTraits)
4724 if (ThisCaptureRegion == OMPD_parallel) {
4728 if (
auto *RC = dyn_cast<OMPReductionClause>(
C)) {
4729 if (RC->getModifier() != OMPC_REDUCTION_inscan)
4731 for (
Expr *
E : RC->copy_array_temps())
4735 if (
auto *AC = dyn_cast<OMPAlignedClause>(
C)) {
4736 for (
Expr *
E : AC->varlist())
4741 if (++CompletedRegions == CaptureRegions.size())
4752 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4755 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4756 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4759 SemaRef.
Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4760 << getOpenMPDirectiveName(CancelRegion);
4770 if (!Stack->getCurScope())
4775 bool NestingProhibited =
false;
4776 bool CloseNesting =
true;
4777 bool OrphanSeen =
false;
4780 ShouldBeInParallelRegion,
4781 ShouldBeInOrderedRegion,
4782 ShouldBeInTargetRegion,
4783 ShouldBeInTeamsRegion,
4784 ShouldBeInLoopSimdRegion,
4785 } Recommend = NoRecommend;
4789 getLeafOrCompositeConstructs(ParentRegion, LeafOrComposite);
4792 if (SemaRef.
LangOpts.OpenMP >= 51 && Stack->isParentOrderConcurrent() &&
4793 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_loop &&
4794 CurrentRegion != OMPD_parallel &&
4796 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_order)
4797 << getOpenMPDirectiveName(CurrentRegion);
4801 ((SemaRef.
LangOpts.OpenMP <= 45 && CurrentRegion != OMPD_ordered) ||
4802 (SemaRef.
LangOpts.OpenMP >= 50 && CurrentRegion != OMPD_ordered &&
4803 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
4804 CurrentRegion != OMPD_scan))) {
4817 SemaRef.
Diag(StartLoc, (CurrentRegion != OMPD_simd)
4818 ? diag::err_omp_prohibited_region_simd
4819 : diag::warn_omp_nesting_simd)
4820 << (SemaRef.
LangOpts.OpenMP >= 50 ? 1 : 0);
4821 return CurrentRegion != OMPD_simd;
4823 if (EnclosingConstruct == OMPD_atomic) {
4826 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
4829 if (CurrentRegion == OMPD_section) {
4834 if (EnclosingConstruct != OMPD_sections) {
4835 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_section_directive)
4836 << (ParentRegion != OMPD_unknown)
4837 << getOpenMPDirectiveName(ParentRegion);
4845 if (ParentRegion == OMPD_unknown &&
4847 CurrentRegion != OMPD_cancellation_point &&
4848 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
4852 if (SemaRef.
LangOpts.OpenMP >= 50 && CurrentRegion == OMPD_loop &&
4853 (BindKind == OMPC_BIND_parallel || BindKind == OMPC_BIND_teams) &&
4855 EnclosingConstruct == OMPD_loop)) {
4856 int ErrorMsgNumber = (BindKind == OMPC_BIND_parallel) ? 1 : 4;
4857 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
4858 <<
true << getOpenMPDirectiveName(ParentRegion) << ErrorMsgNumber
4859 << getOpenMPDirectiveName(CurrentRegion);
4862 if (CurrentRegion == OMPD_cancellation_point ||
4863 CurrentRegion == OMPD_cancel) {
4876 if (CancelRegion == OMPD_taskgroup) {
4877 NestingProhibited = EnclosingConstruct != OMPD_task &&
4879 EnclosingConstruct != OMPD_taskloop);
4880 }
else if (CancelRegion == OMPD_sections) {
4881 NestingProhibited = EnclosingConstruct != OMPD_section &&
4882 EnclosingConstruct != OMPD_sections;
4884 NestingProhibited = CancelRegion != Leafs.back();
4886 OrphanSeen = ParentRegion == OMPD_unknown;
4887 }
else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
4894 }
else if (CurrentRegion == OMPD_critical && CurrentName.
getName()) {
4900 bool DeadLock = Stack->hasDirective(
4904 if (K == OMPD_critical && DNI.
getName() == CurrentName.
getName()) {
4905 PreviousCriticalLoc = Loc;
4912 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region_critical_same_name)
4914 if (PreviousCriticalLoc.
isValid())
4915 SemaRef.
Diag(PreviousCriticalLoc,
4916 diag::note_omp_previous_critical_region);
4919 }
else if (CurrentRegion == OMPD_barrier || CurrentRegion == OMPD_scope) {
4929 llvm::is_contained({OMPD_masked, OMPD_master,
4930 OMPD_critical, OMPD_ordered},
4931 EnclosingConstruct);
4942 llvm::is_contained({OMPD_masked, OMPD_master,
4943 OMPD_critical, OMPD_ordered},
4944 EnclosingConstruct);
4945 Recommend = ShouldBeInParallelRegion;
4946 }
else if (CurrentRegion == OMPD_ordered) {
4955 NestingProhibited = EnclosingConstruct == OMPD_critical ||
4958 Stack->isParentOrderedRegion());
4959 Recommend = ShouldBeInOrderedRegion;
4965 (SemaRef.
LangOpts.OpenMP <= 45 && EnclosingConstruct != OMPD_target) ||
4966 (SemaRef.
LangOpts.OpenMP >= 50 && EnclosingConstruct != OMPD_unknown &&
4967 EnclosingConstruct != OMPD_target);
4968 OrphanSeen = ParentRegion == OMPD_unknown;
4969 Recommend = ShouldBeInTargetRegion;
4970 }
else if (CurrentRegion == OMPD_scan) {
4971 if (SemaRef.
LangOpts.OpenMP >= 50) {
4976 NestingProhibited = !llvm::is_contained(
4977 {OMPD_for, OMPD_simd, OMPD_for_simd}, EnclosingConstruct);
4979 NestingProhibited =
true;
4981 OrphanSeen = ParentRegion == OMPD_unknown;
4982 Recommend = ShouldBeInLoopSimdRegion;
4986 EnclosingConstruct == OMPD_teams) {
4998 CurrentRegion != OMPD_loop &&
5000 CurrentRegion == OMPD_atomic);
5001 Recommend = ShouldBeInParallelRegion;
5003 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
5009 BindKind == OMPC_BIND_teams && EnclosingConstruct != OMPD_teams;
5010 Recommend = ShouldBeInTeamsRegion;
5016 NestingProhibited = EnclosingConstruct != OMPD_teams;
5017 Recommend = ShouldBeInTeamsRegion;
5019 if (!NestingProhibited &&
5026 NestingProhibited = Stack->hasDirective(
5030 OffendingRegion = K;
5036 CloseNesting =
false;
5038 if (NestingProhibited) {
5040 SemaRef.
Diag(StartLoc, diag::err_omp_orphaned_device_directive)
5041 << getOpenMPDirectiveName(CurrentRegion) << Recommend;
5043 SemaRef.
Diag(StartLoc, diag::err_omp_prohibited_region)
5044 << CloseNesting << getOpenMPDirectiveName(OffendingRegion)
5045 << Recommend << getOpenMPDirectiveName(CurrentRegion);
5059 bool ErrorFound =
false;
5060 unsigned NamedModifiersNumber = 0;
5061 llvm::IndexedMap<const OMPIfClause *, Kind2Unsigned> FoundNameModifiers;
5062 FoundNameModifiers.resize(llvm::omp::Directive_enumSize + 1);
5065 if (
const auto *IC = dyn_cast_or_null<OMPIfClause>(
C)) {
5069 if (FoundNameModifiers[CurNM]) {
5070 S.
Diag(
C->getBeginLoc(), diag::err_omp_more_one_clause)
5071 << getOpenMPDirectiveName(Kind) << getOpenMPClauseName(OMPC_if)
5072 << (CurNM != OMPD_unknown) << getOpenMPDirectiveName(CurNM);
5074 }
else if (CurNM != OMPD_unknown) {
5075 NameModifierLoc.push_back(IC->getNameModifierLoc());
5076 ++NamedModifiersNumber;
5078 FoundNameModifiers[CurNM] = IC;
5079 if (CurNM == OMPD_unknown)
5085 if (!llvm::is_contained(AllowedNameModifiers, CurNM)) {
5086 S.
Diag(IC->getNameModifierLoc(),
5087 diag::err_omp_wrong_if_directive_name_modifier)
5088 << getOpenMPDirectiveName(CurNM) << getOpenMPDirectiveName(Kind);
5095 if (FoundNameModifiers[OMPD_unknown] && NamedModifiersNumber > 0) {
5096 if (NamedModifiersNumber == AllowedNameModifiers.size()) {
5097 S.
Diag(FoundNameModifiers[OMPD_unknown]->getBeginLoc(),
5098 diag::err_omp_no_more_if_clause);
5101 std::string Sep(
", ");
5102 unsigned AllowedCnt = 0;
5103 unsigned TotalAllowedNum =
5104 AllowedNameModifiers.size() - NamedModifiersNumber;
5105 for (
unsigned Cnt = 0, End = AllowedNameModifiers.size(); Cnt < End;
5108 if (!FoundNameModifiers[NM]) {
5110 Values += getOpenMPDirectiveName(NM);
5112 if (AllowedCnt + 2 == TotalAllowedNum)
5114 else if (AllowedCnt + 1 != TotalAllowedNum)