clang 19.0.0git
SemaOpenMP.cpp
Go to the documentation of this file.
//===--- SemaOpenMP.cpp - Semantic Analysis for OpenMP constructs ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements semantic analysis for OpenMP directives and
/// clauses.
///
//===----------------------------------------------------------------------===//
13
15
16#include "TreeTransform.h"
20#include "clang/AST/Decl.h"
21#include "clang/AST/DeclCXX.h"
24#include "clang/AST/StmtCXX.h"
34#include "clang/Sema/Lookup.h"
36#include "clang/Sema/Scope.h"
38#include "clang/Sema/Sema.h"
40#include "llvm/ADT/IndexedMap.h"
41#include "llvm/ADT/PointerEmbeddedInt.h"
42#include "llvm/ADT/STLExtras.h"
43#include "llvm/ADT/SmallSet.h"
44#include "llvm/ADT/StringExtras.h"
45#include "llvm/Frontend/OpenMP/OMPAssume.h"
46#include "llvm/Frontend/OpenMP/OMPConstants.h"
47#include "llvm/IR/Assumptions.h"
48#include <optional>
49#include <set>
50
51using namespace clang;
52using namespace llvm::omp;
53
54//===----------------------------------------------------------------------===//
55// Stack of data-sharing attributes for variables
56//===----------------------------------------------------------------------===//
57
59 Sema &SemaRef, Expr *E,
61 OpenMPClauseKind CKind, OpenMPDirectiveKind DKind, bool NoDiagnose);
62
63namespace {
/// Default data-sharing attributes that a 'default' clause can apply to a
/// directive. The values are distinct bit flags.
enum DefaultDataSharingAttributes {
  DSA_unspecified = 0,       ///< Data sharing attribute not specified.
  DSA_none = 1 << 0,         ///< Default data sharing attribute 'none'.
  DSA_shared = 1 << 1,       ///< Default data sharing attribute 'shared'.
  DSA_private = 1 << 2,      ///< Default data sharing attribute 'private'.
  DSA_firstprivate = 1 << 3, ///< Default data sharing attribute 'firstprivate'.
};
72
73/// Stack for tracking declarations used in OpenMP directives and
74/// clauses and their data-sharing attributes.
75class DSAStackTy {
76public:
77 struct DSAVarData {
78 OpenMPDirectiveKind DKind = OMPD_unknown;
79 OpenMPClauseKind CKind = OMPC_unknown;
80 unsigned Modifier = 0;
81 const Expr *RefExpr = nullptr;
82 DeclRefExpr *PrivateCopy = nullptr;
83 SourceLocation ImplicitDSALoc;
84 bool AppliedToPointee = false;
85 DSAVarData() = default;
86 DSAVarData(OpenMPDirectiveKind DKind, OpenMPClauseKind CKind,
87 const Expr *RefExpr, DeclRefExpr *PrivateCopy,
88 SourceLocation ImplicitDSALoc, unsigned Modifier,
89 bool AppliedToPointee)
90 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
91 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
92 AppliedToPointee(AppliedToPointee) {}
93 };
94 using OperatorOffsetTy =
96 using DoacrossClauseMapTy = llvm::DenseMap<OMPClause *, OperatorOffsetTy>;
97 /// Kind of the declaration used in the uses_allocators clauses.
98 enum class UsesAllocatorsDeclKind {
99 /// Predefined allocator
100 PredefinedAllocator,
101 /// User-defined allocator
102 UserDefinedAllocator,
103 /// The declaration that represent allocator trait
104 AllocatorTrait,
105 };
106
107private:
108 struct DSAInfo {
109 OpenMPClauseKind Attributes = OMPC_unknown;
110 unsigned Modifier = 0;
111 /// Pointer to a reference expression and a flag which shows that the
112 /// variable is marked as lastprivate(true) or not (false).
113 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
114 DeclRefExpr *PrivateCopy = nullptr;
115 /// true if the attribute is applied to the pointee, not the variable
116 /// itself.
117 bool AppliedToPointee = false;
118 };
119 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
120 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
121 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
122 using LoopControlVariablesMapTy =
123 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
124 /// Struct that associates a component with the clause kind where they are
125 /// found.
126 struct MappedExprComponentTy {
128 OpenMPClauseKind Kind = OMPC_unknown;
129 };
130 using MappedExprComponentsTy =
131 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
132 using CriticalsWithHintsTy =
133 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
134 struct ReductionData {
135 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
136 SourceRange ReductionRange;
137 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
138 ReductionData() = default;
139 void set(BinaryOperatorKind BO, SourceRange RR) {
140 ReductionRange = RR;
141 ReductionOp = BO;
142 }
143 void set(const Expr *RefExpr, SourceRange RR) {
144 ReductionRange = RR;
145 ReductionOp = RefExpr;
146 }
147 };
148 using DeclReductionMapTy =
149 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
150 struct DefaultmapInfo {
151 OpenMPDefaultmapClauseModifier ImplicitBehavior =
153 SourceLocation SLoc;
154 DefaultmapInfo() = default;
156 : ImplicitBehavior(M), SLoc(Loc) {}
157 };
158
159 struct SharingMapTy {
160 DeclSAMapTy SharingMap;
161 DeclReductionMapTy ReductionMap;
162 UsedRefMapTy AlignedMap;
163 UsedRefMapTy NontemporalMap;
164 MappedExprComponentsTy MappedExprComponents;
165 LoopControlVariablesMapTy LCVMap;
166 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
167 SourceLocation DefaultAttrLoc;
168 DefaultmapInfo DefaultmapMap[OMPC_DEFAULTMAP_unknown + 1];
169 OpenMPDirectiveKind Directive = OMPD_unknown;
170 /// GenericLoopDirective with bind clause is mapped to other directives,
171 /// like for, distribute and simd. Presently, set MappedDirective to
172 /// OMPLoop. This may also be used in a similar way for other constructs.
173 OpenMPDirectiveKind MappedDirective = OMPD_unknown;
174 DeclarationNameInfo DirectiveName;
175 Scope *CurScope = nullptr;
176 DeclContext *Context = nullptr;
177 SourceLocation ConstructLoc;
178 /// Set of 'depend' clauses with 'sink|source' dependence kind. Required to
179 /// get the data (loop counters etc.) about enclosing loop-based construct.
180 /// This data is required during codegen.
181 DoacrossClauseMapTy DoacrossDepends;
182 /// First argument (Expr *) contains optional argument of the
183 /// 'ordered' clause, the second one is true if the regions has 'ordered'
184 /// clause, false otherwise.
185 std::optional<std::pair<const Expr *, OMPOrderedClause *>> OrderedRegion;
186 bool RegionHasOrderConcurrent = false;
187 unsigned AssociatedLoops = 1;
188 bool HasMutipleLoops = false;
189 const Decl *PossiblyLoopCounter = nullptr;
190 bool NowaitRegion = false;
191 bool UntiedRegion = false;
192 bool CancelRegion = false;
193 bool LoopStart = false;
194 bool BodyComplete = false;
195 SourceLocation PrevScanLocation;
196 SourceLocation PrevOrderedLocation;
197 SourceLocation InnerTeamsRegionLoc;
198 /// Reference to the taskgroup task_reduction reference expression.
199 Expr *TaskgroupReductionRef = nullptr;
200 llvm::DenseSet<QualType> MappedClassesQualTypes;
201 SmallVector<Expr *, 4> InnerUsedAllocators;
202 llvm::DenseSet<CanonicalDeclPtr<Decl>> ImplicitTaskFirstprivates;
203 /// List of globals marked as declare target link in this target region
204 /// (isOpenMPTargetExecutionDirective(Directive) == true).
205 llvm::SmallVector<DeclRefExpr *, 4> DeclareTargetLinkVarDecls;
206 /// List of decls used in inclusive/exclusive clauses of the scan directive.
207 llvm::DenseSet<CanonicalDeclPtr<Decl>> UsedInScanDirective;
208 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
209 UsesAllocatorsDecls;
210 /// Data is required on creating capture fields for implicit
211 /// default first|private clause.
212 struct ImplicitDefaultFDInfoTy {
213 /// Field decl.
214 const FieldDecl *FD = nullptr;
215 /// Nesting stack level
216 size_t StackLevel = 0;
217 /// Capture variable decl.
218 VarDecl *VD = nullptr;
219 ImplicitDefaultFDInfoTy(const FieldDecl *FD, size_t StackLevel,
220 VarDecl *VD)
221 : FD(FD), StackLevel(StackLevel), VD(VD) {}
222 };
223 /// List of captured fields
225 ImplicitDefaultFirstprivateFDs;
226 Expr *DeclareMapperVar = nullptr;
227 SmallVector<VarDecl *, 16> IteratorVarDecls;
228 SharingMapTy(OpenMPDirectiveKind DKind, DeclarationNameInfo Name,
229 Scope *CurScope, SourceLocation Loc)
230 : Directive(DKind), DirectiveName(Name), CurScope(CurScope),
231 ConstructLoc(Loc) {}
232 SharingMapTy() = default;
233 };
234
235 using StackTy = SmallVector<SharingMapTy, 4>;
236
237 /// Stack of used declaration and their data-sharing attributes.
238 DeclSAMapTy Threadprivates;
239 const FunctionScopeInfo *CurrentNonCapturingFunctionScope = nullptr;
241 /// true, if check for DSA must be from parent directive, false, if
242 /// from current directive.
243 OpenMPClauseKind ClauseKindMode = OMPC_unknown;
244 Sema &SemaRef;
245 bool ForceCapturing = false;
246 /// true if all the variables in the target executable directives must be
247 /// captured by reference.
248 bool ForceCaptureByReferenceInTargetExecutable = false;
249 CriticalsWithHintsTy Criticals;
250 unsigned IgnoredStackElements = 0;
251
252 /// Iterators over the stack iterate in order from innermost to outermost
253 /// directive.
254 using const_iterator = StackTy::const_reverse_iterator;
255 const_iterator begin() const {
256 return Stack.empty() ? const_iterator()
257 : Stack.back().first.rbegin() + IgnoredStackElements;
258 }
259 const_iterator end() const {
260 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
261 }
262 using iterator = StackTy::reverse_iterator;
263 iterator begin() {
264 return Stack.empty() ? iterator()
265 : Stack.back().first.rbegin() + IgnoredStackElements;
266 }
267 iterator end() {
268 return Stack.empty() ? iterator() : Stack.back().first.rend();
269 }
270
271 // Convenience operations to get at the elements of the stack.
272
273 bool isStackEmpty() const {
274 return Stack.empty() ||
275 Stack.back().second != CurrentNonCapturingFunctionScope ||
276 Stack.back().first.size() <= IgnoredStackElements;
277 }
278 size_t getStackSize() const {
279 return isStackEmpty() ? 0
280 : Stack.back().first.size() - IgnoredStackElements;
281 }
282
283 SharingMapTy *getTopOfStackOrNull() {
284 size_t Size = getStackSize();
285 if (Size == 0)
286 return nullptr;
287 return &Stack.back().first[Size - 1];
288 }
289 const SharingMapTy *getTopOfStackOrNull() const {
290 return const_cast<DSAStackTy &>(*this).getTopOfStackOrNull();
291 }
292 SharingMapTy &getTopOfStack() {
293 assert(!isStackEmpty() && "no current directive");
294 return *getTopOfStackOrNull();
295 }
296 const SharingMapTy &getTopOfStack() const {
297 return const_cast<DSAStackTy &>(*this).getTopOfStack();
298 }
299
300 SharingMapTy *getSecondOnStackOrNull() {
301 size_t Size = getStackSize();
302 if (Size <= 1)
303 return nullptr;
304 return &Stack.back().first[Size - 2];
305 }
306 const SharingMapTy *getSecondOnStackOrNull() const {
307 return const_cast<DSAStackTy &>(*this).getSecondOnStackOrNull();
308 }
309
310 /// Get the stack element at a certain level (previously returned by
311 /// \c getNestingLevel).
312 ///
313 /// Note that nesting levels count from outermost to innermost, and this is
314 /// the reverse of our iteration order where new inner levels are pushed at
315 /// the front of the stack.
316 SharingMapTy &getStackElemAtLevel(unsigned Level) {
317 assert(Level < getStackSize() && "no such stack element");
318 return Stack.back().first[Level];
319 }
320 const SharingMapTy &getStackElemAtLevel(unsigned Level) const {
321 return const_cast<DSAStackTy &>(*this).getStackElemAtLevel(Level);
322 }
323
324 DSAVarData getDSA(const_iterator &Iter, ValueDecl *D) const;
325
326 /// Checks if the variable is a local for OpenMP region.
327 bool isOpenMPLocal(VarDecl *D, const_iterator Iter) const;
328
329 /// Vector of previously declared requires directives
331 /// omp_allocator_handle_t type.
332 QualType OMPAllocatorHandleT;
333 /// omp_depend_t type.
334 QualType OMPDependT;
335 /// omp_event_handle_t type.
336 QualType OMPEventHandleT;
337 /// omp_alloctrait_t type.
338 QualType OMPAlloctraitT;
339 /// Expression for the predefined allocators.
340 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
341 nullptr};
342 /// Vector of previously encountered target directives
343 SmallVector<SourceLocation, 2> TargetLocations;
344 SourceLocation AtomicLocation;
345 /// Vector of declare variant construct traits.
347
348public:
349 explicit DSAStackTy(Sema &S) : SemaRef(S) {}
350
351 /// Sets omp_allocator_handle_t type.
352 void setOMPAllocatorHandleT(QualType Ty) { OMPAllocatorHandleT = Ty; }
353 /// Gets omp_allocator_handle_t type.
354 QualType getOMPAllocatorHandleT() const { return OMPAllocatorHandleT; }
355 /// Sets omp_alloctrait_t type.
356 void setOMPAlloctraitT(QualType Ty) { OMPAlloctraitT = Ty; }
357 /// Gets omp_alloctrait_t type.
358 QualType getOMPAlloctraitT() const { return OMPAlloctraitT; }
359 /// Sets the given default allocator.
360 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
361 Expr *Allocator) {
362 OMPPredefinedAllocators[AllocatorKind] = Allocator;
363 }
364 /// Returns the specified default allocator.
365 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind) const {
366 return OMPPredefinedAllocators[AllocatorKind];
367 }
368 /// Sets omp_depend_t type.
369 void setOMPDependT(QualType Ty) { OMPDependT = Ty; }
370 /// Gets omp_depend_t type.
371 QualType getOMPDependT() const { return OMPDependT; }
372
373 /// Sets omp_event_handle_t type.
374 void setOMPEventHandleT(QualType Ty) { OMPEventHandleT = Ty; }
375 /// Gets omp_event_handle_t type.
376 QualType getOMPEventHandleT() const { return OMPEventHandleT; }
377
378 bool isClauseParsingMode() const { return ClauseKindMode != OMPC_unknown; }
379 OpenMPClauseKind getClauseParsingMode() const {
380 assert(isClauseParsingMode() && "Must be in clause parsing mode.");
381 return ClauseKindMode;
382 }
383 void setClauseParsingMode(OpenMPClauseKind K) { ClauseKindMode = K; }
384
385 bool isBodyComplete() const {
386 const SharingMapTy *Top = getTopOfStackOrNull();
387 return Top && Top->BodyComplete;
388 }
389 void setBodyComplete() { getTopOfStack().BodyComplete = true; }
390
391 bool isForceVarCapturing() const { return ForceCapturing; }
392 void setForceVarCapturing(bool V) { ForceCapturing = V; }
393
394 void setForceCaptureByReferenceInTargetExecutable(bool V) {
395 ForceCaptureByReferenceInTargetExecutable = V;
396 }
397 bool isForceCaptureByReferenceInTargetExecutable() const {
398 return ForceCaptureByReferenceInTargetExecutable;
399 }
400
401 void push(OpenMPDirectiveKind DKind, const DeclarationNameInfo &DirName,
402 Scope *CurScope, SourceLocation Loc) {
403 assert(!IgnoredStackElements &&
404 "cannot change stack while ignoring elements");
405 if (Stack.empty() ||
406 Stack.back().second != CurrentNonCapturingFunctionScope)
407 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
408 Stack.back().first.emplace_back(DKind, DirName, CurScope, Loc);
409 Stack.back().first.back().DefaultAttrLoc = Loc;
410 }
411
412 void pop() {
413 assert(!IgnoredStackElements &&
414 "cannot change stack while ignoring elements");
415 assert(!Stack.back().first.empty() &&
416 "Data-sharing attributes stack is empty!");
417 Stack.back().first.pop_back();
418 }
419
420 /// RAII object to temporarily leave the scope of a directive when we want to
421 /// logically operate in its parent.
422 class ParentDirectiveScope {
423 DSAStackTy &Self;
424 bool Active;
425
426 public:
427 ParentDirectiveScope(DSAStackTy &Self, bool Activate)
428 : Self(Self), Active(false) {
429 if (Activate)
430 enable();
431 }
432 ~ParentDirectiveScope() { disable(); }
433 void disable() {
434 if (Active) {
435 --Self.IgnoredStackElements;
436 Active = false;
437 }
438 }
439 void enable() {
440 if (!Active) {
441 ++Self.IgnoredStackElements;
442 Active = true;
443 }
444 }
445 };
446
447 /// Marks that we're started loop parsing.
448 void loopInit() {
449 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
450 "Expected loop-based directive.");
451 getTopOfStack().LoopStart = true;
452 }
453 /// Start capturing of the variables in the loop context.
454 void loopStart() {
455 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
456 "Expected loop-based directive.");
457 getTopOfStack().LoopStart = false;
458 }
459 /// true, if variables are captured, false otherwise.
460 bool isLoopStarted() const {
461 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
462 "Expected loop-based directive.");
463 return !getTopOfStack().LoopStart;
464 }
465 /// Marks (or clears) declaration as possibly loop counter.
466 void resetPossibleLoopCounter(const Decl *D = nullptr) {
467 getTopOfStack().PossiblyLoopCounter = D ? D->getCanonicalDecl() : D;
468 }
469 /// Gets the possible loop counter decl.
470 const Decl *getPossiblyLoopCounter() const {
471 return getTopOfStack().PossiblyLoopCounter;
472 }
473 /// Start new OpenMP region stack in new non-capturing function.
474 void pushFunction() {
475 assert(!IgnoredStackElements &&
476 "cannot change stack while ignoring elements");
477 const FunctionScopeInfo *CurFnScope = SemaRef.getCurFunction();
478 assert(!isa<CapturingScopeInfo>(CurFnScope));
479 CurrentNonCapturingFunctionScope = CurFnScope;
480 }
481 /// Pop region stack for non-capturing function.
482 void popFunction(const FunctionScopeInfo *OldFSI) {
483 assert(!IgnoredStackElements &&
484 "cannot change stack while ignoring elements");
485 if (!Stack.empty() && Stack.back().second == OldFSI) {
486 assert(Stack.back().first.empty());
487 Stack.pop_back();
488 }
489 CurrentNonCapturingFunctionScope = nullptr;
490 for (const FunctionScopeInfo *FSI : llvm::reverse(SemaRef.FunctionScopes)) {
491 if (!isa<CapturingScopeInfo>(FSI)) {
492 CurrentNonCapturingFunctionScope = FSI;
493 break;
494 }
495 }
496 }
497
498 void addCriticalWithHint(const OMPCriticalDirective *D, llvm::APSInt Hint) {
499 Criticals.try_emplace(D->getDirectiveName().getAsString(), D, Hint);
500 }
501 const std::pair<const OMPCriticalDirective *, llvm::APSInt>
502 getCriticalWithHint(const DeclarationNameInfo &Name) const {
503 auto I = Criticals.find(Name.getAsString());
504 if (I != Criticals.end())
505 return I->second;
506 return std::make_pair(nullptr, llvm::APSInt());
507 }
508 /// If 'aligned' declaration for given variable \a D was not seen yet,
509 /// add it and return NULL; otherwise return previous occurrence's expression
510 /// for diagnostics.
511 const Expr *addUniqueAligned(const ValueDecl *D, const Expr *NewDE);
512 /// If 'nontemporal' declaration for given variable \a D was not seen yet,
513 /// add it and return NULL; otherwise return previous occurrence's expression
514 /// for diagnostics.
515 const Expr *addUniqueNontemporal(const ValueDecl *D, const Expr *NewDE);
516
517 /// Register specified variable as loop control variable.
518 void addLoopControlVariable(const ValueDecl *D, VarDecl *Capture);
519 /// Check if the specified variable is a loop control variable for
520 /// current region.
521 /// \return The index of the loop control variable in the list of associated
522 /// for-loops (from outer to inner).
523 const LCDeclInfo isLoopControlVariable(const ValueDecl *D) const;
524 /// Check if the specified variable is a loop control variable for
525 /// parent region.
526 /// \return The index of the loop control variable in the list of associated
527 /// for-loops (from outer to inner).
528 const LCDeclInfo isParentLoopControlVariable(const ValueDecl *D) const;
529 /// Check if the specified variable is a loop control variable for
530 /// current region.
531 /// \return The index of the loop control variable in the list of associated
532 /// for-loops (from outer to inner).
533 const LCDeclInfo isLoopControlVariable(const ValueDecl *D,
534 unsigned Level) const;
535 /// Get the loop control variable for the I-th loop (or nullptr) in
536 /// parent directive.
537 const ValueDecl *getParentLoopControlVariable(unsigned I) const;
538
539 /// Marks the specified decl \p D as used in scan directive.
540 void markDeclAsUsedInScanDirective(ValueDecl *D) {
541 if (SharingMapTy *Stack = getSecondOnStackOrNull())
542 Stack->UsedInScanDirective.insert(D);
543 }
544
545 /// Checks if the specified declaration was used in the inner scan directive.
546 bool isUsedInScanDirective(ValueDecl *D) const {
547 if (const SharingMapTy *Stack = getTopOfStackOrNull())
548 return Stack->UsedInScanDirective.contains(D);
549 return false;
550 }
551
552 /// Adds explicit data sharing attribute to the specified declaration.
553 void addDSA(const ValueDecl *D, const Expr *E, OpenMPClauseKind A,
554 DeclRefExpr *PrivateCopy = nullptr, unsigned Modifier = 0,
555 bool AppliedToPointee = false);
556
557 /// Adds additional information for the reduction items with the reduction id
558 /// represented as an operator.
559 void addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
561 /// Adds additional information for the reduction items with the reduction id
562 /// represented as reduction identifier.
563 void addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
564 const Expr *ReductionRef);
565 /// Returns the location and reduction operation from the innermost parent
566 /// region for the given \p D.
567 const DSAVarData
568 getTopMostTaskgroupReductionData(const ValueDecl *D, SourceRange &SR,
570 Expr *&TaskgroupDescriptor) const;
571 /// Returns the location and reduction operation from the innermost parent
572 /// region for the given \p D.
573 const DSAVarData
574 getTopMostTaskgroupReductionData(const ValueDecl *D, SourceRange &SR,
575 const Expr *&ReductionRef,
576 Expr *&TaskgroupDescriptor) const;
577 /// Return reduction reference expression for the current taskgroup or
578 /// parallel/worksharing directives with task reductions.
579 Expr *getTaskgroupReductionRef() const {
580 assert((getTopOfStack().Directive == OMPD_taskgroup ||
581 ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
582 isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
583 !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
584 "taskgroup reference expression requested for non taskgroup or "
585 "parallel/worksharing directive.");
586 return getTopOfStack().TaskgroupReductionRef;
587 }
588 /// Checks if the given \p VD declaration is actually a taskgroup reduction
589 /// descriptor variable at the \p Level of OpenMP regions.
590 bool isTaskgroupReductionRef(const ValueDecl *VD, unsigned Level) const {
591 return getStackElemAtLevel(Level).TaskgroupReductionRef &&
592 cast<DeclRefExpr>(getStackElemAtLevel(Level).TaskgroupReductionRef)
593 ->getDecl() == VD;
594 }
595
596 /// Returns data sharing attributes from top of the stack for the
597 /// specified declaration.
598 const DSAVarData getTopDSA(ValueDecl *D, bool FromParent);
599 /// Returns data-sharing attributes for the specified declaration.
600 const DSAVarData getImplicitDSA(ValueDecl *D, bool FromParent) const;
601 /// Returns data-sharing attributes for the specified declaration.
602 const DSAVarData getImplicitDSA(ValueDecl *D, unsigned Level) const;
603 /// Checks if the specified variables has data-sharing attributes which
604 /// match specified \a CPred predicate in any directive which matches \a DPred
605 /// predicate.
606 const DSAVarData
607 hasDSA(ValueDecl *D,
608 const llvm::function_ref<bool(OpenMPClauseKind, bool,
609 DefaultDataSharingAttributes)>
610 CPred,
611 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
612 bool FromParent) const;
613 /// Checks if the specified variables has data-sharing attributes which
614 /// match specified \a CPred predicate in any innermost directive which
615 /// matches \a DPred predicate.
616 const DSAVarData
617 hasInnermostDSA(ValueDecl *D,
618 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
619 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
620 bool FromParent) const;
621 /// Checks if the specified variables has explicit data-sharing
622 /// attributes which match specified \a CPred predicate at the specified
623 /// OpenMP region.
624 bool
625 hasExplicitDSA(const ValueDecl *D,
626 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
627 unsigned Level, bool NotLastprivate = false) const;
628
629 /// Returns true if the directive at level \Level matches in the
630 /// specified \a DPred predicate.
631 bool hasExplicitDirective(
632 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
633 unsigned Level) const;
634
635 /// Finds a directive which matches specified \a DPred predicate.
636 bool hasDirective(
637 const llvm::function_ref<bool(
639 DPred,
640 bool FromParent) const;
641
642 /// Returns currently analyzed directive.
643 OpenMPDirectiveKind getCurrentDirective() const {
644 const SharingMapTy *Top = getTopOfStackOrNull();
645 return Top ? Top->Directive : OMPD_unknown;
646 }
647 OpenMPDirectiveKind getMappedDirective() const {
648 const SharingMapTy *Top = getTopOfStackOrNull();
649 return Top ? Top->MappedDirective : OMPD_unknown;
650 }
651 void setCurrentDirective(OpenMPDirectiveKind NewDK) {
652 SharingMapTy *Top = getTopOfStackOrNull();
653 assert(Top &&
654 "Before calling setCurrentDirective Top of Stack not to be NULL.");
655 // Store the old into MappedDirective & assign argument NewDK to Directive.
656 Top->Directive = NewDK;
657 }
658 void setMappedDirective(OpenMPDirectiveKind NewDK) {
659 SharingMapTy *Top = getTopOfStackOrNull();
660 assert(Top &&
661 "Before calling setMappedDirective Top of Stack not to be NULL.");
662 // Store the old into MappedDirective & assign argument NewDK to Directive.
663 Top->MappedDirective = NewDK;
664 }
665 /// Returns directive kind at specified level.
666 OpenMPDirectiveKind getDirective(unsigned Level) const {
667 assert(!isStackEmpty() && "No directive at specified level.");
668 return getStackElemAtLevel(Level).Directive;
669 }
670 /// Returns the capture region at the specified level.
671 OpenMPDirectiveKind getCaptureRegion(unsigned Level,
672 unsigned OpenMPCaptureLevel) const {
674 getOpenMPCaptureRegions(CaptureRegions, getDirective(Level));
675 return CaptureRegions[OpenMPCaptureLevel];
676 }
677 /// Returns parent directive.
678 OpenMPDirectiveKind getParentDirective() const {
679 const SharingMapTy *Parent = getSecondOnStackOrNull();
680 return Parent ? Parent->Directive : OMPD_unknown;
681 }
682
683 /// Add requires decl to internal vector
684 void addRequiresDecl(OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
685
686 /// Checks if the defined 'requires' directive has specified type of clause.
687 template <typename ClauseType> bool hasRequiresDeclWithClause() const {
688 return llvm::any_of(RequiresDecls, [](const OMPRequiresDecl *D) {
689 return llvm::any_of(D->clauselists(), [](const OMPClause *C) {
690 return isa<ClauseType>(C);
691 });
692 });
693 }
694
695 /// Checks for a duplicate clause amongst previously declared requires
696 /// directives
697 bool hasDuplicateRequiresClause(ArrayRef<OMPClause *> ClauseList) const {
698 bool IsDuplicate = false;
699 for (OMPClause *CNew : ClauseList) {
700 for (const OMPRequiresDecl *D : RequiresDecls) {
701 for (const OMPClause *CPrev : D->clauselists()) {
702 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
703 SemaRef.Diag(CNew->getBeginLoc(),
704 diag::err_omp_requires_clause_redeclaration)
705 << getOpenMPClauseName(CNew->getClauseKind());
706 SemaRef.Diag(CPrev->getBeginLoc(),
707 diag::note_omp_requires_previous_clause)
708 << getOpenMPClauseName(CPrev->getClauseKind());
709 IsDuplicate = true;
710 }
711 }
712 }
713 }
714 return IsDuplicate;
715 }
716
717 /// Add location of previously encountered target to internal vector
718 void addTargetDirLocation(SourceLocation LocStart) {
719 TargetLocations.push_back(LocStart);
720 }
721
722 /// Add location for the first encountered atomic directive.
723 void addAtomicDirectiveLoc(SourceLocation Loc) {
724 if (AtomicLocation.isInvalid())
725 AtomicLocation = Loc;
726 }
727
728 /// Returns the location of the first encountered atomic directive in the
729 /// module.
730 SourceLocation getAtomicDirectiveLoc() const { return AtomicLocation; }
731
732 // Return previously encountered target region locations.
733 ArrayRef<SourceLocation> getEncounteredTargetLocs() const {
734 return TargetLocations;
735 }
736
737 /// Set default data sharing attribute to none.
738 void setDefaultDSANone(SourceLocation Loc) {
739 getTopOfStack().DefaultAttr = DSA_none;
740 getTopOfStack().DefaultAttrLoc = Loc;
741 }
742 /// Set default data sharing attribute to shared.
743 void setDefaultDSAShared(SourceLocation Loc) {
744 getTopOfStack().DefaultAttr = DSA_shared;
745 getTopOfStack().DefaultAttrLoc = Loc;
746 }
747 /// Set default data sharing attribute to private.
748 void setDefaultDSAPrivate(SourceLocation Loc) {
749 getTopOfStack().DefaultAttr = DSA_private;
750 getTopOfStack().DefaultAttrLoc = Loc;
751 }
752 /// Set default data sharing attribute to firstprivate.
753 void setDefaultDSAFirstPrivate(SourceLocation Loc) {
754 getTopOfStack().DefaultAttr = DSA_firstprivate;
755 getTopOfStack().DefaultAttrLoc = Loc;
756 }
757 /// Set default data mapping attribute to Modifier:Kind
758 void setDefaultDMAAttr(OpenMPDefaultmapClauseModifier M,
760 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[Kind];
761 DMI.ImplicitBehavior = M;
762 DMI.SLoc = Loc;
763 }
764 /// Check whether the implicit-behavior has been set in defaultmap
765 bool checkDefaultmapCategory(OpenMPDefaultmapClauseKind VariableCategory) {
766 if (VariableCategory == OMPC_DEFAULTMAP_unknown)
767 return getTopOfStack()
768 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
769 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown ||
770 getTopOfStack()
771 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
772 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown ||
773 getTopOfStack()
774 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
775 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown;
776 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
778 }
779
780 ArrayRef<llvm::omp::TraitProperty> getConstructTraits() {
781 return ConstructTraits;
782 }
783 void handleConstructTrait(ArrayRef<llvm::omp::TraitProperty> Traits,
784 bool ScopeEntry) {
785 if (ScopeEntry)
786 ConstructTraits.append(Traits.begin(), Traits.end());
787 else
788 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
789 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
790 assert(Top == Trait && "Something left a trait on the stack!");
791 (void)Trait;
792 (void)Top;
793 }
794 }
795
796 DefaultDataSharingAttributes getDefaultDSA(unsigned Level) const {
797 return getStackSize() <= Level ? DSA_unspecified
798 : getStackElemAtLevel(Level).DefaultAttr;
799 }
800 DefaultDataSharingAttributes getDefaultDSA() const {
801 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
802 }
803 SourceLocation getDefaultDSALocation() const {
804 return isStackEmpty() ? SourceLocation() : getTopOfStack().DefaultAttrLoc;
805 }
807 getDefaultmapModifier(OpenMPDefaultmapClauseKind Kind) const {
808 return isStackEmpty()
810 : getTopOfStack().DefaultmapMap[Kind].ImplicitBehavior;
811 }
813 getDefaultmapModifierAtLevel(unsigned Level,
814 OpenMPDefaultmapClauseKind Kind) const {
815 return getStackElemAtLevel(Level).DefaultmapMap[Kind].ImplicitBehavior;
816 }
817 bool isDefaultmapCapturedByRef(unsigned Level,
818 OpenMPDefaultmapClauseKind Kind) const {
820 getDefaultmapModifierAtLevel(Level, Kind);
821 if (Kind == OMPC_DEFAULTMAP_scalar || Kind == OMPC_DEFAULTMAP_pointer) {
822 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
823 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
824 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
825 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom);
826 }
827 return true;
828 }
829 static bool mustBeFirstprivateBase(OpenMPDefaultmapClauseModifier M,
831 switch (Kind) {
832 case OMPC_DEFAULTMAP_scalar:
833 case OMPC_DEFAULTMAP_pointer:
834 return (M == OMPC_DEFAULTMAP_MODIFIER_unknown) ||
835 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
836 (M == OMPC_DEFAULTMAP_MODIFIER_default);
837 case OMPC_DEFAULTMAP_aggregate:
838 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
839 default:
840 break;
841 }
842 llvm_unreachable("Unexpected OpenMPDefaultmapClauseKind enum");
843 }
844 bool mustBeFirstprivateAtLevel(unsigned Level,
845 OpenMPDefaultmapClauseKind Kind) const {
847 getDefaultmapModifierAtLevel(Level, Kind);
848 return mustBeFirstprivateBase(M, Kind);
849 }
850 bool mustBeFirstprivate(OpenMPDefaultmapClauseKind Kind) const {
851 OpenMPDefaultmapClauseModifier M = getDefaultmapModifier(Kind);
852 return mustBeFirstprivateBase(M, Kind);
853 }
854
855 /// Checks if the specified variable is a threadprivate.
856 bool isThreadPrivate(VarDecl *D) {
857 const DSAVarData DVar = getTopDSA(D, false);
858 return isOpenMPThreadPrivate(DVar.CKind);
859 }
860
861 /// Marks current region as ordered (it has an 'ordered' clause).
862 void setOrderedRegion(bool IsOrdered, const Expr *Param,
863 OMPOrderedClause *Clause) {
864 if (IsOrdered)
865 getTopOfStack().OrderedRegion.emplace(Param, Clause);
866 else
867 getTopOfStack().OrderedRegion.reset();
868 }
869 /// Returns true, if region is ordered (has associated 'ordered' clause),
870 /// false - otherwise.
871 bool isOrderedRegion() const {
872 if (const SharingMapTy *Top = getTopOfStackOrNull())
873 return Top->OrderedRegion.has_value();
874 return false;
875 }
876 /// Returns optional parameter for the ordered region.
877 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam() const {
878 if (const SharingMapTy *Top = getTopOfStackOrNull())
879 if (Top->OrderedRegion)
880 return *Top->OrderedRegion;
881 return std::make_pair(nullptr, nullptr);
882 }
883 /// Returns true, if parent region is ordered (has associated
884 /// 'ordered' clause), false - otherwise.
885 bool isParentOrderedRegion() const {
886 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
887 return Parent->OrderedRegion.has_value();
888 return false;
889 }
890 /// Returns optional parameter for the ordered region.
891 std::pair<const Expr *, OMPOrderedClause *>
892 getParentOrderedRegionParam() const {
893 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
894 if (Parent->OrderedRegion)
895 return *Parent->OrderedRegion;
896 return std::make_pair(nullptr, nullptr);
897 }
898 /// Marks current region as having an 'order' clause.
899 void setRegionHasOrderConcurrent(bool HasOrderConcurrent) {
900 getTopOfStack().RegionHasOrderConcurrent = HasOrderConcurrent;
901 }
902 /// Returns true, if parent region is order (has associated
903 /// 'order' clause), false - otherwise.
904 bool isParentOrderConcurrent() const {
905 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
906 return Parent->RegionHasOrderConcurrent;
907 return false;
908 }
909 /// Marks current region as nowait (it has a 'nowait' clause).
910 void setNowaitRegion(bool IsNowait = true) {
911 getTopOfStack().NowaitRegion = IsNowait;
912 }
913 /// Returns true, if parent region is nowait (has associated
914 /// 'nowait' clause), false - otherwise.
915 bool isParentNowaitRegion() const {
916 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
917 return Parent->NowaitRegion;
918 return false;
919 }
920 /// Marks current region as untied (it has a 'untied' clause).
921 void setUntiedRegion(bool IsUntied = true) {
922 getTopOfStack().UntiedRegion = IsUntied;
923 }
924 /// Return true if current region is untied.
925 bool isUntiedRegion() const {
926 const SharingMapTy *Top = getTopOfStackOrNull();
927 return Top ? Top->UntiedRegion : false;
928 }
929 /// Marks parent region as cancel region.
930 void setParentCancelRegion(bool Cancel = true) {
931 if (SharingMapTy *Parent = getSecondOnStackOrNull())
932 Parent->CancelRegion |= Cancel;
933 }
934 /// Return true if current region has inner cancel construct.
935 bool isCancelRegion() const {
936 const SharingMapTy *Top = getTopOfStackOrNull();
937 return Top ? Top->CancelRegion : false;
938 }
939
940 /// Mark that parent region already has scan directive.
941 void setParentHasScanDirective(SourceLocation Loc) {
942 if (SharingMapTy *Parent = getSecondOnStackOrNull())
943 Parent->PrevScanLocation = Loc;
944 }
945 /// Return true if current region has inner cancel construct.
946 bool doesParentHasScanDirective() const {
947 const SharingMapTy *Top = getSecondOnStackOrNull();
948 return Top ? Top->PrevScanLocation.isValid() : false;
949 }
950 /// Return true if current region has inner cancel construct.
951 SourceLocation getParentScanDirectiveLoc() const {
952 const SharingMapTy *Top = getSecondOnStackOrNull();
953 return Top ? Top->PrevScanLocation : SourceLocation();
954 }
955 /// Mark that parent region already has ordered directive.
956 void setParentHasOrderedDirective(SourceLocation Loc) {
957 if (SharingMapTy *Parent = getSecondOnStackOrNull())
958 Parent->PrevOrderedLocation = Loc;
959 }
960 /// Return true if current region has inner ordered construct.
961 bool doesParentHasOrderedDirective() const {
962 const SharingMapTy *Top = getSecondOnStackOrNull();
963 return Top ? Top->PrevOrderedLocation.isValid() : false;
964 }
965 /// Returns the location of the previously specified ordered directive.
966 SourceLocation getParentOrderedDirectiveLoc() const {
967 const SharingMapTy *Top = getSecondOnStackOrNull();
968 return Top ? Top->PrevOrderedLocation : SourceLocation();
969 }
970
971 /// Set collapse value for the region.
972 void setAssociatedLoops(unsigned Val) {
973 getTopOfStack().AssociatedLoops = Val;
974 if (Val > 1)
975 getTopOfStack().HasMutipleLoops = true;
976 }
977 /// Return collapse value for region.
978 unsigned getAssociatedLoops() const {
979 const SharingMapTy *Top = getTopOfStackOrNull();
980 return Top ? Top->AssociatedLoops : 0;
981 }
982 /// Returns true if the construct is associated with multiple loops.
983 bool hasMutipleLoops() const {
984 const SharingMapTy *Top = getTopOfStackOrNull();
985 return Top ? Top->HasMutipleLoops : false;
986 }
987
988 /// Marks current target region as one with closely nested teams
989 /// region.
990 void setParentTeamsRegionLoc(SourceLocation TeamsRegionLoc) {
991 if (SharingMapTy *Parent = getSecondOnStackOrNull())
992 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
993 }
994 /// Returns true, if current region has closely nested teams region.
995 bool hasInnerTeamsRegion() const {
996 return getInnerTeamsRegionLoc().isValid();
997 }
998 /// Returns location of the nested teams region (if any).
999 SourceLocation getInnerTeamsRegionLoc() const {
1000 const SharingMapTy *Top = getTopOfStackOrNull();
1001 return Top ? Top->InnerTeamsRegionLoc : SourceLocation();
1002 }
1003
1004 Scope *getCurScope() const {
1005 const SharingMapTy *Top = getTopOfStackOrNull();
1006 return Top ? Top->CurScope : nullptr;
1007 }
1008 void setContext(DeclContext *DC) { getTopOfStack().Context = DC; }
1009 SourceLocation getConstructLoc() const {
1010 const SharingMapTy *Top = getTopOfStackOrNull();
1011 return Top ? Top->ConstructLoc : SourceLocation();
1012 }
1013
1014 /// Do the check specified in \a Check to all component lists and return true
1015 /// if any issue is found.
1016 bool checkMappableExprComponentListsForDecl(
1017 const ValueDecl *VD, bool CurrentRegionOnly,
1018 const llvm::function_ref<
1021 Check) const {
1022 if (isStackEmpty())
1023 return false;
1024 auto SI = begin();
1025 auto SE = end();
1026
1027 if (SI == SE)
1028 return false;
1029
1030 if (CurrentRegionOnly)
1031 SE = std::next(SI);
1032 else
1033 std::advance(SI, 1);
1034
1035 for (; SI != SE; ++SI) {
1036 auto MI = SI->MappedExprComponents.find(VD);
1037 if (MI != SI->MappedExprComponents.end())
1039 MI->second.Components)
1040 if (Check(L, MI->second.Kind))
1041 return true;
1042 }
1043 return false;
1044 }
1045
1046 /// Do the check specified in \a Check to all component lists at a given level
1047 /// and return true if any issue is found.
1048 bool checkMappableExprComponentListsForDeclAtLevel(
1049 const ValueDecl *VD, unsigned Level,
1050 const llvm::function_ref<
1053 Check) const {
1054 if (getStackSize() <= Level)
1055 return false;
1056
1057 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1058 auto MI = StackElem.MappedExprComponents.find(VD);
1059 if (MI != StackElem.MappedExprComponents.end())
1061 MI->second.Components)
1062 if (Check(L, MI->second.Kind))
1063 return true;
1064 return false;
1065 }
1066
1067 /// Create a new mappable expression component list associated with a given
1068 /// declaration and initialize it with the provided list of components.
1069 void addMappableExpressionComponents(
1070 const ValueDecl *VD,
1072 OpenMPClauseKind WhereFoundClauseKind) {
1073 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1074 // Create new entry and append the new components there.
1075 MEC.Components.resize(MEC.Components.size() + 1);
1076 MEC.Components.back().append(Components.begin(), Components.end());
1077 MEC.Kind = WhereFoundClauseKind;
1078 }
1079
1080 unsigned getNestingLevel() const {
1081 assert(!isStackEmpty());
1082 return getStackSize() - 1;
1083 }
1084 void addDoacrossDependClause(OMPClause *C, const OperatorOffsetTy &OpsOffs) {
1085 SharingMapTy *Parent = getSecondOnStackOrNull();
1086 assert(Parent && isOpenMPWorksharingDirective(Parent->Directive));
1087 Parent->DoacrossDepends.try_emplace(C, OpsOffs);
1088 }
1089 llvm::iterator_range<DoacrossClauseMapTy::const_iterator>
1090 getDoacrossDependClauses() const {
1091 const SharingMapTy &StackElem = getTopOfStack();
1092 if (isOpenMPWorksharingDirective(StackElem.Directive)) {
1093 const DoacrossClauseMapTy &Ref = StackElem.DoacrossDepends;
1094 return llvm::make_range(Ref.begin(), Ref.end());
1095 }
1096 return llvm::make_range(StackElem.DoacrossDepends.end(),
1097 StackElem.DoacrossDepends.end());
1098 }
1099
1100 // Store types of classes which have been explicitly mapped
1101 void addMappedClassesQualTypes(QualType QT) {
1102 SharingMapTy &StackElem = getTopOfStack();
1103 StackElem.MappedClassesQualTypes.insert(QT);
1104 }
1105
1106 // Return set of mapped classes types
1107 bool isClassPreviouslyMapped(QualType QT) const {
1108 const SharingMapTy &StackElem = getTopOfStack();
1109 return StackElem.MappedClassesQualTypes.contains(QT);
1110 }
1111
1112 /// Adds global declare target to the parent target region.
1113 void addToParentTargetRegionLinkGlobals(DeclRefExpr *E) {
1114 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1115 E->getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1116 "Expected declare target link global.");
1117 for (auto &Elem : *this) {
1118 if (isOpenMPTargetExecutionDirective(Elem.Directive)) {
1119 Elem.DeclareTargetLinkVarDecls.push_back(E);
1120 return;
1121 }
1122 }
1123 }
1124
1125 /// Returns the list of globals with declare target link if current directive
1126 /// is target.
1127 ArrayRef<DeclRefExpr *> getLinkGlobals() const {
1128 assert(isOpenMPTargetExecutionDirective(getCurrentDirective()) &&
1129 "Expected target executable directive.");
1130 return getTopOfStack().DeclareTargetLinkVarDecls;
1131 }
1132
1133 /// Adds list of allocators expressions.
1134 void addInnerAllocatorExpr(Expr *E) {
1135 getTopOfStack().InnerUsedAllocators.push_back(E);
1136 }
1137 /// Return list of used allocators.
1138 ArrayRef<Expr *> getInnerAllocators() const {
1139 return getTopOfStack().InnerUsedAllocators;
1140 }
1141 /// Marks the declaration as implicitly firstprivate nin the task-based
1142 /// regions.
1143 void addImplicitTaskFirstprivate(unsigned Level, Decl *D) {
1144 getStackElemAtLevel(Level).ImplicitTaskFirstprivates.insert(D);
1145 }
1146 /// Checks if the decl is implicitly firstprivate in the task-based region.
1147 bool isImplicitTaskFirstprivate(Decl *D) const {
1148 return getTopOfStack().ImplicitTaskFirstprivates.contains(D);
1149 }
1150
1151 /// Marks decl as used in uses_allocators clause as the allocator.
1152 void addUsesAllocatorsDecl(const Decl *D, UsesAllocatorsDeclKind Kind) {
1153 getTopOfStack().UsesAllocatorsDecls.try_emplace(D, Kind);
1154 }
1155 /// Checks if specified decl is used in uses allocator clause as the
1156 /// allocator.
1157 std::optional<UsesAllocatorsDeclKind>
1158 isUsesAllocatorsDecl(unsigned Level, const Decl *D) const {
1159 const SharingMapTy &StackElem = getTopOfStack();
1160 auto I = StackElem.UsesAllocatorsDecls.find(D);
1161 if (I == StackElem.UsesAllocatorsDecls.end())
1162 return std::nullopt;
1163 return I->getSecond();
1164 }
1165 std::optional<UsesAllocatorsDeclKind>
1166 isUsesAllocatorsDecl(const Decl *D) const {
1167 const SharingMapTy &StackElem = getTopOfStack();
1168 auto I = StackElem.UsesAllocatorsDecls.find(D);
1169 if (I == StackElem.UsesAllocatorsDecls.end())
1170 return std::nullopt;
1171 return I->getSecond();
1172 }
1173
1174 void addDeclareMapperVarRef(Expr *Ref) {
1175 SharingMapTy &StackElem = getTopOfStack();
1176 StackElem.DeclareMapperVar = Ref;
1177 }
1178 const Expr *getDeclareMapperVarRef() const {
1179 const SharingMapTy *Top = getTopOfStackOrNull();
1180 return Top ? Top->DeclareMapperVar : nullptr;
1181 }
1182
1183 /// Add a new iterator variable.
1184 void addIteratorVarDecl(VarDecl *VD) {
1185 SharingMapTy &StackElem = getTopOfStack();
1186 StackElem.IteratorVarDecls.push_back(VD->getCanonicalDecl());
1187 }
1188 /// Check if variable declaration is an iterator VarDecl.
1189 bool isIteratorVarDecl(const VarDecl *VD) const {
1190 const SharingMapTy *Top = getTopOfStackOrNull();
1191 if (!Top)
1192 return false;
1193
1194 return llvm::is_contained(Top->IteratorVarDecls, VD->getCanonicalDecl());
1195 }
  /// Returns the captured-expression variable created for field \p FD in the
  /// nearest enclosing region whose default clause is firstprivate/private,
  /// or null when there is no such region or no matching capture.
  VarDecl *getImplicitFDCapExprDecl(const FieldDecl *FD) const {
    const_iterator I = begin();
    const_iterator EndI = end();
    size_t StackLevel = getStackSize();
    // Walk inward-to-outward; StackLevel tracks the 1-based level of the
    // region currently pointed to by I.
    for (; I != EndI; ++I) {
      if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
        break;
      StackLevel--;
    }
    assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
    if (I == EndI)
      return nullptr;
    // Only a capture recorded at exactly this stack level matches.
    for (const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
      if (IFD.FD == FD && IFD.StackLevel == StackLevel)
        return IFD.VD;
    return nullptr;
  }
1214 /// Check if capture decl is field captured in ImplicitDefaultFirstprivateFDs
1215 bool isImplicitDefaultFirstprivateFD(VarDecl *VD) const {
1216 const_iterator I = begin();
1217 const_iterator EndI = end();
1218 for (; I != EndI; ++I)
1219 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1220 break;
1221 if (I == EndI)
1222 return false;
1223 for (const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1224 if (IFD.VD == VD)
1225 return true;
1226 return false;
1227 }
  /// Records the (field, stack level, captured variable) triple on the
  /// nearest enclosing region whose default clause is private/firstprivate.
  /// No-op (with the invariant asserted) when no such region exists.
  void addImplicitDefaultFirstprivateFD(const FieldDecl *FD, VarDecl *VD) {
    iterator I = begin();
    const_iterator EndI = end();
    size_t StackLevel = getStackSize();
    // StackLevel tracks the 1-based level of the region pointed to by I.
    for (; I != EndI; ++I) {
      if (I->DefaultAttr == DSA_private || I->DefaultAttr == DSA_firstprivate) {
        I->ImplicitDefaultFirstprivateFDs.emplace_back(FD, StackLevel, VD);
        break;
      }
      StackLevel--;
    }
    assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
  }
1242};
1243
1244bool isImplicitTaskingRegion(OpenMPDirectiveKind DKind) {
1245 return isOpenMPParallelDirective(DKind) || isOpenMPTeamsDirective(DKind);
1246}
1247
1248bool isImplicitOrExplicitTaskingRegion(OpenMPDirectiveKind DKind) {
1249 return isImplicitTaskingRegion(DKind) || isOpenMPTaskingDirective(DKind) ||
1250 DKind == OMPD_unknown;
1251}
1252
1253} // namespace
1254
1255static const Expr *getExprAsWritten(const Expr *E) {
1256 if (const auto *FE = dyn_cast<FullExpr>(E))
1257 E = FE->getSubExpr();
1258
1259 if (const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1260 E = MTE->getSubExpr();
1261
1262 while (const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(E))
1263 E = Binder->getSubExpr();
1264
1265 if (const auto *ICE = dyn_cast<ImplicitCastExpr>(E))
1266 E = ICE->getSubExprAsWritten();
1267 return E->IgnoreParens();
1268}
1269
1271 return const_cast<Expr *>(getExprAsWritten(const_cast<const Expr *>(E)));
1272}
1273
1274static const ValueDecl *getCanonicalDecl(const ValueDecl *D) {
1275 if (const auto *CED = dyn_cast<OMPCapturedExprDecl>(D))
1276 if (const auto *ME = dyn_cast<MemberExpr>(getExprAsWritten(CED->getInit())))
1277 D = ME->getMemberDecl();
1278 const auto *VD = dyn_cast<VarDecl>(D);
1279 const auto *FD = dyn_cast<FieldDecl>(D);
1280 if (VD != nullptr) {
1281 VD = VD->getCanonicalDecl();
1282 D = VD;
1283 } else {
1284 assert(FD);
1285 FD = FD->getCanonicalDecl();
1286 D = FD;
1287 }
1288 return D;
1289}
1290
1292 return const_cast<ValueDecl *>(
1293 getCanonicalDecl(const_cast<const ValueDecl *>(D)));
1294}
1295
/// Computes the data-sharing attributes of \p D for the region designated by
/// \p Iter, recursing outward through enclosing regions when the attribute
/// is inherited (OpenMP 5.x, 2.9.1.1 / 2.9.1.2 rules).
DSAStackTy::DSAVarData DSAStackTy::getDSA(const_iterator &Iter,
                                          ValueDecl *D) const {
  D = getCanonicalDecl(D);
  auto *VD = dyn_cast<VarDecl>(D);
  const auto *FD = dyn_cast<FieldDecl>(D);
  DSAVarData DVar;
  if (Iter == end()) {
    // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
    // in a region but not in construct]
    //  File-scope or namespace-scope variables referenced in called routines
    //  in the region are shared unless they appear in a threadprivate
    //  directive.
    if (VD && !VD->isFunctionOrMethodVarDecl() && !isa<ParmVarDecl>(VD))
      DVar.CKind = OMPC_shared;

    // OpenMP [2.9.1.2, Data-sharing Attribute Rules for Variables Referenced
    // in a region but not in construct]
    //  Variables with static storage duration that are declared in called
    //  routines in the region are shared.
    if (VD && VD->hasGlobalStorage())
      DVar.CKind = OMPC_shared;

    // Non-static data members are shared by default.
    if (FD)
      DVar.CKind = OMPC_shared;

    return DVar;
  }

  // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
  // in a Construct, C/C++, predetermined, p.1]
  //  Variables with automatic storage duration that are declared in a scope
  //  inside the construct are private.
  if (VD && isOpenMPLocal(VD, Iter) && VD->isLocalVarDecl() &&
      (VD->getStorageClass() == SC_Auto || VD->getStorageClass() == SC_None)) {
    DVar.CKind = OMPC_private;
    return DVar;
  }

  DVar.DKind = Iter->Directive;
  // Explicitly specified attributes and local variables with predetermined
  // attributes take precedence over any default clause.
  if (Iter->SharingMap.count(D)) {
    const DSAInfo &Data = Iter->SharingMap.lookup(D);
    DVar.RefExpr = Data.RefExpr.getPointer();
    DVar.PrivateCopy = Data.PrivateCopy;
    DVar.CKind = Data.Attributes;
    DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
    DVar.Modifier = Data.Modifier;
    DVar.AppliedToPointee = Data.AppliedToPointee;
    return DVar;
  }

  // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
  // in a Construct, C/C++, implicitly determined, p.1]
  //  In a parallel or task construct, the data-sharing attributes of these
  //  variables are determined by the default clause, if present.
  switch (Iter->DefaultAttr) {
  case DSA_shared:
    DVar.CKind = OMPC_shared;
    DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
    return DVar;
  case DSA_none:
    // default(none): leave CKind unknown so the caller can diagnose.
    return DVar;
  case DSA_firstprivate:
    // File-scope statics are excluded from default(firstprivate).
    if (VD && VD->getStorageDuration() == SD_Static &&
        VD->getDeclContext()->isFileContext()) {
      DVar.CKind = OMPC_unknown;
    } else {
      DVar.CKind = OMPC_firstprivate;
    }
    DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
    return DVar;
  case DSA_private:
    // each variable with static storage duration that is declared
    // in a namespace or global scope and referenced in the construct,
    // and that does not have a predetermined data-sharing attribute
    if (VD && VD->getStorageDuration() == SD_Static &&
        VD->getDeclContext()->isFileContext()) {
      DVar.CKind = OMPC_unknown;
    } else {
      DVar.CKind = OMPC_private;
    }
    DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
    return DVar;
  case DSA_unspecified:
    // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
    // in a Construct, implicitly determined, p.2]
    //  In a parallel construct, if no default clause is present, these
    //  variables are shared.
    DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
    if ((isOpenMPParallelDirective(DVar.DKind) &&
         !isOpenMPTaskLoopDirective(DVar.DKind)) ||
        isOpenMPTeamsDirective(DVar.DKind)) {
      DVar.CKind = OMPC_shared;
      return DVar;
    }

    // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
    // in a Construct, implicitly determined, p.4]
    //  In a task construct, if no default clause is present, a variable that in
    //  the enclosing context is determined to be shared by all implicit tasks
    //  bound to the current team is shared.
    if (isOpenMPTaskingDirective(DVar.DKind)) {
      DSAVarData DVarTemp;
      const_iterator I = Iter, E = end();
      do {
        ++I;
        // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables
        // Referenced in a Construct, implicitly determined, p.6]
        //  In a task construct, if no default clause is present, a variable
        //  whose data-sharing attribute is not determined by the rules above is
        //  firstprivate.
        DVarTemp = getDSA(I, D);
        if (DVarTemp.CKind != OMPC_shared) {
          DVar.RefExpr = nullptr;
          DVar.CKind = OMPC_firstprivate;
          return DVar;
        }
      } while (I != E && !isImplicitTaskingRegion(I->Directive));
      DVar.CKind =
          (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
      return DVar;
    }
  }
  // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
  // in a Construct, implicitly determined, p.3]
  //  For constructs other than task, if no default clause is present, these
  //  variables inherit their data-sharing attributes from the enclosing
  //  context.
  return getDSA(++Iter, D);
}
1428
1429const Expr *DSAStackTy::addUniqueAligned(const ValueDecl *D,
1430 const Expr *NewDE) {
1431 assert(!isStackEmpty() && "Data sharing attributes stack is empty");
1432 D = getCanonicalDecl(D);
1433 SharingMapTy &StackElem = getTopOfStack();
1434 auto It = StackElem.AlignedMap.find(D);
1435 if (It == StackElem.AlignedMap.end()) {
1436 assert(NewDE && "Unexpected nullptr expr to be added into aligned map");
1437 StackElem.AlignedMap[D] = NewDE;
1438 return nullptr;
1439 }
1440 assert(It->second && "Unexpected nullptr expr in the aligned map");
1441 return It->second;
1442}
1443
1444const Expr *DSAStackTy::addUniqueNontemporal(const ValueDecl *D,
1445 const Expr *NewDE) {
1446 assert(!isStackEmpty() && "Data sharing attributes stack is empty");
1447 D = getCanonicalDecl(D);
1448 SharingMapTy &StackElem = getTopOfStack();
1449 auto It = StackElem.NontemporalMap.find(D);
1450 if (It == StackElem.NontemporalMap.end()) {
1451 assert(NewDE && "Unexpected nullptr expr to be added into aligned map");
1452 StackElem.NontemporalMap[D] = NewDE;
1453 return nullptr;
1454 }
1455 assert(It->second && "Unexpected nullptr expr in the aligned map");
1456 return It->second;
1457}
1458
1459void DSAStackTy::addLoopControlVariable(const ValueDecl *D, VarDecl *Capture) {
1460 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1461 D = getCanonicalDecl(D);
1462 SharingMapTy &StackElem = getTopOfStack();
1463 StackElem.LCVMap.try_emplace(
1464 D, LCDeclInfo(StackElem.LCVMap.size() + 1, Capture));
1465}
1466
1467const DSAStackTy::LCDeclInfo
1468DSAStackTy::isLoopControlVariable(const ValueDecl *D) const {
1469 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1470 D = getCanonicalDecl(D);
1471 const SharingMapTy &StackElem = getTopOfStack();
1472 auto It = StackElem.LCVMap.find(D);
1473 if (It != StackElem.LCVMap.end())
1474 return It->second;
1475 return {0, nullptr};
1476}
1477
1478const DSAStackTy::LCDeclInfo
1479DSAStackTy::isLoopControlVariable(const ValueDecl *D, unsigned Level) const {
1480 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1481 D = getCanonicalDecl(D);
1482 for (unsigned I = Level + 1; I > 0; --I) {
1483 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1484 auto It = StackElem.LCVMap.find(D);
1485 if (It != StackElem.LCVMap.end())
1486 return It->second;
1487 }
1488 return {0, nullptr};
1489}
1490
1491const DSAStackTy::LCDeclInfo
1492DSAStackTy::isParentLoopControlVariable(const ValueDecl *D) const {
1493 const SharingMapTy *Parent = getSecondOnStackOrNull();
1494 assert(Parent && "Data-sharing attributes stack is empty");
1495 D = getCanonicalDecl(D);
1496 auto It = Parent->LCVMap.find(D);
1497 if (It != Parent->LCVMap.end())
1498 return It->second;
1499 return {0, nullptr};
1500}
1501
1502const ValueDecl *DSAStackTy::getParentLoopControlVariable(unsigned I) const {
1503 const SharingMapTy *Parent = getSecondOnStackOrNull();
1504 assert(Parent && "Data-sharing attributes stack is empty");
1505 if (Parent->LCVMap.size() < I)
1506 return nullptr;
1507 for (const auto &Pair : Parent->LCVMap)
1508 if (Pair.second.first == I)
1509 return Pair.first;
1510 return nullptr;
1511}
1512
/// Records data-sharing attribute \p A for declaration \p D, optionally with
/// a private copy and a clause modifier. Threadprivate attributes go into a
/// dedicated map; everything else is stored on the innermost region.
void DSAStackTy::addDSA(const ValueDecl *D, const Expr *E, OpenMPClauseKind A,
                        DeclRefExpr *PrivateCopy, unsigned Modifier,
                        bool AppliedToPointee) {
  D = getCanonicalDecl(D);
  if (A == OMPC_threadprivate) {
    DSAInfo &Data = Threadprivates[D];
    Data.Attributes = A;
    Data.RefExpr.setPointer(E);
    Data.PrivateCopy = nullptr;
    Data.Modifier = Modifier;
  } else {
    DSAInfo &Data = getTopOfStack().SharingMap[D];
    // Only these re-registrations are legal: same attribute, mixing
    // firstprivate with lastprivate, or privatizing a loop control variable.
    assert(Data.Attributes == OMPC_unknown || (A == Data.Attributes) ||
           (A == OMPC_firstprivate && Data.Attributes == OMPC_lastprivate) ||
           (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) ||
           (isLoopControlVariable(D).first && A == OMPC_private));
    Data.Modifier = Modifier;
    if (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) {
      // Keep the firstprivate entry; the pointer-int pair's int bit records
      // that the variable is lastprivate as well.
      Data.RefExpr.setInt(/*IntVal=*/true);
      return;
    }
    const bool IsLastprivate =
        A == OMPC_lastprivate || Data.Attributes == OMPC_lastprivate;
    Data.Attributes = A;
    Data.RefExpr.setPointerAndInt(E, IsLastprivate);
    Data.PrivateCopy = PrivateCopy;
    Data.AppliedToPointee = AppliedToPointee;
    if (PrivateCopy) {
      // Mirror the attribute onto the private-copy declaration itself.
      DSAInfo &Data = getTopOfStack().SharingMap[PrivateCopy->getDecl()];
      Data.Modifier = Modifier;
      Data.Attributes = A;
      Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
      Data.PrivateCopy = nullptr;
      Data.AppliedToPointee = AppliedToPointee;
    }
  }
}
1550
1551/// Build a variable declaration for OpenMP loop iteration variable.
1553 StringRef Name, const AttrVec *Attrs = nullptr,
1554 DeclRefExpr *OrigRef = nullptr) {
1555 DeclContext *DC = SemaRef.CurContext;
1556 IdentifierInfo *II = &SemaRef.PP.getIdentifierTable().get(Name);
1558 auto *Decl =
1559 VarDecl::Create(SemaRef.Context, DC, Loc, Loc, II, Type, TInfo, SC_None);
1560 if (Attrs) {
1561 for (specific_attr_iterator<AlignedAttr> I(Attrs->begin()), E(Attrs->end());
1562 I != E; ++I)
1563 Decl->addAttr(*I);
1564 }
1565 Decl->setImplicit();
1566 if (OrigRef) {
1567 Decl->addAttr(
1568 OMPReferencedVarAttr::CreateImplicit(SemaRef.Context, OrigRef));
1569 }
1570 return Decl;
1571}
1572
1575 bool RefersToCapture = false) {
1576 D->setReferenced();
1577 D->markUsed(S.Context);
1579 SourceLocation(), D, RefersToCapture, Loc, Ty,
1580 VK_LValue);
1581}
1582
/// Records the reduction operator \p BOK (and its source range) for
/// reduction item \p D on the innermost region, and lazily creates the
/// shared taskgroup reduction descriptor variable for that region.
void DSAStackTy::addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
                                           BinaryOperatorKind BOK) {
  D = getCanonicalDecl(D);
  assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
  assert(
      getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
      "Additional reduction info may be specified only for reduction items.");
  ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
  assert(ReductionData.ReductionRange.isInvalid() &&
         (getTopOfStack().Directive == OMPD_taskgroup ||
          ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
            isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
           !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
         "Additional reduction info may be specified only once for reduction "
         "items.");
  ReductionData.set(BOK, SR);
  Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
  if (!TaskgroupReductionRef) {
    // First reduction item in this region: materialize the descriptor.
    VarDecl *VD = buildVarDecl(SemaRef, SR.getBegin(),
                               SemaRef.Context.VoidPtrTy, ".task_red.");
    TaskgroupReductionRef =
        buildDeclRefExpr(SemaRef, VD, SemaRef.Context.VoidPtrTy, SR.getBegin());
  }
}
1607
/// Overload taking a user-defined reduction reference expression instead of
/// a builtin operator; otherwise identical to the BinaryOperatorKind form.
void DSAStackTy::addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
                                           const Expr *ReductionRef) {
  D = getCanonicalDecl(D);
  assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
  assert(
      getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
      "Additional reduction info may be specified only for reduction items.");
  ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
  assert(ReductionData.ReductionRange.isInvalid() &&
         (getTopOfStack().Directive == OMPD_taskgroup ||
          ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
            isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
           !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
         "Additional reduction info may be specified only once for reduction "
         "items.");
  ReductionData.set(ReductionRef, SR);
  Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
  if (!TaskgroupReductionRef) {
    // First reduction item in this region: materialize the descriptor.
    VarDecl *VD = buildVarDecl(SemaRef, SR.getBegin(),
                               SemaRef.Context.VoidPtrTy, ".task_red.");
    TaskgroupReductionRef =
        buildDeclRefExpr(SemaRef, VD, SemaRef.Context.VoidPtrTy, SR.getBegin());
  }
}
1632
/// Finds, in the enclosing regions (skipping the innermost), the topmost
/// task-modified reduction of \p D expressed with a builtin operator;
/// returns its DSA and fills \p SR, \p BOK and \p TaskgroupDescriptor.
/// Returns an empty DSAVarData when none is found or the reduction was
/// declared via a reduction identifier expression instead.
const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
    const ValueDecl *D, SourceRange &SR, BinaryOperatorKind &BOK,
    Expr *&TaskgroupDescriptor) const {
  D = getCanonicalDecl(D);
  assert(!isStackEmpty() && "Data-sharing attributes stack is empty.");
  for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
    const DSAInfo &Data = I->SharingMap.lookup(D);
    if (Data.Attributes != OMPC_reduction ||
        Data.Modifier != OMPC_REDUCTION_task)
      continue;
    const ReductionData &ReductionData = I->ReductionMap.lookup(D);
    // A stored Expr* means a user-defined reduction - not this overload.
    if (!ReductionData.ReductionOp ||
        ReductionData.ReductionOp.is<const Expr *>())
      return DSAVarData();
    SR = ReductionData.ReductionRange;
    BOK = ReductionData.ReductionOp.get<ReductionData::BOKPtrType>();
    assert(I->TaskgroupReductionRef && "taskgroup reduction reference "
                                       "expression for the descriptor is not "
                                       "set.");
    TaskgroupDescriptor = I->TaskgroupReductionRef;
    return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
                      Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
                      /*AppliedToPointee=*/false);
  }
  return DSAVarData();
}
1659
/// Overload for reductions declared via a reduction identifier expression;
/// fills \p ReductionRef instead of a BinaryOperatorKind. Returns an empty
/// DSAVarData when none is found or the reduction used a builtin operator.
const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
    const ValueDecl *D, SourceRange &SR, const Expr *&ReductionRef,
    Expr *&TaskgroupDescriptor) const {
  D = getCanonicalDecl(D);
  assert(!isStackEmpty() && "Data-sharing attributes stack is empty.");
  for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
    const DSAInfo &Data = I->SharingMap.lookup(D);
    if (Data.Attributes != OMPC_reduction ||
        Data.Modifier != OMPC_REDUCTION_task)
      continue;
    const ReductionData &ReductionData = I->ReductionMap.lookup(D);
    // A stored operator kind means a builtin reduction - not this overload.
    if (!ReductionData.ReductionOp ||
        !ReductionData.ReductionOp.is<const Expr *>())
      return DSAVarData();
    SR = ReductionData.ReductionRange;
    ReductionRef = ReductionData.ReductionOp.get<const Expr *>();
    assert(I->TaskgroupReductionRef && "taskgroup reduction reference "
                                       "expression for the descriptor is not "
                                       "set.");
    TaskgroupDescriptor = I->TaskgroupReductionRef;
    return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
                      Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
                      /*AppliedToPointee=*/false);
  }
  return DSAVarData();
}
1686
1687bool DSAStackTy::isOpenMPLocal(VarDecl *D, const_iterator I) const {
1688 D = D->getCanonicalDecl();
1689 for (const_iterator E = end(); I != E; ++I) {
1690 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
1691 isOpenMPTargetExecutionDirective(I->Directive)) {
1692 if (I->CurScope) {
1693 Scope *TopScope = I->CurScope->getParent();
1694 Scope *CurScope = getCurScope();
1695 while (CurScope && CurScope != TopScope && !CurScope->isDeclScope(D))
1696 CurScope = CurScope->getParent();
1697 return CurScope != TopScope;
1698 }
1699 for (DeclContext *DC = D->getDeclContext(); DC; DC = DC->getParent())
1700 if (I->Context == DC)
1701 return true;
1702 return false;
1703 }
1704 }
1705 return false;
1706}
1707
1709 bool AcceptIfMutable = true,
1710 bool *IsClassType = nullptr) {
1711 ASTContext &Context = SemaRef.getASTContext();
1712 Type = Type.getNonReferenceType().getCanonicalType();
1713 bool IsConstant = Type.isConstant(Context);
1714 Type = Context.getBaseElementType(Type);
1715 const CXXRecordDecl *RD = AcceptIfMutable && SemaRef.getLangOpts().CPlusPlus
1717 : nullptr;
1718 if (const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1719 if (const ClassTemplateDecl *CTD = CTSD->getSpecializedTemplate())
1720 RD = CTD->getTemplatedDecl();
1721 if (IsClassType)
1722 *IsClassType = RD;
1723 return IsConstant && !(SemaRef.getLangOpts().CPlusPlus && RD &&
1724 RD->hasDefinition() && RD->hasMutableFields());
1725}
1726
1727static bool rejectConstNotMutableType(Sema &SemaRef, const ValueDecl *D,
1729 SourceLocation ELoc,
1730 bool AcceptIfMutable = true,
1731 bool ListItemNotVar = false) {
1732 ASTContext &Context = SemaRef.getASTContext();
1733 bool IsClassType;
1734 if (isConstNotMutableType(SemaRef, Type, AcceptIfMutable, &IsClassType)) {
1735 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1736 : IsClassType ? diag::err_omp_const_not_mutable_variable
1737 : diag::err_omp_const_variable;
1738 SemaRef.Diag(ELoc, Diag) << getOpenMPClauseName(CKind);
1739 if (!ListItemNotVar && D) {
1740 const VarDecl *VD = dyn_cast<VarDecl>(D);
1741 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
1743 SemaRef.Diag(D->getLocation(),
1744 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
1745 << D;
1746 }
1747 return true;
1748 }
1749 return false;
1750}
1751
// Compute the data-sharing attributes that apply to \p D at the innermost
// region on the stack (or at its parent when \p FromParent is set).
// Predetermined attributes -- threadprivate in its several forms, shared
// static data members, and the pre-OpenMP-3.1 const rule -- are resolved
// before explicit clauses are consulted.
1752const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(ValueDecl *D,
1753 bool FromParent) {
1754 D = getCanonicalDecl(D);
1755 DSAVarData DVar;
1756
1757 auto *VD = dyn_cast<VarDecl>(D);
 // Declarations recorded via an explicit 'threadprivate' directive win over
 // everything else.
1758 auto TI = Threadprivates.find(D);
1759 if (TI != Threadprivates.end()) {
1760 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1761 DVar.CKind = OMPC_threadprivate;
1762 DVar.Modifier = TI->getSecond().Modifier;
1763 return DVar;
1764 }
 // A threadprivate attribute attached to the decl (e.g. from a different
 // translation context) is cached onto the stack the first time it is seen.
1765 if (VD && VD->hasAttr<OMPThreadPrivateDeclAttr>()) {
1766 DVar.RefExpr = buildDeclRefExpr(
1767 SemaRef, VD, D->getType().getNonReferenceType(),
1768 VD->getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1769 DVar.CKind = OMPC_threadprivate;
1770 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1771 return DVar;
1772 }
1773 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1774 // in a Construct, C/C++, predetermined, p.1]
1775 // Variables appearing in threadprivate directives are threadprivate.
 // TLS variables (unless implemented via the OpenMP-TLS path) and named
 // 'register asm' globals are likewise treated as threadprivate.
1776 if ((VD && VD->getTLSKind() != VarDecl::TLS_None &&
1777 !(VD->hasAttr<OMPThreadPrivateDeclAttr>() &&
1778 SemaRef.getLangOpts().OpenMPUseTLS &&
1779 SemaRef.getASTContext().getTargetInfo().isTLSSupported())) ||
1780 (VD && VD->getStorageClass() == SC_Register &&
1781 VD->hasAttr<AsmLabelAttr>() && !VD->isLocalVarDecl())) {
1782 DVar.RefExpr = buildDeclRefExpr(
1783 SemaRef, VD, D->getType().getNonReferenceType(), D->getLocation());
1784 DVar.CKind = OMPC_threadprivate;
1785 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1786 return DVar;
1787 }
 // Under -fopenmp-cuda-mode, a local variable or parameter referenced across
 // a 'target' region boundary is reported as threadprivate in the cases
 // below (locals of an enclosing region, privatized items, and items not
 // captured by reference).
1788 if (SemaRef.getLangOpts().OpenMPCUDAMode && VD &&
1789 VD->isLocalVarDeclOrParm() && !isStackEmpty() &&
1790 !isLoopControlVariable(D).first) {
1791 const_iterator IterTarget =
1792 std::find_if(begin(), end(), [](const SharingMapTy &Data) {
1793 return isOpenMPTargetExecutionDirective(Data.Directive);
1794 });
1795 if (IterTarget != end()) {
1796 const_iterator ParentIterTarget = IterTarget + 1;
1797 for (const_iterator Iter = begin(); Iter != ParentIterTarget; ++Iter) {
1798 if (isOpenMPLocal(VD, Iter)) {
1799 DVar.RefExpr =
1800 buildDeclRefExpr(SemaRef, VD, D->getType().getNonReferenceType(),
1801 D->getLocation());
1802 DVar.CKind = OMPC_threadprivate;
1803 return DVar;
1804 }
1805 }
1806 if (!isClauseParsingMode() || IterTarget != begin()) {
1807 auto DSAIter = IterTarget->SharingMap.find(D);
1808 if (DSAIter != IterTarget->SharingMap.end() &&
1809 isOpenMPPrivate(DSAIter->getSecond().Attributes)) {
1810 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1811 DVar.CKind = OMPC_threadprivate;
1812 return DVar;
1813 }
1814 const_iterator End = end();
1815 if (!SemaRef.OpenMP().isOpenMPCapturedByRef(
1816 D, std::distance(ParentIterTarget, End),
1817 /*OpenMPCaptureLevel=*/0)) {
1818 DVar.RefExpr =
1819 buildDeclRefExpr(SemaRef, VD, D->getType().getNonReferenceType(),
1820 IterTarget->ConstructLoc);
1821 DVar.CKind = OMPC_threadprivate;
1822 return DVar;
1823 }
1824 }
1825 }
1826 }
1827
1828 if (isStackEmpty())
1829 // Not in OpenMP execution region and top scope was already checked.
1830 return DVar;
1831
1832 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1833 // in a Construct, C/C++, predetermined, p.4]
1834 // Static data members are shared.
1835 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1836 // in a Construct, C/C++, predetermined, p.7]
1837 // Variables with static storage duration that are declared in a scope
1838 // inside the construct are shared.
1839 if (VD && VD->isStaticDataMember()) {
1840 // Check for explicitly specified attributes.
1841 const_iterator I = begin();
1842 const_iterator EndI = end();
1843 if (FromParent && I != EndI)
1844 ++I;
 // An explicit clause on the region being inspected overrides the
 // predetermined 'shared' attribute of a static data member.
1845 if (I != EndI) {
1846 auto It = I->SharingMap.find(D);
1847 if (It != I->SharingMap.end()) {
1848 const DSAInfo &Data = It->getSecond();
1849 DVar.RefExpr = Data.RefExpr.getPointer();
1850 DVar.PrivateCopy = Data.PrivateCopy;
1851 DVar.CKind = Data.Attributes;
1852 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1853 DVar.DKind = I->Directive;
1854 DVar.Modifier = Data.Modifier;
1855 DVar.AppliedToPointee = Data.AppliedToPointee;
1856 return DVar;
1857 }
1858 }
1859
1860 DVar.CKind = OMPC_shared;
1861 return DVar;
1862 }
1863
1864 auto &&MatchesAlways = [](OpenMPDirectiveKind) { return true; };
1865 // The predetermined shared attribute for const-qualified types having no
1866 // mutable members was removed after OpenMP 3.1.
1867 if (SemaRef.LangOpts.OpenMP <= 31) {
1868 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1869 // in a Construct, C/C++, predetermined, p.6]
1870 // Variables with const qualified type having no mutable member are
1871 // shared.
1872 if (isConstNotMutableType(SemaRef, D->getType())) {
1873 // Variables with const-qualified type having no mutable member may be
1874 // listed in a firstprivate clause, even if they are static data members.
1875 DSAVarData DVarTemp = hasInnermostDSA(
1876 D,
1877 [](OpenMPClauseKind C, bool) {
1878 return C == OMPC_firstprivate || C == OMPC_shared;
1879 },
1880 MatchesAlways, FromParent);
1881 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1882 return DVarTemp;
1883
1884 DVar.CKind = OMPC_shared;
1885 return DVar;
1886 }
1887 }
1888
1889 // Explicitly specified attributes and local variables with predetermined
1890 // attributes.
1891 const_iterator I = begin();
1892 const_iterator EndI = end();
1893 if (FromParent && I != EndI)
1894 ++I;
1895 if (I == EndI)
1896 return DVar;
1897 auto It = I->SharingMap.find(D);
1898 if (It != I->SharingMap.end()) {
1899 const DSAInfo &Data = It->getSecond();
1900 DVar.RefExpr = Data.RefExpr.getPointer();
1901 DVar.PrivateCopy = Data.PrivateCopy;
1902 DVar.CKind = Data.Attributes;
1903 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1904 DVar.DKind = I->Directive;
1905 DVar.Modifier = Data.Modifier;
1906 DVar.AppliedToPointee = Data.AppliedToPointee;
1907 }
1908
1909 return DVar;
1910}
1911
1912const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1913 bool FromParent) const {
1914 if (isStackEmpty()) {
1915 const_iterator I;
1916 return getDSA(I, D);
1917 }
1918 D = getCanonicalDecl(D);
1919 const_iterator StartI = begin();
1920 const_iterator EndI = end();
1921 if (FromParent && StartI != EndI)
1922 ++StartI;
1923 return getDSA(StartI, D);
1924}
1925
1926const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1927 unsigned Level) const {
1928 if (getStackSize() <= Level)
1929 return DSAVarData();
1930 D = getCanonicalDecl(D);
1931 const_iterator StartI = std::next(begin(), getStackSize() - 1 - Level);
1932 return getDSA(StartI, D);
1933}
1934
1935const DSAStackTy::DSAVarData
1936DSAStackTy::hasDSA(ValueDecl *D,
1937 const llvm::function_ref<bool(OpenMPClauseKind, bool,
1938 DefaultDataSharingAttributes)>
1939 CPred,
1940 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
1941 bool FromParent) const {
1942 if (isStackEmpty())
1943 return {};
1944 D = getCanonicalDecl(D);
1945 const_iterator I = begin();
1946 const_iterator EndI = end();
1947 if (FromParent && I != EndI)
1948 ++I;
1949 for (; I != EndI; ++I) {
1950 if (!DPred(I->Directive) &&
1951 !isImplicitOrExplicitTaskingRegion(I->Directive))
1952 continue;
1953 const_iterator NewI = I;
1954 DSAVarData DVar = getDSA(NewI, D);
1955 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee, I->DefaultAttr))
1956 return DVar;
1957 }
1958 return {};
1959}
1960
1961const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1962 ValueDecl *D, const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
1963 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
1964 bool FromParent) const {
1965 if (isStackEmpty())
1966 return {};
1967 D = getCanonicalDecl(D);
1968 const_iterator StartI = begin();
1969 const_iterator EndI = end();
1970 if (FromParent && StartI != EndI)
1971 ++StartI;
1972 if (StartI == EndI || !DPred(StartI->Directive))
1973 return {};
1974 const_iterator NewI = StartI;
1975 DSAVarData DVar = getDSA(NewI, D);
1976 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
1977 ? DVar
1978 : DSAVarData();
1979}
1980
1981bool DSAStackTy::hasExplicitDSA(
1982 const ValueDecl *D,
1983 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
1984 unsigned Level, bool NotLastprivate) const {
1985 if (getStackSize() <= Level)
1986 return false;
1987 D = getCanonicalDecl(D);
1988 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1989 auto I = StackElem.SharingMap.find(D);
1990 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
1991 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
1992 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
1993 return true;
1994 // Check predetermined rules for the loop control variables.
1995 auto LI = StackElem.LCVMap.find(D);
1996 if (LI != StackElem.LCVMap.end())
1997 return CPred(OMPC_private, /*AppliedToPointee=*/false);
1998 return false;
1999}
2000
2001bool DSAStackTy::hasExplicitDirective(
2002 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
2003 unsigned Level) const {
2004 if (getStackSize() <= Level)
2005 return false;
2006 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
2007 return DPred(StackElem.Directive);
2008}
2009
// Return true if any enclosing directive (the innermost region itself is
// always skipped; with FromParent set, its parent is skipped too) satisfies
// \p DPred, which receives the directive kind, its name and its location.
// NOTE(review): the middle line of the function_ref parameter type (original
// line 2012) was dropped by the listing extraction; the code is otherwise
// unchanged.
2010bool DSAStackTy::hasDirective(
2011 const llvm::function_ref<bool(OpenMPDirectiveKind,
2013 DPred,
2014 bool FromParent) const {
2015 // We look only in the enclosing region.
2016 size_t Skip = FromParent ? 2 : 1;
2017 for (const_iterator I = begin() + std::min(Skip, getStackSize()), E = end();
2018 I != E; ++I) {
2019 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
2020 return true;
2021 }
2022 return false;
2023}
2024
// Allocate the data-sharing attributes stack used by all OpenMP semantic
// analysis in this SemaOpenMP instance. Presumably released by a matching
// teardown routine elsewhere in this file -- not visible here; confirm.
2025void SemaOpenMP::InitDataSharingAttributesStack() {
2026 VarDataSharingAttributesStack = new DSAStackTy(SemaRef);
2027}
2028
2029#define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
2030
// Open a new function region on the DSA stack; delegates to
// DSAStackTy::pushFunction().
2031void SemaOpenMP::pushOpenMPFunctionRegion() { DSAStack->pushFunction(); }
2032
// Close the current function region on the DSA stack, restoring the state
// associated with \p OldFSI via DSAStackTy::popFunction().
2033void SemaOpenMP::popOpenMPFunctionRegion(const FunctionScopeInfo *OldFSI) {
2034 DSAStack->popFunction(OldFSI);
2035}
2036
2038 assert(S.LangOpts.OpenMP && S.LangOpts.OpenMPIsTargetDevice &&
2039 "Expected OpenMP device compilation.");
2041}
2042
2043namespace {
2044/// Status of the function emission on the host/device.
2045enum class FunctionEmissionStatus {
2046 Emitted, ///< The function is emitted for this compilation.
2047 Discarded, ///< The function is discarded for this compilation.
2048 Unknown, ///< Emission status not yet determined.
2049};
2050} // anonymous namespace
2051
2054 const FunctionDecl *FD) {
2055 assert(getLangOpts().OpenMP && getLangOpts().OpenMPIsTargetDevice &&
2056 "Expected OpenMP device compilation.");
2057
2058 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2059 if (FD) {
2061 switch (FES) {
2063 Kind = SemaDiagnosticBuilder::K_Immediate;
2064 break;
2066 // TODO: We should always delay diagnostics here in case a target
2067 // region is in a function we do not emit. However, as the
2068 // current diagnostics are associated with the function containing
2069 // the target region and we do not emit that one, we would miss out
2070 // on diagnostics for the target region itself. We need to anchor
2071 // the diagnostics with the new generated function *or* ensure we
2072 // emit diagnostics associated with the surrounding function.
2074 ? SemaDiagnosticBuilder::K_Deferred
2075 : SemaDiagnosticBuilder::K_Immediate;
2076 break;
2079 Kind = SemaDiagnosticBuilder::K_Nop;
2080 break;
2082 llvm_unreachable("CUDADiscarded unexpected in OpenMP device compilation");
2083 break;
2084 }
2085 }
2086
2087 return SemaDiagnosticBuilder(Kind, Loc, DiagID, FD, SemaRef);
2088}
2089
2092 const FunctionDecl *FD) {
2093 assert(getLangOpts().OpenMP && !getLangOpts().OpenMPIsTargetDevice &&
2094 "Expected OpenMP host compilation.");
2095
2096 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2097 if (FD) {
2099 switch (FES) {
2101 Kind = SemaDiagnosticBuilder::K_Immediate;
2102 break;
2104 Kind = SemaDiagnosticBuilder::K_Deferred;
2105 break;
2109 Kind = SemaDiagnosticBuilder::K_Nop;
2110 break;
2111 }
2112 }
2113
2114 return SemaDiagnosticBuilder(Kind, Loc, DiagID, FD, SemaRef);
2115}
2116
2119 if (LO.OpenMP <= 45) {
2121 return OMPC_DEFAULTMAP_scalar;
2122 return OMPC_DEFAULTMAP_aggregate;
2123 }
2125 return OMPC_DEFAULTMAP_pointer;
2127 return OMPC_DEFAULTMAP_scalar;
2128 return OMPC_DEFAULTMAP_aggregate;
2129}
2130
2132 unsigned OpenMPCaptureLevel) const {
2133 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
2134
2135 ASTContext &Ctx = getASTContext();
2136 bool IsByRef = true;
2137
2138 // Find the directive that is associated with the provided scope.
2139 D = cast<ValueDecl>(D->getCanonicalDecl());
2140 QualType Ty = D->getType();
2141
2142 bool IsVariableUsedInMapClause = false;
2143 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective, Level)) {
2144 // This table summarizes how a given variable should be passed to the device
2145 // given its type and the clauses where it appears. This table is based on
2146 // the description in OpenMP 4.5 [2.10.4, target Construct] and
2147 // OpenMP 4.5 [2.15.5, Data-mapping Attribute Rules and Clauses].
2148 //
2149 // =========================================================================
2150 // | type | defaultmap | pvt | first | is_device_ptr | map | res. |
2151 // | |(tofrom:scalar)| | pvt | |has_dv_adr| |
2152 // =========================================================================
2153 // | scl | | | | - | | bycopy|
2154 // | scl | | - | x | - | - | bycopy|
2155 // | scl | | x | - | - | - | null |
2156 // | scl | x | | | - | | byref |
2157 // | scl | x | - | x | - | - | bycopy|
2158 // | scl | x | x | - | - | - | null |
2159 // | scl | | - | - | - | x | byref |
2160 // | scl | x | - | - | - | x | byref |
2161 //
2162 // | agg | n.a. | | | - | | byref |
2163 // | agg | n.a. | - | x | - | - | byref |
2164 // | agg | n.a. | x | - | - | - | null |
2165 // | agg | n.a. | - | - | - | x | byref |
2166 // | agg | n.a. | - | - | - | x[] | byref |
2167 //
2168 // | ptr | n.a. | | | - | | bycopy|
2169 // | ptr | n.a. | - | x | - | - | bycopy|
2170 // | ptr | n.a. | x | - | - | - | null |
2171 // | ptr | n.a. | - | - | - | x | byref |
2172 // | ptr | n.a. | - | - | - | x[] | bycopy|
2173 // | ptr | n.a. | - | - | x | | bycopy|
2174 // | ptr | n.a. | - | - | x | x | bycopy|
2175 // | ptr | n.a. | - | - | x | x[] | bycopy|
2176 // =========================================================================
2177 // Legend:
2178 // scl - scalar
2179 // ptr - pointer
2180 // agg - aggregate
2181 // x - applies
2182 // - - invalid in this combination
2183 // [] - mapped with an array section
2184 // byref - should be mapped by reference
2185 // byval - should be mapped by value
2186 // null - initialize a local variable to null on the device
2187 //
2188 // Observations:
2189 // - All scalar declarations that show up in a map clause have to be passed
2190 // by reference, because they may have been mapped in the enclosing data
2191 // environment.
2192 // - If the scalar value does not fit the size of uintptr, it has to be
2193 // passed by reference, regardless the result in the table above.
2194 // - For pointers mapped by value that have either an implicit map or an
2195 // array section, the runtime library may pass the NULL value to the
2196 // device instead of the value passed to it by the compiler.
2197
2198 if (Ty->isReferenceType())
2199 Ty = Ty->castAs<ReferenceType>()->getPointeeType();
2200
2201 // Locate map clauses and see if the variable being captured is referred to
2202 // in any of those clauses. Here we only care about variables, not fields,
2203 // because fields are part of aggregates.
2204 bool IsVariableAssociatedWithSection = false;
2205
2206 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2207 D, Level,
2208 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2210 MapExprComponents,
2211 OpenMPClauseKind WhereFoundClauseKind) {
2212 // Both map and has_device_addr clauses information influences how a
2213 // variable is captured. E.g. is_device_ptr does not require changing
2214 // the default behavior.
2215 if (WhereFoundClauseKind != OMPC_map &&
2216 WhereFoundClauseKind != OMPC_has_device_addr)
2217 return false;
2218
2219 auto EI = MapExprComponents.rbegin();
2220 auto EE = MapExprComponents.rend();
2221
2222 assert(EI != EE && "Invalid map expression!");
2223
2224 if (isa<DeclRefExpr>(EI->getAssociatedExpression()))
2225 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() == D;
2226
2227 ++EI;
2228 if (EI == EE)
2229 return false;
2230 auto Last = std::prev(EE);
2231 const auto *UO =
2232 dyn_cast<UnaryOperator>(Last->getAssociatedExpression());
2233 if ((UO && UO->getOpcode() == UO_Deref) ||
2234 isa<ArraySubscriptExpr>(Last->getAssociatedExpression()) ||
2235 isa<ArraySectionExpr>(Last->getAssociatedExpression()) ||
2236 isa<MemberExpr>(EI->getAssociatedExpression()) ||
2237 isa<OMPArrayShapingExpr>(Last->getAssociatedExpression())) {
2238 IsVariableAssociatedWithSection = true;
2239 // There is nothing more we need to know about this variable.
2240 return true;
2241 }
2242
2243 // Keep looking for more map info.
2244 return false;
2245 });
2246
2247 if (IsVariableUsedInMapClause) {
2248 // If variable is identified in a map clause it is always captured by
2249 // reference except if it is a pointer that is dereferenced somehow.
2250 IsByRef = !(Ty->isPointerType() && IsVariableAssociatedWithSection);
2251 } else {
2252 // By default, all the data that has a scalar type is mapped by copy
2253 // (except for reduction variables).
2254 // Defaultmap scalar is mutual exclusive to defaultmap pointer
2255 IsByRef = (DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2256 !Ty->isAnyPointerType()) ||
2257 !Ty->isScalarType() ||
2258 DSAStack->isDefaultmapCapturedByRef(
2260 DSAStack->hasExplicitDSA(
2261 D,
2262 [](OpenMPClauseKind K, bool AppliedToPointee) {
2263 return K == OMPC_reduction && !AppliedToPointee;
2264 },
2265 Level);
2266 }
2267 }
2268
2269 if (IsByRef && Ty.getNonReferenceType()->isScalarType()) {
2270 IsByRef =
2271 ((IsVariableUsedInMapClause &&
2272 DSAStack->getCaptureRegion(Level, OpenMPCaptureLevel) ==
2273 OMPD_target) ||
2274 !(DSAStack->hasExplicitDSA(
2275 D,
2276 [](OpenMPClauseKind K, bool AppliedToPointee) -> bool {
2277 return K == OMPC_firstprivate ||
2278 (K == OMPC_reduction && AppliedToPointee);
2279 },
2280 Level, /*NotLastprivate=*/true) ||
2281 DSAStack->isUsesAllocatorsDecl(Level, D))) &&
2282 // If the variable is artificial and must be captured by value - try to
2283 // capture by value.
2284 !(isa<OMPCapturedExprDecl>(D) && !D->hasAttr<OMPCaptureNoInitAttr>() &&
2285 !cast<OMPCapturedExprDecl>(D)->getInit()->isGLValue()) &&
2286 // If the variable is implicitly firstprivate and scalar - capture by
2287 // copy
2288 !((DSAStack->getDefaultDSA() == DSA_firstprivate ||
2289 DSAStack->getDefaultDSA() == DSA_private) &&
2290 !DSAStack->hasExplicitDSA(
2291 D, [](OpenMPClauseKind K, bool) { return K != OMPC_unknown; },
2292 Level) &&
2293 !DSAStack->isLoopControlVariable(D, Level).first);
2294 }
2295
2296 // When passing data by copy, we need to make sure it fits the uintptr size
2297 // and alignment, because the runtime library only deals with uintptr types.
2298 // If it does not fit the uintptr size, we need to pass the data by reference
2299 // instead.
2300 if (!IsByRef && (Ctx.getTypeSizeInChars(Ty) >
2302 Ctx.getAlignOfGlobalVarInChars(Ty, dyn_cast<VarDecl>(D)) >
2303 Ctx.getTypeAlignInChars(Ctx.getUIntPtrType()))) {
2304 IsByRef = true;
2305 }
2306
2307 return IsByRef;
2308}
2309
// Current nesting depth of OpenMP regions as tracked by the DSA stack.
// Only meaningful when compiling with OpenMP enabled (asserted below).
2310unsigned SemaOpenMP::getOpenMPNestingLevel() const {
2311 assert(getLangOpts().OpenMP);
2312 return DSAStack->getNestingLevel();
2313}
2314
2316 return isOpenMPTaskingDirective(DSAStack->getCurrentDirective()) &&
2317 DSAStack->isUntiedRegion();
2318}
2319
2321 return (isOpenMPTargetExecutionDirective(DSAStack->getCurrentDirective()) &&
2322 !DSAStack->isClauseParsingMode()) ||
2323 DSAStack->hasDirective(
2325 SourceLocation) -> bool {
2327 },
2328 false);
2329}
2330
2332 // Only rebuild for Field.
2333 if (!dyn_cast<FieldDecl>(D))
2334 return false;
2335 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2336 D,
2337 [](OpenMPClauseKind C, bool AppliedToPointee,
2338 DefaultDataSharingAttributes DefaultAttr) {
2339 return isOpenMPPrivate(C) && !AppliedToPointee &&
2340 (DefaultAttr == DSA_firstprivate || DefaultAttr == DSA_private);
2341 },
2342 [](OpenMPDirectiveKind) { return true; },
2343 DSAStack->isClauseParsingMode());
2344 if (DVarPrivate.CKind != OMPC_unknown)
2345 return true;
2346 return false;
2347}
2348
2350 Expr *CaptureExpr, bool WithInit,
2351 DeclContext *CurContext,
2352 bool AsExpression);
2353
2355 unsigned StopAt) {
2356 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
2357 D = getCanonicalDecl(D);
2358
2359 auto *VD = dyn_cast<VarDecl>(D);
2360 // Do not capture constexpr variables.
2361 if (VD && VD->isConstexpr())
2362 return nullptr;
2363
2364 // If we want to determine whether the variable should be captured from the
2365 // perspective of the current capturing scope, and we've already left all the
2366 // capturing scopes of the top directive on the stack, check from the
2367 // perspective of its parent directive (if any) instead.
2368 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2369 *DSAStack, CheckScopeInfo && DSAStack->isBodyComplete());
2370
2371 // If we are attempting to capture a global variable in a directive with
2372 // 'target' we return true so that this global is also mapped to the device.
2373 //
2374 if (VD && !VD->hasLocalStorage() &&
2376 SemaRef.getCurLambda())) {
2378 DSAStackTy::DSAVarData DVarTop =
2379 DSAStack->getTopDSA(D, DSAStack->isClauseParsingMode());
2380 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2381 return VD;
2382 // If the declaration is enclosed in a 'declare target' directive,
2383 // then it should not be captured.
2384 //
2385 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2386 return nullptr;
2387 CapturedRegionScopeInfo *CSI = nullptr;
2388 for (FunctionScopeInfo *FSI : llvm::drop_begin(
2389 llvm::reverse(SemaRef.FunctionScopes),
2390 CheckScopeInfo ? (SemaRef.FunctionScopes.size() - (StopAt + 1))
2391 : 0)) {
2392 if (!isa<CapturingScopeInfo>(FSI))
2393 return nullptr;
2394 if (auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2395 if (RSI->CapRegionKind == CR_OpenMP) {
2396 CSI = RSI;
2397 break;
2398 }
2399 }
2400 assert(CSI && "Failed to find CapturedRegionScopeInfo");
2403 DSAStack->getDirective(CSI->OpenMPLevel));
2404 if (Regions[CSI->OpenMPCaptureLevel] != OMPD_task)
2405 return VD;
2406 }
2408 // Try to mark variable as declare target if it is used in capturing
2409 // regions.
2410 if (getLangOpts().OpenMP <= 45 &&
2411 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2413 return nullptr;
2414 }
2415 }
2416
2417 if (CheckScopeInfo) {
2418 bool OpenMPFound = false;
2419 for (unsigned I = StopAt + 1; I > 0; --I) {
2421 if (!isa<CapturingScopeInfo>(FSI))
2422 return nullptr;
2423 if (auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2424 if (RSI->CapRegionKind == CR_OpenMP) {
2425 OpenMPFound = true;
2426 break;
2427 }
2428 }
2429 if (!OpenMPFound)
2430 return nullptr;
2431 }
2432
2433 if (DSAStack->getCurrentDirective() != OMPD_unknown &&
2434 (!DSAStack->isClauseParsingMode() ||
2435 DSAStack->getParentDirective() != OMPD_unknown)) {
2436 auto &&Info = DSAStack->isLoopControlVariable(D);
2437 if (Info.first ||
2438 (VD && VD->hasLocalStorage() &&
2439 isImplicitOrExplicitTaskingRegion(DSAStack->getCurrentDirective())) ||
2440 (VD && DSAStack->isForceVarCapturing()))
2441 return VD ? VD : Info.second;
2442 DSAStackTy::DSAVarData DVarTop =
2443 DSAStack->getTopDSA(D, DSAStack->isClauseParsingMode());
2444 if (DVarTop.CKind != OMPC_unknown && isOpenMPPrivate(DVarTop.CKind) &&
2445 (!VD || VD->hasLocalStorage() || !DVarTop.AppliedToPointee))
2446 return VD ? VD : cast<VarDecl>(DVarTop.PrivateCopy->getDecl());
2447 // Threadprivate variables must not be captured.
2448 if (isOpenMPThreadPrivate(DVarTop.CKind))
2449 return nullptr;
2450 // The variable is not private or it is the variable in the directive with
2451 // default(none) clause and not used in any clause.
2452 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2453 D,
2454 [](OpenMPClauseKind C, bool AppliedToPointee, bool) {
2455 return isOpenMPPrivate(C) && !AppliedToPointee;
2456 },
2457 [](OpenMPDirectiveKind) { return true; },
2458 DSAStack->isClauseParsingMode());
2459 // Global shared must not be captured.
2460 if (VD && !VD->hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2461 ((DSAStack->getDefaultDSA() != DSA_none &&
2462 DSAStack->getDefaultDSA() != DSA_private &&
2463 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2464 DVarTop.CKind == OMPC_shared))
2465 return nullptr;
2466 auto *FD = dyn_cast<FieldDecl>(D);
2467 if (DVarPrivate.CKind != OMPC_unknown && !VD && FD &&
2468 !DVarPrivate.PrivateCopy) {
2469 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2470 D,
2471 [](OpenMPClauseKind C, bool AppliedToPointee,
2472 DefaultDataSharingAttributes DefaultAttr) {
2473 return isOpenMPPrivate(C) && !AppliedToPointee &&
2474 (DefaultAttr == DSA_firstprivate ||
2475 DefaultAttr == DSA_private);
2476 },
2477 [](OpenMPDirectiveKind) { return true; },
2478 DSAStack->isClauseParsingMode());
2479 if (DVarPrivate.CKind == OMPC_unknown)
2480 return nullptr;
2481
2482 VarDecl *VD = DSAStack->getImplicitFDCapExprDecl(FD);
2483 if (VD)
2484 return VD;
2486 return nullptr;
2489 /*IsImplicit=*/true);
2490 const CXXScopeSpec CS = CXXScopeSpec();
2492 ThisExpr, /*IsArrow=*/true, SourceLocation(),
2495 /*HadMultipleCandidates=*/false, DeclarationNameInfo(), FD->getType(),
2498 SemaRef, FD->getIdentifier(), ME, DVarPrivate.CKind != OMPC_private,
2499 SemaRef.CurContext->getParent(), /*AsExpression=*/false);
2500 DeclRefExpr *VDPrivateRefExpr = buildDeclRefExpr(
2502 VD = cast<VarDecl>(VDPrivateRefExpr->getDecl());
2503 DSAStack->addImplicitDefaultFirstprivateFD(FD, VD);
2504 return VD;
2505 }
2506 if (DVarPrivate.CKind != OMPC_unknown ||
2507 (VD && (DSAStack->getDefaultDSA() == DSA_none ||
2508 DSAStack->getDefaultDSA() == DSA_private ||
2509 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2510 return VD ? VD : cast<VarDecl>(DVarPrivate.PrivateCopy->getDecl());
2511 }
2512 return nullptr;
2513}
2514
// Rewind \p FunctionScopesIndex past all capture levels introduced by the
// directive recorded at the given DSA-stack level.
2515void SemaOpenMP::adjustOpenMPTargetScopeIndex(unsigned &FunctionScopesIndex,
2516 unsigned Level) const {
2517 FunctionScopesIndex -= getOpenMPCaptureLevels(DSAStack->getDirective(Level));
2518}
2519
2521 assert(getLangOpts().OpenMP && "OpenMP must be enabled.");
2522 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective()))
2523 DSAStack->loopInit();
2524}
2525
2527 assert(getLangOpts().OpenMP && "OpenMP must be enabled.");
2528 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective())) {
2529 DSAStack->resetPossibleLoopCounter();
2530 DSAStack->loopStart();
2531 }
2532}
2533
// Decides whether declaration D must be treated as private (or firstprivate)
// at the given nesting Level / capture level CapLevel, returning the clause
// kind that applies (OMPC_private, OMPC_firstprivate, or OMPC_unknown).
// NOTE(review): the signature line (original 2534) and the SmallVector
// declaration feeding getOpenMPCaptureRegions (original 2563) are missing
// from this excerpt.
2535 unsigned CapLevel) const {
2536 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
// Check default(private): a variable with an applicable private DSA coming
// from default(private) on a captured-expr decl is private here.
2537 if (DSAStack->getCurrentDirective() != OMPD_unknown &&
2538 (!DSAStack->isClauseParsingMode() ||
2539 DSAStack->getParentDirective() != OMPD_unknown)) {
2540 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2541 D,
2542 [](OpenMPClauseKind C, bool AppliedToPointee,
2543 DefaultDataSharingAttributes DefaultAttr) {
2544 return isOpenMPPrivate(C) && !AppliedToPointee &&
2545 DefaultAttr == DSA_private;
2546 },
2547 [](OpenMPDirectiveKind) { return true; },
2548 DSAStack->isClauseParsingMode());
2549 if (DVarPrivate.CKind == OMPC_private && isa<OMPCapturedExprDecl>(D) &&
2550 DSAStack->isImplicitDefaultFirstprivateFD(cast<VarDecl>(D)) &&
2551 !DSAStack->isLoopControlVariable(D).first)
2552 return OMPC_private;
2553 }
// Inside tasking regions, non-shared variables without an explicit DSA become
// implicitly firstprivate (restricted to trivially-copyable types on
// taskloops).
2554 if (DSAStack->hasExplicitDirective(isOpenMPTaskingDirective, Level)) {
2555 bool IsTriviallyCopyable =
2556 D->getType().getNonReferenceType().isTriviallyCopyableType(
2557 getASTContext()) &&
2558 !D->getType()
2559 .getNonReferenceType()
2560 .getCanonicalType()
2561 ->getAsCXXRecordDecl();
2562 OpenMPDirectiveKind DKind = DSAStack->getDirective(Level);
2564 getOpenMPCaptureRegions(CaptureRegions, DKind);
2565 if (isOpenMPTaskingDirective(CaptureRegions[CapLevel]) &&
2566 (IsTriviallyCopyable ||
2567 !isOpenMPTaskLoopDirective(CaptureRegions[CapLevel]))) {
2568 if (DSAStack->hasExplicitDSA(
2569 D,
2570 [](OpenMPClauseKind K, bool) { return K == OMPC_firstprivate; },
2571 Level, /*NotLastprivate=*/true))
2572 return OMPC_firstprivate;
2573 DSAStackTy::DSAVarData DVar = DSAStack->getImplicitDSA(D, Level);
2574 if (DVar.CKind != OMPC_shared &&
2575 !DSAStack->isLoopControlVariable(D, Level).first && !DVar.RefExpr) {
2576 DSAStack->addImplicitTaskFirstprivate(Level, D);
2577 return OMPC_firstprivate;
2578 }
2579 }
2580 }
// Loop-control variables of a non-transformation loop directive are private
// (unless an explicit non-private DSA or a simd directive says otherwise).
2581 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective()) &&
2582 !isOpenMPLoopTransformationDirective(DSAStack->getCurrentDirective())) {
2583 if (DSAStack->getAssociatedLoops() > 0 && !DSAStack->isLoopStarted()) {
2584 DSAStack->resetPossibleLoopCounter(D);
2585 DSAStack->loopStart();
2586 return OMPC_private;
2587 }
2588 if ((DSAStack->getPossiblyLoopCounter() == D->getCanonicalDecl() ||
2589 DSAStack->isLoopControlVariable(D).first) &&
2590 !DSAStack->hasExplicitDSA(
2591 D, [](OpenMPClauseKind K, bool) { return K != OMPC_private; },
2592 Level) &&
2593 !isOpenMPSimdDirective(DSAStack->getCurrentDirective()))
2594 return OMPC_private;
2595 }
// Forced capture of threadprivate variables without copyin is private.
2596 if (const auto *VD = dyn_cast<VarDecl>(D)) {
2597 if (DSAStack->isThreadPrivate(const_cast<VarDecl *>(VD)) &&
2598 DSAStack->isForceVarCapturing() &&
2599 !DSAStack->hasExplicitDSA(
2600 D, [](OpenMPClauseKind K, bool) { return K == OMPC_copyin; },
2601 Level))
2602 return OMPC_private;
2603 }
2604 // User-defined allocators are private since they must be defined in the
2605 // context of target region.
2606 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective, Level) &&
2607 DSAStack->isUsesAllocatorsDecl(Level, D).value_or(
2608 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2609 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2610 return OMPC_private;
// Fall-through: explicit private DSA, private clause parsing mode, or a
// taskgroup reduction descriptor all yield OMPC_private.
2611 return (DSAStack->hasExplicitDSA(
2612 D, [](OpenMPClauseKind K, bool) { return K == OMPC_private; },
2613 Level) ||
2614 (DSAStack->isClauseParsingMode() &&
2615 DSAStack->getClauseParsingMode() == OMPC_private) ||
2616 // Consider taskgroup reduction descriptor variable a private
2617 // to avoid possible capture in the region.
2618 (DSAStack->hasExplicitDirective(
2619 [](OpenMPDirectiveKind K) {
2620 return K == OMPD_taskgroup ||
2621 ((isOpenMPParallelDirective(K) ||
2622 isOpenMPWorksharingDirective(K)) &&
2623 !isOpenMPSimdDirective(K));
2624 },
2625 Level) &&
2626 DSAStack->isTaskgroupReductionRef(D, Level)))
2627 ? OMPC_private
2628 : OMPC_unknown;
2629}
2630
// Walks the DSA stack from the innermost region down to Level and records on
// FD the clause kind governing how D is captured (private-like clause, map,
// or firstprivate) via an implicit OMPCaptureKindAttr.
// NOTE(review): the signature line (original 2631) and some interior lambda /
// argument lines (originals 2651, 2660) are missing from this excerpt.
2632 unsigned Level) {
2633 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
2634 D = getCanonicalDecl(D);
2635 OpenMPClauseKind OMPC = OMPC_unknown;
// Search top-down; the first matching explicit DSA / map / target region
// determines the capture kind.
2636 for (unsigned I = DSAStack->getNestingLevel() + 1; I > Level; --I) {
2637 const unsigned NewLevel = I - 1;
2638 if (DSAStack->hasExplicitDSA(
2639 D,
2640 [&OMPC](const OpenMPClauseKind K, bool AppliedToPointee) {
2641 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2642 OMPC = K;
2643 return true;
2644 }
2645 return false;
2646 },
2647 NewLevel))
2648 break;
2649 if (DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2650 D, NewLevel,
2652 OpenMPClauseKind) { return true; })) {
2653 OMPC = OMPC_map;
2654 break;
2655 }
2656 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective,
2657 NewLevel)) {
2658 OMPC = OMPC_map;
2659 if (DSAStack->mustBeFirstprivateAtLevel(
2661 OMPC = OMPC_firstprivate;
2662 break;
2663 }
2664 }
2665 if (OMPC != OMPC_unknown)
2666 FD->addAttr(
2667 OMPCaptureKindAttr::CreateImplicit(getASTContext(), unsigned(OMPC)));
2668}
2669
// Predicate: true when D is a non-local variable captured inside a target
// execution directive whose capture region at CaptureLevel is not a task.
// NOTE(review): the signature line (original 2670) and the SmallVector
// declaration for Regions (original 2675) are missing from this excerpt.
2671 unsigned CaptureLevel) const {
2672 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
2673 // Return true if the current level is no longer enclosed in a target region.
2674
2676 getOpenMPCaptureRegions(Regions, DSAStack->getDirective(Level));
2677 const auto *VD = dyn_cast<VarDecl>(D);
2678 return VD && !VD->hasLocalStorage() &&
2679 DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective,
2680 Level) &&
2681 Regions[CaptureLevel] != OMPD_task;
2682}
2683
// Predicate deciding whether a variable should be globally captured at the
// given level: local-storage variables and non-shared DSAs return true.
// NOTE(review): the signature line (original 2684) and the condition guarded
// by line 2691 are missing from this excerpt; the early `return true;` at
// 2692 belongs to that missing condition.
2685 unsigned CaptureLevel) const {
2686 assert(getLangOpts().OpenMP && "OpenMP is not allowed");
2687 // Return true if the current level is no longer enclosed in a target region.
2688
2689 if (const auto *VD = dyn_cast<VarDecl>(D)) {
2690 if (!VD->hasLocalStorage()) {
2692 return true;
2693 DSAStackTy::DSAVarData TopDVar =
2694 DSAStack->getTopDSA(D, /*FromParent=*/false);
2695 unsigned NumLevels =
2696 getOpenMPCaptureLevels(DSAStack->getDirective(Level));
2697 if (Level == 0)
2698 // non-file scope static variable with default(firstprivate)
2699 // should be global captured.
2700 return (NumLevels == CaptureLevel + 1 &&
2701 (TopDVar.CKind != OMPC_shared ||
2702 DSAStack->getDefaultDSA() == DSA_firstprivate));
// Walk outward: any enclosing level with a non-shared implicit DSA forces
// global capture.
2703 do {
2704 --Level;
2705 DSAStackTy::DSAVarData DVar = DSAStack->getImplicitDSA(D, Level);
2706 if (DVar.CKind != OMPC_shared)
2707 return true;
2708 } while (Level > 0);
2709 }
2710 }
2711 return true;
2712}
2713
// Tears down the per-translation-unit data-sharing attributes stack
// (DSAStack is a macro/member aliasing the stack object owned by SemaOpenMP).
2714void SemaOpenMP::DestroyDataSharingAttributesStack() { delete DSAStack; }
2715
// Pushes a new 'declare variant' scope carrying the parsed trait info TI.
// NOTE(review): the first signature line (original 2716) is missing from this
// excerpt.
2717 OMPTraitInfo &TI) {
2718 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2719}
2720
// Pops the innermost 'declare variant' scope.
// NOTE(review): the signature and the assert's first line (originals
// 2721-2722) are missing from this excerpt.
2723 "Not in OpenMP declare variant scope!");
2724
2725 OMPDeclareVariantScopes.pop_back();
2726}
2727
// Delayed host/device call-compatibility check: diagnoses a call at Loc from
// Caller to Callee when the callee's 'declare target device_type' makes it
// unavailable for the current compilation (host function called during device
// codegen, or nohost function called during host codegen).
// NOTE(review): the first signature line (original 2728) and the Loc
// parameter line (original 2730) are missing from this excerpt.
2729 const FunctionDecl *Callee,
2731 assert(getLangOpts().OpenMP && "Expected OpenMP compilation mode.");
2732 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2733 OMPDeclareTargetDeclAttr::getDeviceType(Caller->getMostRecentDecl());
2734 // Ignore host functions during device analysis.
2735 if (getLangOpts().OpenMPIsTargetDevice &&
2736 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2737 return;
2738 // Ignore nohost functions during host analysis.
2739 if (!getLangOpts().OpenMPIsTargetDevice && DevTy &&
2740 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2741 return;
2742 const FunctionDecl *FD = Callee->getMostRecentDecl();
2743 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2744 if (getLangOpts().OpenMPIsTargetDevice && DevTy &&
2745 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2746 // Diagnose host function called during device codegen.
2747 StringRef HostDevTy =
2748 getOpenMPSimpleClauseTypeName(OMPC_device_type, OMPC_DEVICE_TYPE_host);
2749 Diag(Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2750 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2751 diag::note_omp_marked_device_type_here)
2752 << HostDevTy;
2753 return;
2754 }
2755 if (!getLangOpts().OpenMPIsTargetDevice &&
2756 !getLangOpts().OpenMPOffloadMandatory && DevTy &&
2757 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2758 // In OpenMP 5.2 or later, if the function has a host variant then allow
2759 // that to be called instead
2760 auto &&HasHostAttr = [](const FunctionDecl *Callee) {
2761 for (OMPDeclareVariantAttr *A :
2762 Callee->specific_attrs<OMPDeclareVariantAttr>()) {
2763 auto *DeclRefVariant = cast<DeclRefExpr>(A->getVariantFuncRef());
2764 auto *VariantFD = cast<FunctionDecl>(DeclRefVariant->getDecl());
2765 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2766 OMPDeclareTargetDeclAttr::getDeviceType(
2767 VariantFD->getMostRecentDecl());
2768 if (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host)
2769 return true;
2770 }
2771 return false;
2772 };
2773 if (getLangOpts().OpenMP >= 52 &&
2774 Callee->hasAttr<OMPDeclareVariantAttr>() && HasHostAttr(Callee))
2775 return;
2776 // Diagnose nohost function called during host codegen.
2777 StringRef NoHostDevTy = getOpenMPSimpleClauseTypeName(
2778 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2779 Diag(Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2780 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2781 diag::note_omp_marked_device_type_here)
2782 << NoHostDevTy;
2783 }
2784}
2785
// Opens a new data-sharing-attributes region for directive DKind by pushing a
// frame onto the DSA stack.
// NOTE(review): the first signature line (original 2786) and the lines at
// originals 2790-2791 are missing from this excerpt.
2787 const DeclarationNameInfo &DirName,
2788 Scope *CurScope, SourceLocation Loc) {
2789 DSAStack->push(DKind, DirName, CurScope, Loc);
2792}
2793
// Marks the DSA stack as being in clause-parsing mode for clause kind K.
// NOTE(review): the signature line (original 2794) is missing from this
// excerpt.
2795 DSAStack->setClauseParsingMode(K);
2796}
2797
// Leaves clause-parsing mode by resetting the stack's clause kind to unknown.
// NOTE(review): the signature line (original 2798) and trailing line 2800 are
// missing from this excerpt.
2799 DSAStack->setClauseParsingMode(/*K=*/OMPC_unknown);
2801}
2802
2803static std::pair<ValueDecl *, bool>
2804getPrivateItem(Sema &S, Expr *&RefExpr, SourceLocation &ELoc,
2805 SourceRange &ERange, bool AllowArraySection = false,
2806 StringRef DiagType = "");
2807
2808/// Check consistency of the reduction clauses.
// Enforces cross-clause reduction restrictions (OpenMP 5.0, 2.19.5.4):
// 'task'-modified reductions are only allowed on parallel/worksharing (non-
// simd) constructs, and once an 'inscan' reduction appears, every reduction
// clause on the construct must be 'inscan' and its items must be used in a
// scan directive.
// NOTE(review): one interior line (original 2834, part of the task-modifier
// condition) is missing from this excerpt.
2809static void checkReductionClauses(Sema &S, DSAStackTy *Stack,
2810 ArrayRef<OMPClause *> Clauses) {
2811 bool InscanFound = false;
2812 SourceLocation InscanLoc;
2813 // OpenMP 5.0, 2.19.5.4 reduction Clause, Restrictions.
2814 // A reduction clause without the inscan reduction-modifier may not appear on
2815 // a construct on which a reduction clause with the inscan reduction-modifier
2816 // appears.
2817 for (OMPClause *C : Clauses) {
2818 if (C->getClauseKind() != OMPC_reduction)
2819 continue;
2820 auto *RC = cast<OMPReductionClause>(C);
2821 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2822 InscanFound = true;
2823 InscanLoc = RC->getModifierLoc();
2824 continue;
2825 }
2826 if (RC->getModifier() == OMPC_REDUCTION_task) {
2827 // OpenMP 5.0, 2.19.5.4 reduction Clause.
2828 // A reduction clause with the task reduction-modifier may only appear on
2829 // a parallel construct, a worksharing construct or a combined or
2830 // composite construct for which any of the aforementioned constructs is a
2831 // constituent construct and simd or loop are not constituent constructs.
2832 OpenMPDirectiveKind CurDir = Stack->getCurrentDirective();
2833 if (!(isOpenMPParallelDirective(CurDir) ||
2835 isOpenMPSimdDirective(CurDir))
2836 S.Diag(RC->getModifierLoc(),
2837 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2838 continue;
2839 }
2840 }
// Second pass: with an inscan reduction present, reject other modifiers and
// require each reduced item to appear in a scan directive.
2841 if (InscanFound) {
2842 for (OMPClause *C : Clauses) {
2843 if (C->getClauseKind() != OMPC_reduction)
2844 continue;
2845 auto *RC = cast<OMPReductionClause>(C);
2846 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2847 S.Diag(RC->getModifier() == OMPC_REDUCTION_unknown
2848 ? RC->getBeginLoc()
2849 : RC->getModifierLoc(),
2850 diag::err_omp_inscan_reduction_expected);
2851 S.Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2852 continue;
2853 }
2854 for (Expr *Ref : RC->varlists()) {
2855 assert(Ref && "NULL expr in OpenMP nontemporal clause.");
2856 SourceLocation ELoc;
2857 SourceRange ERange;
2858 Expr *SimpleRefExpr = Ref;
2859 auto Res = getPrivateItem(S, SimpleRefExpr, ELoc, ERange,
2860 /*AllowArraySection=*/true);
2861 ValueDecl *D = Res.first;
2862 if (!D)
2863 continue;
2864 if (!Stack->isUsedInScanDirective(getCanonicalDecl(D))) {
2865 S.Diag(Ref->getExprLoc(),
2866 diag::err_omp_reduction_not_inclusive_exclusive)
2867 << Ref->getSourceRange();
2868 }
2869 }
2870 }
2871 }
2872}
2873
2874static void checkAllocateClauses(Sema &S, DSAStackTy *Stack,
2875 ArrayRef<OMPClause *> Clauses);
2876static DeclRefExpr *buildCapture(Sema &S, ValueDecl *D, Expr *CaptureExpr,
2877 bool WithInit);
2878
2879static void reportOriginalDsa(Sema &SemaRef, const DSAStackTy *Stack,
2880 const ValueDecl *D,
2881 const DSAStackTy::DSAVarData &DVar,
2882 bool IsLoopIterVar = false);
2883
// Closes the current DSA region: finalizes lastprivate private copies,
// nontemporal private refs, and uses_allocators restrictions on the ending
// directive's clauses, checks allocate clauses, then pops the DSA stack.
// NOTE(review): the signature line (original 2884) and several interior lines
// (originals 2901, 2913, 2973, 2975, 2990, 3000, 3002, 3006-3007) are missing
// from this excerpt.
2885 // OpenMP [2.14.3.5, Restrictions, C/C++, p.1]
2886 // A variable of class type (or array thereof) that appears in a lastprivate
2887 // clause requires an accessible, unambiguous default constructor for the
2888 // class type, unless the list item is also specified in a firstprivate
2889 // clause.
2890 if (const auto *D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
2891 for (OMPClause *C : D->clauses()) {
2892 if (auto *Clause = dyn_cast<OMPLastprivateClause>(C)) {
2893 SmallVector<Expr *, 8> PrivateCopies;
2894 for (Expr *DE : Clause->varlists()) {
2895 if (DE->isValueDependent() || DE->isTypeDependent()) {
2896 PrivateCopies.push_back(nullptr);
2897 continue;
2898 }
2899 auto *DRE = cast<DeclRefExpr>(DE->IgnoreParens());
2900 auto *VD = cast<VarDecl>(DRE->getDecl());
2902 const DSAStackTy::DSAVarData DVar =
2903 DSAStack->getTopDSA(VD, /*FromParent=*/false);
2904 if (DVar.CKind == OMPC_lastprivate) {
2905 // Generate helper private variable and initialize it with the
2906 // default value. The address of the original variable is replaced
2907 // by the address of the new private variable in CodeGen. This new
2908 // variable is not added to IdResolver, so the code in the OpenMP
2909 // region uses original variable for proper diagnostics.
2910 VarDecl *VDPrivate = buildVarDecl(
2911 SemaRef, DE->getExprLoc(), Type.getUnqualifiedType(),
2912 VD->getName(), VD->hasAttrs() ? &VD->getAttrs() : nullptr, DRE);
2914 if (VDPrivate->isInvalidDecl()) {
2915 PrivateCopies.push_back(nullptr);
2916 continue;
2917 }
2918 PrivateCopies.push_back(buildDeclRefExpr(
2919 SemaRef, VDPrivate, DE->getType(), DE->getExprLoc()));
2920 } else {
2921 // The variable is also a firstprivate, so initialization sequence
2922 // for private copy is generated already.
2923 PrivateCopies.push_back(nullptr);
2924 }
2925 }
2926 Clause->setPrivateCopies(PrivateCopies);
2927 continue;
2928 }
2929 // Finalize nontemporal clause by handling private copies, if any.
2930 if (auto *Clause = dyn_cast<OMPNontemporalClause>(C)) {
2931 SmallVector<Expr *, 8> PrivateRefs;
2932 for (Expr *RefExpr : Clause->varlists()) {
2933 assert(RefExpr && "NULL expr in OpenMP nontemporal clause.");
2934 SourceLocation ELoc;
2935 SourceRange ERange;
2936 Expr *SimpleRefExpr = RefExpr;
2937 auto Res = getPrivateItem(SemaRef, SimpleRefExpr, ELoc, ERange);
2938 if (Res.second)
2939 // It will be analyzed later.
2940 PrivateRefs.push_back(RefExpr);
2941 ValueDecl *D = Res.first;
2942 if (!D)
2943 continue;
2944
2945 const DSAStackTy::DSAVarData DVar =
2946 DSAStack->getTopDSA(D, /*FromParent=*/false);
2947 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2948 : SimpleRefExpr);
2949 }
2950 Clause->setPrivateRefs(PrivateRefs);
2951 continue;
2952 }
2953 if (auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(C)) {
2954 for (unsigned I = 0, E = Clause->getNumberOfAllocators(); I < E; ++I) {
2955 OMPUsesAllocatorsClause::Data D = Clause->getAllocatorData(I);
2956 auto *DRE = dyn_cast<DeclRefExpr>(D.Allocator->IgnoreParenImpCasts());
2957 if (!DRE)
2958 continue;
2959 ValueDecl *VD = DRE->getDecl();
2960 if (!VD || !isa<VarDecl>(VD))
2961 continue;
2962 DSAStackTy::DSAVarData DVar =
2963 DSAStack->getTopDSA(VD, /*FromParent=*/false);
2964 // OpenMP [2.12.5, target Construct]
2965 // Memory allocators that appear in a uses_allocators clause cannot
2966 // appear in other data-sharing attribute clauses or data-mapping
2967 // attribute clauses in the same construct.
2968 Expr *MapExpr = nullptr;
2969 if (DVar.RefExpr ||
2970 DSAStack->checkMappableExprComponentListsForDecl(
2971 VD, /*CurrentRegionOnly=*/true,
2972 [VD, &MapExpr](
2974 MapExprComponents,
2976 auto MI = MapExprComponents.rbegin();
2977 auto ME = MapExprComponents.rend();
2978 if (MI != ME &&
2979 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
2980 VD->getCanonicalDecl()) {
2981 MapExpr = MI->getAssociatedExpression();
2982 return true;
2983 }
2984 return false;
2985 })) {
2986 Diag(D.Allocator->getExprLoc(),
2987 diag::err_omp_allocator_used_in_clauses)
2988 << D.Allocator->getSourceRange();
2989 if (DVar.RefExpr)
2991 else
2992 Diag(MapExpr->getExprLoc(), diag::note_used_here)
2993 << MapExpr->getSourceRange();
2994 }
2995 }
2996 continue;
2997 }
2998 }
2999 // Check allocate clauses.
3001 checkAllocateClauses(SemaRef, DSAStack, D->clauses());
3003 }
3004
3005 DSAStack->pop();
3008}
3009
3011 Expr *NumIterations, Sema &SemaRef,
3012 Scope *S, DSAStackTy *Stack);
3013
// Finalizes all linear clauses of a loop directive once the loop's helper
// expressions are built; returns true on error. In dependent contexts the
// clauses are left for instantiation time.
// NOTE(review): the loop-helper parameter line (original 3015, presumably the
// `B` helper-exprs struct) is missing from this excerpt.
3014static bool finishLinearClauses(Sema &SemaRef, ArrayRef<OMPClause *> Clauses,
3016 DSAStackTy *Stack) {
3017 assert((SemaRef.CurContext->isDependentContext() || B.builtAll()) &&
3018 "loop exprs were not built");
3019
3020 if (SemaRef.CurContext->isDependentContext())
3021 return false;
3022
3023 // Finalize the clauses that need pre-built expressions for CodeGen.
3024 for (OMPClause *C : Clauses) {
3025 auto *LC = dyn_cast<OMPLinearClause>(C);
3026 if (!LC)
3027 continue;
3028 if (FinishOpenMPLinearClause(*LC, cast<DeclRefExpr>(B.IterationVarRef),
3029 B.NumIterations, SemaRef,
3030 SemaRef.getCurScope(), Stack))
3031 return true;
3032 }
3033
3034 return false;
3035}
3036
3037namespace {
3038
3039class VarDeclFilterCCC final : public CorrectionCandidateCallback {
3040private:
3041 Sema &SemaRef;
3042
3043public:
3044 explicit VarDeclFilterCCC(Sema &S) : SemaRef(S) {}
3045 bool ValidateCandidate(const TypoCorrection &Candidate) override {
3046 NamedDecl *ND = Candidate.getCorrectionDecl();
3047 if (const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
3048 return VD->hasGlobalStorage() &&
3049 SemaRef.isDeclInScope(ND, SemaRef.getCurLexicalContext(),
3050 SemaRef.getCurScope());
3051 }
3052 return false;
3053 }
3054
3055 std::unique_ptr<CorrectionCandidateCallback> clone() override {
3056 return std::make_unique<VarDeclFilterCCC>(*this);
3057 }
3058};
3059
3060class VarOrFuncDeclFilterCCC final : public CorrectionCandidateCallback {
3061private:
3062 Sema &SemaRef;
3063
3064public:
3065 explicit VarOrFuncDeclFilterCCC(Sema &S) : SemaRef(S) {}
3066 bool ValidateCandidate(const TypoCorrection &Candidate) override {
3067 NamedDecl *ND = Candidate.getCorrectionDecl();
3068 if (ND && ((isa<VarDecl>(ND) && ND->getKind() == Decl::Var) ||
3069 isa<FunctionDecl>(ND))) {
3070 return SemaRef.isDeclInScope(ND, SemaRef.getCurLexicalContext(),
3071 SemaRef.getCurScope());
3072 }
3073 return false;
3074 }
3075
3076 std::unique_ptr<CorrectionCandidateCallback> clone() override {
3077 return std::make_unique<VarOrFuncDeclFilterCCC>(*this);
3078 }
3079};
3080
3081} // namespace
3082
// Resolves the identifier Id used as an argument of an OpenMP directive
// (e.g. 'threadprivate'), applies typo correction, and enforces the OpenMP
// scope restrictions for such variables; returns a DeclRefExpr on success.
// NOTE(review): the signature line (original 3083) and several interior lines
// (originals 3088, 3101, 3126-3127, 3140, 3143-3144, 3158-3159, 3170-3171,
// 3176, 3186, 3190, 3208) are missing from this excerpt.
3084 CXXScopeSpec &ScopeSpec,
3085 const DeclarationNameInfo &Id,
3086 OpenMPDirectiveKind Kind) {
3087 ASTContext &Context = getASTContext();
3089 SemaRef.LookupParsedName(Lookup, CurScope, &ScopeSpec,
3090 /*ObjectType=*/QualType(),
3091 /*AllowBuiltinCreation=*/true);
3092
3093 if (Lookup.isAmbiguous())
3094 return ExprError();
3095
// Typo-correct to a global variable when lookup fails; otherwise the single
// result must be a VarDecl.
3096 VarDecl *VD;
3097 if (!Lookup.isSingleResult()) {
3098 VarDeclFilterCCC CCC(SemaRef);
3099 if (TypoCorrection Corrected =
3100 SemaRef.CorrectTypo(Id, Sema::LookupOrdinaryName, CurScope, nullptr,
3102 SemaRef.diagnoseTypo(Corrected,
3103 PDiag(Lookup.empty()
3104 ? diag::err_undeclared_var_use_suggest
3105 : diag::err_omp_expected_var_arg_suggest)
3106 << Id.getName());
3107 VD = Corrected.getCorrectionDeclAs<VarDecl>();
3108 } else {
3109 Diag(Id.getLoc(), Lookup.empty() ? diag::err_undeclared_var_use
3110 : diag::err_omp_expected_var_arg)
3111 << Id.getName();
3112 return ExprError();
3113 }
3114 } else if (!(VD = Lookup.getAsSingle<VarDecl>())) {
3115 Diag(Id.getLoc(), diag::err_omp_expected_var_arg) << Id.getName();
3116 Diag(Lookup.getFoundDecl()->getLocation(), diag::note_declared_at);
3117 return ExprError();
3118 }
3119 Lookup.suppressDiagnostics();
3120
3121 // OpenMP [2.9.2, Syntax, C/C++]
3122 // Variables must be file-scope, namespace-scope, or static block-scope.
3123 if (Kind == OMPD_threadprivate && !VD->hasGlobalStorage()) {
3124 Diag(Id.getLoc(), diag::err_omp_global_var_arg)
3125 << getOpenMPDirectiveName(Kind) << !VD->isStaticLocal();
3126 bool IsDecl =
3128 Diag(VD->getLocation(),
3129 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3130 << VD;
3131 return ExprError();
3132 }
3133
3134 VarDecl *CanonicalVD = VD->getCanonicalDecl();
3135 NamedDecl *ND = CanonicalVD;
3136 // OpenMP [2.9.2, Restrictions, C/C++, p.2]
3137 // A threadprivate directive for file-scope variables must appear outside
3138 // any definition or declaration.
3139 if (CanonicalVD->getDeclContext()->isTranslationUnit() &&
3141 Diag(Id.getLoc(), diag::err_omp_var_scope)
3142 << getOpenMPDirectiveName(Kind) << VD;
3143 bool IsDecl =
3145 Diag(VD->getLocation(),
3146 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3147 << VD;
3148 return ExprError();
3149 }
3150 // OpenMP [2.9.2, Restrictions, C/C++, p.3]
3151 // A threadprivate directive for static class member variables must appear
3152 // in the class definition, in the same scope in which the member
3153 // variables are declared.
3154 if (CanonicalVD->isStaticDataMember() &&
3155 !CanonicalVD->getDeclContext()->Equals(SemaRef.getCurLexicalContext())) {
3156 Diag(Id.getLoc(), diag::err_omp_var_scope)
3157 << getOpenMPDirectiveName(Kind) << VD;
3158 bool IsDecl =
3160 Diag(VD->getLocation(),
3161 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3162 << VD;
3163 return ExprError();
3164 }
3165 // OpenMP [2.9.2, Restrictions, C/C++, p.4]
3166 // A threadprivate directive for namespace-scope variables must appear
3167 // outside any definition or declaration other than the namespace
3168 // definition itself.
3169 if (CanonicalVD->getDeclContext()->isNamespace() &&
3172 CanonicalVD->getDeclContext()))) {
3173 Diag(Id.getLoc(), diag::err_omp_var_scope)
3174 << getOpenMPDirectiveName(Kind) << VD;
3175 bool IsDecl =
3177 Diag(VD->getLocation(),
3178 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3179 << VD;
3180 return ExprError();
3181 }
3182 // OpenMP [2.9.2, Restrictions, C/C++, p.6]
3183 // A threadprivate directive for static block-scope variables must appear
3184 // in the scope of the variable and not in a nested scope.
3185 if (CanonicalVD->isLocalVarDecl() && CurScope &&
3187 Diag(Id.getLoc(), diag::err_omp_var_scope)
3188 << getOpenMPDirectiveName(Kind) << VD;
3189 bool IsDecl =
3191 Diag(VD->getLocation(),
3192 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3193 << VD;
3194 return ExprError();
3195 }
3196
3197 // OpenMP [2.9.2, Restrictions, C/C++, p.2-6]
3198 // A threadprivate directive must lexically precede all references to any
3199 // of the variables in its list.
3200 if (Kind == OMPD_threadprivate && VD->isUsed() &&
3201 !DSAStack->isThreadPrivate(VD)) {
3202 Diag(Id.getLoc(), diag::err_omp_var_used)
3203 << getOpenMPDirectiveName(Kind) << VD;
3204 return ExprError();
3205 }
3206
3207 QualType ExprType = VD->getType().getNonReferenceType();
3209 SourceLocation(), VD,
3210 /*RefersToEnclosingVariableOrCapture=*/false,
3211 Id.getLoc(), ExprType, VK_LValue);
3212}
3213
// Entry point for a '#pragma omp threadprivate' directive over VarList;
// delegates to the checking routine and wraps the result.
// NOTE(review): the signature line (original 3215) and the body lines at
// originals 3217-3219 are missing from this excerpt.
3216 ArrayRef<Expr *> VarList) {
3220 }
3221 return nullptr;
3222}
3223
3224namespace {
3225class LocalVarRefChecker final
3226 : public ConstStmtVisitor<LocalVarRefChecker, bool> {
3227 Sema &SemaRef;
3228
3229public:
3230 bool VisitDeclRefExpr(const DeclRefExpr *E) {
3231 if (const auto *VD = dyn_cast<VarDecl>(E->getDecl())) {
3232 if (VD->hasLocalStorage()) {
3233 SemaRef.Diag(E->getBeginLoc(),
3234 diag::err_omp_local_var_in_threadprivate_init)
3235 << E->getSourceRange();
3236 SemaRef.Diag(VD->getLocation(), diag::note_defined_here)
3237 << VD << VD->getSourceRange();
3238 return true;
3239 }
3240 }
3241 return false;
3242 }
3243 bool VisitStmt(const Stmt *S) {
3244 for (const Stmt *Child : S->children()) {
3245 if (Child && Visit(Child))
3246 return true;
3247 }
3248 return false;
3249 }
3250 explicit LocalVarRefChecker(Sema &SemaRef) : SemaRef(SemaRef) {}
3251};
3252} // namespace
3253
// Validates each variable in a threadprivate list (complete type, no
// reference type, no conflicting TLS/register storage, no local-storage
// references in its initializer), records the DSA, and builds the
// OMPThreadPrivateDecl for the accepted variables.
// NOTE(review): the signature line (original 3255), the Vars vector
// declaration (original 3258), and the lines at originals 3277, 3288, 3306,
// 3330, 3332 are missing from this excerpt.
3257 ASTContext &Context = getASTContext();
3259 for (Expr *RefExpr : VarList) {
3260 auto *DE = cast<DeclRefExpr>(RefExpr);
3261 auto *VD = cast<VarDecl>(DE->getDecl());
3262 SourceLocation ILoc = DE->getExprLoc();
3263
3264 // Mark variable as used.
3265 VD->setReferenced();
3266 VD->markUsed(Context);
3267
3268 QualType QType = VD->getType();
3269 if (QType->isDependentType() || QType->isInstantiationDependentType()) {
3270 // It will be analyzed later.
3271 Vars.push_back(DE);
3272 continue;
3273 }
3274
3275 // OpenMP [2.9.2, Restrictions, C/C++, p.10]
3276 // A threadprivate variable must not have an incomplete type.
3278 ILoc, VD->getType(), diag::err_omp_threadprivate_incomplete_type)) {
3279 continue;
3280 }
3281
3282 // OpenMP [2.9.2, Restrictions, C/C++, p.10]
3283 // A threadprivate variable must not have a reference type.
3284 if (VD->getType()->isReferenceType()) {
3285 Diag(ILoc, diag::err_omp_ref_type_arg)
3286 << getOpenMPDirectiveName(OMPD_threadprivate) << VD->getType();
3287 bool IsDecl =
3289 Diag(VD->getLocation(),
3290 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3291 << VD;
3292 continue;
3293 }
3294
3295 // Check if this is a TLS variable. If TLS is not being supported, produce
3296 // the corresponding diagnostic.
3297 if ((VD->getTLSKind() != VarDecl::TLS_None &&
3298 !(VD->hasAttr<OMPThreadPrivateDeclAttr>() &&
3299 getLangOpts().OpenMPUseTLS &&
3300 getASTContext().getTargetInfo().isTLSSupported())) ||
3301 (VD->getStorageClass() == SC_Register && VD->hasAttr<AsmLabelAttr>() &&
3302 !VD->isLocalVarDecl())) {
3303 Diag(ILoc, diag::err_omp_var_thread_local)
3304 << VD << ((VD->getTLSKind() != VarDecl::TLS_None) ? 0 : 1);
3305 bool IsDecl =
3307 Diag(VD->getLocation(),
3308 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3309 << VD;
3310 continue;
3311 }
3312
3313 // Check if initial value of threadprivate variable reference variable with
3314 // local storage (it is not supported by runtime).
3315 if (const Expr *Init = VD->getAnyInitializer()) {
3316 LocalVarRefChecker Checker(SemaRef);
3317 if (Checker.Visit(Init))
3318 continue;
3319 }
3320
3321 Vars.push_back(RefExpr);
3322 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3323 VD->addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3324 Context, SourceRange(Loc, Loc)));
3325 if (ASTMutationListener *ML = Context.getASTMutationListener())
3326 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3327 }
3328 OMPThreadPrivateDecl *D = nullptr;
3329 if (!Vars.empty()) {
3331 Loc, Vars);
3333 }
3334 return D;
3335}
3336
3337static OMPAllocateDeclAttr::AllocatorTypeTy
3338getAllocatorKind(Sema &S, DSAStackTy *Stack, Expr *Allocator) {
3339 if (!Allocator)
3340 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3341 if (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3342 Allocator->isInstantiationDependent() ||
3343 Allocator->containsUnexpandedParameterPack())
3344 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3345 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3346 llvm::FoldingSetNodeID AEId;
3347 const Expr *AE = Allocator->IgnoreParenImpCasts();
3348 AE->IgnoreImpCasts()->Profile(AEId, S.getASTContext(), /*Canonical=*/true);
3349 for (int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3350 auto AllocatorKind = static_cast<OMPAllocateDeclAttr::AllocatorTypeTy>(I);
3351 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3352 llvm::FoldingSetNodeID DAEId;
3353 DefAllocator->IgnoreImpCasts()->Profile(DAEId, S.getASTContext(),
3354 /*Canonical=*/true);
3355 if (AEId == DAEId) {
3356 AllocatorKindRes = AllocatorKind;
3357 break;
3358 }
3359 }
3360 return AllocatorKindRes;
3361}
3362
// If VD already carries an OMPAllocateDeclAttr, verifies that the allocator
// specified now matches the previously recorded one; on mismatch emits a
// warning plus a note at the previous allocator and returns true.
// NOTE(review): the first signature line (original 3363, presumably
// `static bool checkPreviousOMPAllocateAttribute(`) is missing from this
// excerpt.
3364 Sema &S, DSAStackTy *Stack, Expr *RefExpr, VarDecl *VD,
3365 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind, Expr *Allocator) {
3366 if (!VD->hasAttr<OMPAllocateDeclAttr>())
3367 return false;
3368 const auto *A = VD->getAttr<OMPAllocateDeclAttr>();
3369 Expr *PrevAllocator = A->getAllocator();
3370 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3371 getAllocatorKind(S, Stack, PrevAllocator);
3372 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
// Same user-defined kind: the expressions themselves must also be
// structurally identical (compared via canonical profiling).
3373 if (AllocatorsMatch &&
3374 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3375 Allocator && PrevAllocator) {
3376 const Expr *AE = Allocator->IgnoreParenImpCasts();
3377 const Expr *PAE = PrevAllocator->IgnoreParenImpCasts();
3378 llvm::FoldingSetNodeID AEId, PAEId;
3379 AE->Profile(AEId, S.Context, /*Canonical=*/true);
3380 PAE->Profile(PAEId, S.Context, /*Canonical=*/true);
3381 AllocatorsMatch = AEId == PAEId;
3382 }
3383 if (!AllocatorsMatch) {
// Pretty-print both allocators for the diagnostic text.
3384 SmallString<256> AllocatorBuffer;
3385 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3386 if (Allocator)
3387 Allocator->printPretty(AllocatorStream, nullptr, S.getPrintingPolicy());
3388 SmallString<256> PrevAllocatorBuffer;
3389 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3390 if (PrevAllocator)
3391 PrevAllocator->printPretty(PrevAllocatorStream, nullptr,
3392 S.getPrintingPolicy());
3393
3394 SourceLocation AllocatorLoc =
3395 Allocator ? Allocator->getExprLoc() : RefExpr->getExprLoc();
3396 SourceRange AllocatorRange =
3397 Allocator ? Allocator->getSourceRange() : RefExpr->getSourceRange();
3398 SourceLocation PrevAllocatorLoc =
3399 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3400 SourceRange PrevAllocatorRange =
3401 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3402 S.Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3403 << (Allocator ? 1 : 0) << AllocatorStream.str()
3404 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3405 << AllocatorRange;
3406 S.Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3407 << PrevAllocatorRange;
3408 return true;
3409 }
3410 return false;
3411}
3412
// Attaches an implicit OMPAllocateDeclAttr (allocator kind, allocator and
// alignment expressions, source range) to VD, unless it already has one or
// either expression is still dependent; notifies the AST mutation listener.
// NOTE(review): the function-name line (original 3414) and the mutation-
// listener condition line (original 3433) are missing from this excerpt.
3413 static void
3415 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3416 Expr *Allocator, Expr *Alignment, SourceRange SR) {
3417 if (VD->hasAttr<OMPAllocateDeclAttr>())
3418 return;
3419 if (Alignment &&
3420 (Alignment->isTypeDependent() || Alignment->isValueDependent() ||
3421 Alignment->isInstantiationDependent() ||
3422 Alignment->containsUnexpandedParameterPack()))
3423 // Apply later when we have a usable value.
3424 return;
3425 if (Allocator &&
3426 (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3427 Allocator->isInstantiationDependent() ||
3428 Allocator->containsUnexpandedParameterPack()))
3429 return;
3430 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.Context, AllocatorKind,
3431 Allocator, Alignment, SR);
3432 VD->addAttr(A);
3434 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3435}
3436
// Handles '#pragma omp allocate': extracts the allocator/align clauses,
// validates each listed variable (TLS/register exclusion, consistent
// allocator reuse, predefined allocator required for static storage), applies
// the allocate attribute, and creates the OMPAllocateDecl.
// NOTE(review): the signature lines (originals 3437-3438), the Vars vector
// declaration (original 3462), the call line at 3476, and the trailing lines
// 3507/3509 are missing from this excerpt.
3439 DeclContext *Owner) {
3440 assert(Clauses.size() <= 2 && "Expected at most two clauses.");
3441 Expr *Alignment = nullptr;
3442 Expr *Allocator = nullptr;
3443 if (Clauses.empty()) {
3444 // OpenMP 5.0, 2.11.3 allocate Directive, Restrictions.
3445 // allocate directives that appear in a target region must specify an
3446 // allocator clause unless a requires directive with the dynamic_allocators
3447 // clause is present in the same compilation unit.
3448 if (getLangOpts().OpenMPIsTargetDevice &&
3449 !DSAStack->hasRequiresDeclWithClause<OMPDynamicAllocatorsClause>())
3450 SemaRef.targetDiag(Loc, diag::err_expected_allocator_clause);
3451 } else {
3452 for (const OMPClause *C : Clauses)
3453 if (const auto *AC = dyn_cast<OMPAllocatorClause>(C))
3454 Allocator = AC->getAllocator();
3455 else if (const auto *AC = dyn_cast<OMPAlignClause>(C))
3456 Alignment = AC->getAlignment();
3457 else
3458 llvm_unreachable("Unexpected clause on allocate directive");
3459 }
3460 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3461 getAllocatorKind(SemaRef, DSAStack, Allocator);
3463 for (Expr *RefExpr : VarList) {
3464 auto *DE = cast<DeclRefExpr>(RefExpr);
3465 auto *VD = cast<VarDecl>(DE->getDecl());
3466
3467 // Check if this is a TLS variable or global register.
3468 if (VD->getTLSKind() != VarDecl::TLS_None ||
3469 VD->hasAttr<OMPThreadPrivateDeclAttr>() ||
3470 (VD->getStorageClass() == SC_Register && VD->hasAttr<AsmLabelAttr>() &&
3471 !VD->isLocalVarDecl()))
3472 continue;
3473
3474 // If the used several times in the allocate directive, the same allocator
3475 // must be used.
3477 AllocatorKind, Allocator))
3478 continue;
3479
3480 // OpenMP, 2.11.3 allocate Directive, Restrictions, C / C++
3481 // If a list item has a static storage type, the allocator expression in the
3482 // allocator clause must be a constant expression that evaluates to one of
3483 // the predefined memory allocator values.
3484 if (Allocator && VD->hasGlobalStorage()) {
3485 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3486 Diag(Allocator->getExprLoc(),
3487 diag::err_omp_expected_predefined_allocator)
3488 << Allocator->getSourceRange();
3489 bool IsDecl = VD->isThisDeclarationADefinition(getASTContext()) ==
3491 Diag(VD->getLocation(),
3492 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3493 << VD;
3494 continue;
3495 }
3496 }
3497
3498 Vars.push_back(RefExpr);
3499 applyOMPAllocateAttribute(SemaRef, VD, AllocatorKind, Allocator, Alignment,
3500 DE->getSourceRange());
3501 }
3502 if (Vars.empty())
3503 return nullptr;
3504 if (!Owner)
3505 Owner = SemaRef.getCurLexicalContext();
3506 auto *D = OMPAllocateDecl::Create(getASTContext(), Owner, Loc, Vars, Clauses);
3508 Owner->addDecl(D);
3510}
3511
3514 ArrayRef<OMPClause *> ClauseList) {
3515 OMPRequiresDecl *D = nullptr;
3517 Diag(Loc, diag::err_omp_invalid_scope) << "requires";
3518 } else {
3519 D = CheckOMPRequiresDecl(Loc, ClauseList);
3520 if (D) {
3522 DSAStack->addRequiresDecl(D);
3523 }
3524 }
3526}
3527
3529 OpenMPDirectiveKind DKind,
3530 ArrayRef<std::string> Assumptions,
3531 bool SkippedClauses) {
3532 if (!SkippedClauses && Assumptions.empty())
3533 Diag(Loc, diag::err_omp_no_clause_for_directive)
3534 << llvm::omp::getAllAssumeClauseOptions()
3535 << llvm::omp::getOpenMPDirectiveName(DKind);
3536
3537 auto *AA =
3538 OMPAssumeAttr::Create(getASTContext(), llvm::join(Assumptions, ","), Loc);
3539 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3540 OMPAssumeScoped.push_back(AA);
3541 return;
3542 }
3543
3544 // Global assumes without assumption clauses are ignored.
3545 if (Assumptions.empty())
3546 return;
3547
3548 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3549 "Unexpected omp assumption directive!");
3550 OMPAssumeGlobal.push_back(AA);
3551
3552 // The OMPAssumeGlobal scope above will take care of new declarations but
3553 // we also want to apply the assumption to existing ones, e.g., to
3554 // declarations in included headers. To this end, we traverse all existing
3555 // declaration contexts and annotate function declarations here.
3556 SmallVector<DeclContext *, 8> DeclContexts;
3557 auto *Ctx = SemaRef.CurContext;
3558 while (Ctx->getLexicalParent())
3559 Ctx = Ctx->getLexicalParent();
3560 DeclContexts.push_back(Ctx);
3561 while (!DeclContexts.empty()) {
3562 DeclContext *DC = DeclContexts.pop_back_val();
3563 for (auto *SubDC : DC->decls()) {
3564 if (SubDC->isInvalidDecl())
3565 continue;
3566 if (auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3567 DeclContexts.push_back(CTD->getTemplatedDecl());
3568 llvm::append_range(DeclContexts, CTD->specializations());
3569 continue;
3570 }
3571 if (auto *DC = dyn_cast<DeclContext>(SubDC))
3572 DeclContexts.push_back(DC);
3573 if (auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3574 F->addAttr(AA);
3575 continue;
3576 }
3577 }
3578 }
3579}
3580
3582 assert(isInOpenMPAssumeScope() && "Not in OpenMP assumes scope!");
3583 OMPAssumeScoped.pop_back();
3584}
3585
3588 ArrayRef<OMPClause *> ClauseList) {
3589 /// For target specific clauses, the requires directive cannot be
3590 /// specified after the handling of any of the target regions in the
3591 /// current compilation unit.
3592 ArrayRef<SourceLocation> TargetLocations =
3593 DSAStack->getEncounteredTargetLocs();
3594 SourceLocation AtomicLoc = DSAStack->getAtomicDirectiveLoc();
3595 if (!TargetLocations.empty() || !AtomicLoc.isInvalid()) {
3596 for (const OMPClause *CNew : ClauseList) {
3597 // Check if any of the requires clauses affect target regions.
3598 if (isa<OMPUnifiedSharedMemoryClause>(CNew) ||
3599 isa<OMPUnifiedAddressClause>(CNew) ||
3600 isa<OMPReverseOffloadClause>(CNew) ||
3601 isa<OMPDynamicAllocatorsClause>(CNew)) {
3602 Diag(Loc, diag::err_omp_directive_before_requires)
3603 << "target" << getOpenMPClauseName(CNew->getClauseKind());
3604 for (SourceLocation TargetLoc : TargetLocations) {
3605 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3606 << "target";
3607 }
3608 } else if (!AtomicLoc.isInvalid() &&
3609 isa<OMPAtomicDefaultMemOrderClause>(CNew)) {
3610 Diag(Loc, diag::err_omp_directive_before_requires)
3611 << "atomic" << getOpenMPClauseName(CNew->getClauseKind());
3612 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3613 << "atomic";
3614 }
3615 }
3616 }
3617
3618 if (!DSAStack->hasDuplicateRequiresClause(ClauseList))
3620 getASTContext(), SemaRef.getCurLexicalContext(), Loc, ClauseList);
3621 return nullptr;
3622}
3623
3624static void reportOriginalDsa(Sema &SemaRef, const DSAStackTy *Stack,
3625 const ValueDecl *D,
3626 const DSAStackTy::DSAVarData &DVar,
3627 bool IsLoopIterVar) {
3628 if (DVar.RefExpr) {
3629 SemaRef.Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_explicit_dsa)
3630 << getOpenMPClauseName(DVar.CKind);
3631 return;
3632 }
3633 enum {
3634 PDSA_StaticMemberShared,
3635 PDSA_StaticLocalVarShared,
3636 PDSA_LoopIterVarPrivate,
3637 PDSA_LoopIterVarLinear,
3638 PDSA_LoopIterVarLastprivate,
3639 PDSA_ConstVarShared,
3640 PDSA_GlobalVarShared,
3641 PDSA_TaskVarFirstprivate,
3642 PDSA_LocalVarPrivate,
3643 PDSA_Implicit
3644 } Reason = PDSA_Implicit;
3645 bool ReportHint = false;
3646 auto ReportLoc = D->getLocation();
3647 auto *VD = dyn_cast<VarDecl>(D);
3648 if (IsLoopIterVar) {
3649 if (DVar.CKind == OMPC_private)
3650 Reason = PDSA_LoopIterVarPrivate;
3651 else if (DVar.CKind == OMPC_lastprivate)
3652 Reason = PDSA_LoopIterVarLastprivate;
3653 else
3654 Reason = PDSA_LoopIterVarLinear;
3655 } else if (isOpenMPTaskingDirective(DVar.DKind) &&
3656 DVar.CKind == OMPC_firstprivate) {
3657 Reason = PDSA_TaskVarFirstprivate;
3658 ReportLoc = DVar.ImplicitDSALoc;
3659 } else if (VD && VD->isStaticLocal())
3660 Reason = PDSA_StaticLocalVarShared;
3661 else if (VD && VD->isStaticDataMember())
3662 Reason = PDSA_StaticMemberShared;
3663 else if (VD && VD->isFileVarDecl())
3664 Reason = PDSA_GlobalVarShared;
3665 else if (D->getType().isConstant(SemaRef.getASTContext()))
3666 Reason = PDSA_ConstVarShared;
3667 else if (VD && VD->isLocalVarDecl() && DVar.CKind == OMPC_private) {
3668 ReportHint = true;
3669 Reason = PDSA_LocalVarPrivate;
3670 }
3671 if (Reason != PDSA_Implicit) {
3672 SemaRef.Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3673 << Reason << ReportHint
3674 << getOpenMPDirectiveName(Stack->getCurrentDirective());
3675 } else if (DVar.ImplicitDSALoc.isValid()) {
3676 SemaRef.Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3677 << getOpenMPClauseName(DVar.CKind);
3678 }
3679}
3680
                       bool IsAggregateOrDeclareTarget) {
  // Translate a defaultmap implicit-behavior modifier into the map type used
  // when building an implicit map clause for a referenced variable.
  // NOTE(review): the signature line and the declaration of 'Kind' appear to
  // have been lost in extraction — confirm against the original file.
  switch (M) {
  case OMPC_DEFAULTMAP_MODIFIER_alloc:
    Kind = OMPC_MAP_alloc;
    break;
  case OMPC_DEFAULTMAP_MODIFIER_to:
    Kind = OMPC_MAP_to;
    break;
  case OMPC_DEFAULTMAP_MODIFIER_from:
    Kind = OMPC_MAP_from;
    break;
  case OMPC_DEFAULTMAP_MODIFIER_tofrom:
    Kind = OMPC_MAP_tofrom;
    break;
  case OMPC_DEFAULTMAP_MODIFIER_present:
    // OpenMP 5.1 [2.21.7.3] defaultmap clause, [Description]
    // If implicit-behavior is present, each variable referenced in the
    // construct in the category specified by variable-category is treated as
    // if it had been listed in a map clause with the map-type of alloc and
    // map-type-modifier of present.
    Kind = OMPC_MAP_alloc;
    break;
  case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
    llvm_unreachable("Unexpected defaultmap implicit behavior");
  case OMPC_DEFAULTMAP_MODIFIER_none:
  case OMPC_DEFAULTMAP_MODIFIER_default:
    // IsAggregateOrDeclareTarget could be true if:
    // 1. the implicit behavior for aggregate is tofrom
    // 2. it's a declare target link
    if (IsAggregateOrDeclareTarget) {
      Kind = OMPC_MAP_tofrom;
      break;
    }
    llvm_unreachable("Unexpected defaultmap implicit behavior");
  }
  assert(Kind != OMPC_MAP_unknown && "Expect map kind to be known");
  return Kind;
}
3724
namespace {
/// Statement visitor that walks the body of an OpenMP region and computes the
/// implicit data-sharing/data-mapping attributes for every referenced
/// variable and this->member: implicit firstprivate/private lists, implicit
/// map clauses bucketed by defaultmap category and map type, and the set of
/// variables whose DSA must be inherited (default(none) diagnostics).
/// NOTE(review): several declarator/lambda-header lines in this class appear
/// to have been dropped by the extraction (dangling continuation lines
/// below) — compare against the original file before building.
class DSAAttrChecker final : public StmtVisitor<DSAAttrChecker, void> {
  DSAStackTy *Stack;
  Sema &SemaRef;
  bool ErrorFound = false;
  // When set, this->member references are captured too (used for 'target'
  // regions that capture 'this').
  bool TryCaptureCXXThisMembers = false;
  CapturedStmt *CS = nullptr;
  const static unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_unknown + 1;
  llvm::SmallVector<Expr *, 4> ImplicitFirstprivate;
  llvm::SmallVector<Expr *, 4> ImplicitPrivate;
  // Implicit map expressions bucketed by defaultmap category and map type.
  llvm::SmallVector<Expr *, 4> ImplicitMap[DefaultmapKindNum][OMPC_MAP_delete];
      ImplicitMapModifier[DefaultmapKindNum];
  SemaOpenMP::VarsWithInheritedDSAType VarsWithInheritedDSA;
  // Guards against analyzing the same declaration twice.
  llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;

  // Recurse into the captured statement(s) of a nested executable directive.
  void VisitSubCaptures(OMPExecutableDirective *S) {
    // Check implicitly captured variables.
    if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
      return;
    if (S->getDirectiveKind() == OMPD_atomic ||
        S->getDirectiveKind() == OMPD_critical ||
        S->getDirectiveKind() == OMPD_section ||
        S->getDirectiveKind() == OMPD_master ||
        S->getDirectiveKind() == OMPD_masked ||
        S->getDirectiveKind() == OMPD_scope ||
        isOpenMPLoopTransformationDirective(S->getDirectiveKind())) {
      Visit(S->getAssociatedStmt());
      return;
    }
    visitSubCaptures(S->getInnermostCapturedStmt());
    // Try to capture inner this->member references to generate correct
    // mappings and diagnostics.
    if (TryCaptureCXXThisMembers ||
        (isOpenMPTargetExecutionDirective(Stack->getCurrentDirective()) &&
         llvm::any_of(S->getInnermostCapturedStmt()->captures(),
                      [](const CapturedStmt::Capture &C) {
                        return C.capturesThis();
                      }))) {
      bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
      TryCaptureCXXThisMembers = true;
      Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
      TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
    }
    // In tasks firstprivates are not captured anymore, need to analyze them
    // explicitly.
    if (isOpenMPTaskingDirective(S->getDirectiveKind()) &&
        !isOpenMPTaskLoopDirective(S->getDirectiveKind())) {
      for (OMPClause *C : S->clauses())
        if (auto *FC = dyn_cast<OMPFirstprivateClause>(C)) {
          for (Expr *Ref : FC->varlists())
            Visit(Ref);
        }
    }
  }

public:
  void VisitDeclRefExpr(DeclRefExpr *E) {
    if (TryCaptureCXXThisMembers || E->isTypeDependent() ||
        E->isNonOdrUse() == clang::NOUR_Unevaluated)
      return;
    if (auto *VD = dyn_cast<VarDecl>(E->getDecl())) {
      // Check the datasharing rules for the expressions in the clauses.
      if (!CS || (isa<OMPCapturedExprDecl>(VD) && !CS->capturesVariable(VD) &&
                  !Stack->getTopDSA(VD, /*FromParent=*/false).RefExpr &&
                  !Stack->isImplicitDefaultFirstprivateFD(VD))) {
        if (auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
          if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
            Visit(CED->getInit());
            return;
          }
      } else if (VD->isImplicit() || isa<OMPCapturedExprDecl>(VD))
        // Do not analyze internal variables and do not enclose them into
        // implicit clauses.
        if (!Stack->isImplicitDefaultFirstprivateFD(VD))
          return;
      VD = VD->getCanonicalDecl();
      // Skip internally declared variables.
      if (VD->hasLocalStorage() && CS && !CS->capturesVariable(VD) &&
          !Stack->isImplicitDefaultFirstprivateFD(VD) &&
          !Stack->isImplicitTaskFirstprivate(VD))
        return;
      // Skip allocators in uses_allocators clauses.
      if (Stack->isUsesAllocatorsDecl(VD))
        return;

      DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD, /*FromParent=*/false);
      // Check if the variable has explicit DSA set and stop analysis if it so.
      if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
        return;

      // Skip internally declared static variables.
      std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
          OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
      if (VD->hasGlobalStorage() && CS && !CS->capturesVariable(VD) &&
          (Stack->hasRequiresDeclWithClause<OMPUnifiedSharedMemoryClause>() ||
           !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
          !Stack->isImplicitDefaultFirstprivateFD(VD) &&
          !Stack->isImplicitTaskFirstprivate(VD))
        return;

      SourceLocation ELoc = E->getExprLoc();
      OpenMPDirectiveKind DKind = Stack->getCurrentDirective();
      // The default(none) clause requires that each variable that is
      // referenced in the construct, and does not have a predetermined
      // data-sharing attribute, must have its data-sharing attribute
      // explicitly determined by being listed in a data-sharing attribute
      // clause.
      if (DVar.CKind == OMPC_unknown &&
          (Stack->getDefaultDSA() == DSA_none ||
           Stack->getDefaultDSA() == DSA_private ||
           Stack->getDefaultDSA() == DSA_firstprivate) &&
          isImplicitOrExplicitTaskingRegion(DKind) &&
          VarsWithInheritedDSA.count(VD) == 0) {
        bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
        if (!InheritedDSA && (Stack->getDefaultDSA() == DSA_firstprivate ||
                              Stack->getDefaultDSA() == DSA_private)) {
          DSAStackTy::DSAVarData DVar =
              Stack->getImplicitDSA(VD, /*FromParent=*/false);
          InheritedDSA = DVar.CKind == OMPC_unknown;
        }
        if (InheritedDSA)
          VarsWithInheritedDSA[VD] = E;
        if (Stack->getDefaultDSA() == DSA_none)
          return;
      }

      // OpenMP 5.0 [2.19.7.2, defaultmap clause, Description]
      // If implicit-behavior is none, each variable referenced in the
      // construct that does not have a predetermined data-sharing attribute
      // and does not appear in a to or link clause on a declare target
      // directive must be listed in a data-mapping attribute clause, a
      // data-sharing attribute clause (including a data-sharing attribute
      // clause on a combined construct where target. is one of the
      // constituent constructs), or an is_device_ptr clause.
      OpenMPDefaultmapClauseKind ClauseKind =
      if (SemaRef.getLangOpts().OpenMP >= 50) {
        bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
                              OMPC_DEFAULTMAP_MODIFIER_none;
        if (DVar.CKind == OMPC_unknown && IsModifierNone &&
            VarsWithInheritedDSA.count(VD) == 0 && !Res) {
          // Only check for data-mapping attribute and is_device_ptr here
          // since we have already make sure that the declaration does not
          // have a data-sharing attribute above
          if (!Stack->checkMappableExprComponentListsForDecl(
                  VD, /*CurrentRegionOnly=*/true,
                      MapExprComponents,
                    auto MI = MapExprComponents.rbegin();
                    auto ME = MapExprComponents.rend();
                    return MI != ME && MI->getAssociatedDeclaration() == VD;
                  })) {
            VarsWithInheritedDSA[VD] = E;
            return;
          }
        }
      }
      if (SemaRef.getLangOpts().OpenMP > 50) {
        bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
                                 OMPC_DEFAULTMAP_MODIFIER_present;
        if (IsModifierPresent) {
          if (!llvm::is_contained(ImplicitMapModifier[ClauseKind],
                                  OMPC_MAP_MODIFIER_present)) {
            ImplicitMapModifier[ClauseKind].push_back(
                OMPC_MAP_MODIFIER_present);
          }
        }
      }

          !Stack->isLoopControlVariable(VD).first) {
        if (!Stack->checkMappableExprComponentListsForDecl(
                VD, /*CurrentRegionOnly=*/true,
                    StackComponents,
                  if (SemaRef.LangOpts.OpenMP >= 50)
                    return !StackComponents.empty();
                  // Variable is used if it has been marked as an array, array
                  // section, array shaping or the variable itself.
                  return StackComponents.size() == 1 ||
                         llvm::all_of(
                             llvm::drop_begin(llvm::reverse(StackComponents)),
                             [](const OMPClauseMappableExprCommon::
                                    MappableComponent &MC) {
                               return MC.getAssociatedDeclaration() ==
                                          nullptr &&
                                      (isa<ArraySectionExpr>(
                                           MC.getAssociatedExpression()) ||
                                       isa<OMPArrayShapingExpr>(
                                           MC.getAssociatedExpression()) ||
                                       isa<ArraySubscriptExpr>(
                                           MC.getAssociatedExpression()));
                             });
                })) {
          bool IsFirstprivate = false;
          // By default lambdas are captured as firstprivates.
          if (const auto *RD =
            IsFirstprivate = RD->isLambda();
          IsFirstprivate =
              IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
          if (IsFirstprivate) {
            ImplicitFirstprivate.emplace_back(E);
          } else {
              Stack->getDefaultmapModifier(ClauseKind);
              M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
            ImplicitMap[ClauseKind][Kind].emplace_back(E);
          }
          return;
        }
      }

      // OpenMP [2.9.3.6, Restrictions, p.2]
      // A list item that appears in a reduction clause of the innermost
      // enclosing worksharing or parallel construct may not be accessed in an
      // explicit task.
      DVar = Stack->hasInnermostDSA(
          VD,
          [](OpenMPClauseKind C, bool AppliedToPointee) {
            return C == OMPC_reduction && !AppliedToPointee;
          },
          [](OpenMPDirectiveKind K) {
            return isOpenMPParallelDirective(K) ||
          },
          /*FromParent=*/true);
      if (isOpenMPTaskingDirective(DKind) && DVar.CKind == OMPC_reduction) {
        ErrorFound = true;
        SemaRef.Diag(ELoc, diag::err_omp_reduction_in_task);
        reportOriginalDsa(SemaRef, Stack, VD, DVar);
        return;
      }

      // Define implicit data-sharing attributes for task.
      DVar = Stack->getImplicitDSA(VD, /*FromParent=*/false);
      if (((isOpenMPTaskingDirective(DKind) && DVar.CKind != OMPC_shared) ||
           (((Stack->getDefaultDSA() == DSA_firstprivate &&
              DVar.CKind == OMPC_firstprivate) ||
             (Stack->getDefaultDSA() == DSA_private &&
              DVar.CKind == OMPC_private)) &&
            !DVar.RefExpr)) &&
          !Stack->isLoopControlVariable(VD).first) {
        if (Stack->getDefaultDSA() == DSA_private)
          ImplicitPrivate.push_back(E);
        else
          ImplicitFirstprivate.push_back(E);
        return;
      }

      // Store implicitly used globals with declare target link for parent
      // target.
      if (!isOpenMPTargetExecutionDirective(DKind) && Res &&
          *Res == OMPDeclareTargetDeclAttr::MT_Link) {
        Stack->addToParentTargetRegionLinkGlobals(E);
        return;
      }
    }
  }
  void VisitMemberExpr(MemberExpr *E) {
    if (E->isTypeDependent() || E->isValueDependent() ||
      return;
    auto *FD = dyn_cast<FieldDecl>(E->getMemberDecl());
    OpenMPDirectiveKind DKind = Stack->getCurrentDirective();
    if (auto *TE = dyn_cast<CXXThisExpr>(E->getBase()->IgnoreParenCasts())) {
      if (!FD)
        return;
      DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD, /*FromParent=*/false);
      // Check if the variable has explicit DSA set and stop analysis if it
      // so.
      if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
        return;

          !Stack->isLoopControlVariable(FD).first &&
          !Stack->checkMappableExprComponentListsForDecl(
              FD, /*CurrentRegionOnly=*/true,
                  StackComponents,
                return isa<CXXThisExpr>(
                    cast<MemberExpr>(
                        StackComponents.back().getAssociatedExpression())
                        ->getBase()
                        ->IgnoreParens());
              })) {
        // OpenMP 4.5 [2.15.5.1, map Clause, Restrictions, C/C++, p.3]
        // A bit-field cannot appear in a map clause.
        //
        if (FD->isBitField())
          return;

        // Check to see if the member expression is referencing a class that
        // has already been explicitly mapped
        if (Stack->isClassPreviouslyMapped(TE->getType()))
          return;

            Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
        OpenMPDefaultmapClauseKind ClauseKind =
            Modifier, /*IsAggregateOrDeclareTarget*/ true);
        ImplicitMap[ClauseKind][Kind].emplace_back(E);
        return;
      }

      SourceLocation ELoc = E->getExprLoc();
      // OpenMP [2.9.3.6, Restrictions, p.2]
      // A list item that appears in a reduction clause of the innermost
      // enclosing worksharing or parallel construct may not be accessed in
      // an explicit task.
      DVar = Stack->hasInnermostDSA(
          FD,
          [](OpenMPClauseKind C, bool AppliedToPointee) {
            return C == OMPC_reduction && !AppliedToPointee;
          },
          [](OpenMPDirectiveKind K) {
            return isOpenMPParallelDirective(K) ||
          },
          /*FromParent=*/true);
      if (isOpenMPTaskingDirective(DKind) && DVar.CKind == OMPC_reduction) {
        ErrorFound = true;
        SemaRef.Diag(ELoc, diag::err_omp_reduction_in_task);
        reportOriginalDsa(SemaRef, Stack, FD, DVar);
        return;
      }

      // Define implicit data-sharing attributes for task.
      DVar = Stack->getImplicitDSA(FD, /*FromParent=*/false);
      if (isOpenMPTaskingDirective(DKind) && DVar.CKind != OMPC_shared &&
          !Stack->isLoopControlVariable(FD).first) {
        // Check if there is a captured expression for the current field in
        // the region. Do not mark it as firstprivate unless there is no
        // captured expression.
        // TODO: try to make it firstprivate.
        if (DVar.CKind != OMPC_unknown)
          ImplicitFirstprivate.push_back(E);
      }
      return;
    }
    if (!checkMapClauseExpressionBase(SemaRef, E, CurComponents, OMPC_map,
                                      Stack->getCurrentDirective(),
                                      /*NoDiagnose=*/true))
      return;
    const auto *VD = cast<ValueDecl>(
        CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
    if (!Stack->checkMappableExprComponentListsForDecl(
            VD, /*CurrentRegionOnly=*/true,
            [&CurComponents](
                StackComponents,
              auto CCI = CurComponents.rbegin();
              auto CCE = CurComponents.rend();
              for (const auto &SC : llvm::reverse(StackComponents)) {
                // Do both expressions have the same kind?
                if (CCI->getAssociatedExpression()->getStmtClass() !=
                    SC.getAssociatedExpression()->getStmtClass())
                  if (!((isa<ArraySectionExpr>(
                             SC.getAssociatedExpression()) ||
                         isa<OMPArrayShapingExpr>(
                             SC.getAssociatedExpression())) &&
                        isa<ArraySubscriptExpr>(
                            CCI->getAssociatedExpression())))
                    return false;

                const Decl *CCD = CCI->getAssociatedDeclaration();
                const Decl *SCD = SC.getAssociatedDeclaration();
                CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
                SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
                if (SCD != CCD)
                  return false;
                std::advance(CCI, 1);
                if (CCI == CCE)
                  break;
              }
              return true;
            })) {
      Visit(E->getBase());
    }
    } else if (!TryCaptureCXXThisMembers) {
      Visit(E->getBase());
    }
  }
  void VisitOMPExecutableDirective(OMPExecutableDirective *S) {
    for (OMPClause *C : S->clauses()) {
      // Skip analysis of arguments of private clauses for task|target
      // directives.
      if (isa_and_nonnull<OMPPrivateClause>(C))
        continue;
      // Skip analysis of arguments of implicitly defined firstprivate clause
      // for task|target directives.
      // Skip analysis of arguments of implicitly defined map clause for
      // target directives.
      if (C && !((isa<OMPFirstprivateClause>(C) || isa<OMPMapClause>(C)) &&
                 C->isImplicit() &&
                 !isOpenMPTaskingDirective(Stack->getCurrentDirective()))) {
        for (Stmt *CC : C->children()) {
          if (CC)
            Visit(CC);
        }
      }
    }
    // Check implicitly captured variables.
    VisitSubCaptures(S);
  }

  void VisitOMPLoopTransformationDirective(OMPLoopTransformationDirective *S) {
    // Loop transformation directives do not introduce data sharing
    VisitStmt(S);
  }

  void VisitCallExpr(CallExpr *S) {
    for (Stmt *C : S->arguments()) {
      if (C) {
        // Check implicitly captured variables in the task-based directives to
        // check if they must be firstprivatized.
        Visit(C);
      }
    }
    if (Expr *Callee = S->getCallee()) {
      auto *CI = Callee->IgnoreParenImpCasts();
      if (auto *CE = dyn_cast<MemberExpr>(CI))
        Visit(CE->getBase());
      else if (auto *CE = dyn_cast<DeclRefExpr>(CI))
        Visit(CE);
    }
  }
  void VisitStmt(Stmt *S) {
    for (Stmt *C : S->children()) {
      if (C) {
        // Check implicitly captured variables in the task-based directives to
        // check if they must be firstprivatized.
        Visit(C);
      }
    }
  }

  // Visit each variable captured by a CapturedStmt (by ref or by copy) as if
  // it were referenced directly, unless it was already mapped.
  void visitSubCaptures(CapturedStmt *S) {
    for (const CapturedStmt::Capture &Cap : S->captures()) {
      if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
        continue;
      VarDecl *VD = Cap.getCapturedVar();
      // Do not try to map the variable if it or its sub-component was mapped
      // already.
      if (isOpenMPTargetExecutionDirective(Stack->getCurrentDirective()) &&
          Stack->checkMappableExprComponentListsForDecl(
              VD, /*CurrentRegionOnly=*/true,
                  OpenMPClauseKind) { return true; }))
        continue;
          SemaRef, VD, VD->getType().getNonLValueExprType(SemaRef.Context),
          Cap.getLocation(), /*RefersToCapture=*/true);
      Visit(DRE);
    }
  }
  bool isErrorFound() const { return ErrorFound; }
  ArrayRef<Expr *> getImplicitFirstprivate() const {
    return ImplicitFirstprivate;
  }
  ArrayRef<Expr *> getImplicitPrivate() const { return ImplicitPrivate; }
      OpenMPMapClauseKind MK) const {
    return ImplicitMap[DK][MK];
  }
  getImplicitMapModifier(OpenMPDefaultmapClauseKind Kind) const {
    return ImplicitMapModifier[Kind];
  }
  const SemaOpenMP::VarsWithInheritedDSAType &getVarsWithInheritedDSA() const {
    return VarsWithInheritedDSA;
  }

  // For target execution directives, pre-visit declare-target 'link' globals
  // recorded on the stack so they get mapped/diagnosed too.
  DSAAttrChecker(DSAStackTy *S, Sema &SemaRef, CapturedStmt *CS)
      : Stack(S), SemaRef(SemaRef), ErrorFound(false), CS(CS) {
    // Process declare target link variables for the target directives.
    if (isOpenMPTargetExecutionDirective(S->getCurrentDirective())) {
      for (DeclRefExpr *E : Stack->getLinkGlobals())
        Visit(E);
    }
  }
};
} // namespace
4219
4220static void handleDeclareVariantConstructTrait(DSAStackTy *Stack,
4221 OpenMPDirectiveKind DKind,
4222 bool ScopeEntry) {
4225 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
4226 if (isOpenMPTeamsDirective(DKind))
4227 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
4228 if (isOpenMPParallelDirective(DKind))
4229 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
4231 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
4232 if (isOpenMPSimdDirective(DKind))
4233 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
4234 Stack->handleConstructTrait(Traits, ScopeEntry);
4235}
4236
4238getParallelRegionParams(Sema &SemaRef, bool LoopBoundSharing) {
4239 ASTContext &Context = SemaRef.getASTContext();
4240 QualType KmpInt32Ty =
4241 Context.getIntTypeForBitwidth(/*DestWidth=*/32, /*Signed=*/1).withConst();
4242 QualType KmpInt32PtrTy =
4243 Context.getPointerType(KmpInt32Ty).withConst().withRestrict();
4245 std::make_pair(".global_tid.", KmpInt32PtrTy),
4246 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4247 };
4248 if (LoopBoundSharing) {
4249 QualType KmpSizeTy = Context.getSizeType().withConst();
4250 Params.push_back(std::make_pair(".previous.lb.", KmpSizeTy));
4251 Params.push_back(std::make_pair(".previous.ub.", KmpSizeTy));
4252 }
4253
4254 // __context with shared vars
4255 Params.push_back(std::make_pair(StringRef(), QualType()));
4256 return Params;
4257}
4258
4261 return getParallelRegionParams(SemaRef, /*LoopBoundSharing=*/false);
4262}
4263
4266 ASTContext &Context = SemaRef.getASTContext();
4267 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4268 QualType VoidPtrTy = Context.VoidPtrTy.withConst().withRestrict();
4269 QualType KmpInt32PtrTy =
4270 Context.getPointerType(KmpInt32Ty).withConst().withRestrict();
4271 QualType Args[] = {VoidPtrTy};
4273 EPI.Variadic = true;
4274 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4276 std::make_pair(".global_tid.", KmpInt32Ty),
4277 std::make_pair(".part_id.", KmpInt32PtrTy),
4278 std::make_pair(".privates.", VoidPtrTy),
4279 std::make_pair(
4280 ".copy_fn.",
4281 Context.getPointerType(CopyFnType).withConst().withRestrict()),
4282 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4283 std::make_pair(StringRef(), QualType()) // __context with shared vars
4284 };
4285 return Params;
4286}
4287
4290 ASTContext &Context = SemaRef.getASTContext();
4292 if (SemaRef.getLangOpts().OpenMPIsTargetDevice) {
4293 QualType VoidPtrTy = Context.VoidPtrTy.withConst().withRestrict();
4294 Params.push_back(std::make_pair(StringRef("dyn_ptr"), VoidPtrTy));
4295 }
4296 // __context with shared vars
4297 Params.push_back(std::make_pair(StringRef(), QualType()));
4298 return Params;
4299}
4300
4304 std::make_pair(StringRef(), QualType()) // __context with shared vars
4305 };
4306 return Params;
4307}
4308
4311 ASTContext &Context = SemaRef.getASTContext();
4312 QualType KmpInt32Ty =
4313 Context.getIntTypeForBitwidth(/*DestWidth=*/32, /*Signed=*/1).withConst();
4314 QualType KmpUInt64Ty =
4315 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/0).withConst();
4316 QualType KmpInt64Ty =
4317 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/1).withConst();
4318 QualType VoidPtrTy = Context.VoidPtrTy.withConst().withRestrict();
4319 QualType KmpInt32PtrTy =
4320 Context.getPointerType(KmpInt32Ty).withConst().withRestrict();
4321 QualType Args[] = {VoidPtrTy};
4323 EPI.Variadic = true;
4324 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4326 std::make_pair(".global_tid.", KmpInt32Ty),
4327 std::make_pair(".part_id.", KmpInt32PtrTy),
4328 std::make_pair(".privates.", VoidPtrTy),
4329 std::make_pair(
4330 ".copy_fn.",
4331 Context.getPointerType(CopyFnType).withConst().withRestrict()),
4332 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4333 std::make_pair(".lb.", KmpUInt64Ty),
4334 std::make_pair(".ub.", KmpUInt64Ty),
4335 std::make_pair(".st.", KmpInt64Ty),
4336 std::make_pair(".liter.", KmpInt32Ty),
4337 std::make_pair(".reductions.", VoidPtrTy),
4338 std::make_pair(StringRef(), QualType()) // __context with shared vars
4339 };
4340 return Params;
4341}
4342
4344 Scope *CurScope, SourceLocation Loc) {
4346 getOpenMPCaptureRegions(Regions, DKind);
4347
4348 bool LoopBoundSharing = isOpenMPLoopBoundSharingDirective(DKind);
4349
4350 auto MarkAsInlined = [&](CapturedRegionScopeInfo *CSI) {
4351 CSI->TheCapturedDecl->addAttr(AlwaysInlineAttr::CreateImplicit(
4352 SemaRef.getASTContext(), {}, AlwaysInlineAttr::Keyword_forceinline));
4353 };
4354
4355 for (auto [Level, RKind] : llvm::enumerate(Regions)) {
4356 switch (RKind) {
4357 // All region kinds that can be returned from `getOpenMPCaptureRegions`
4358 // are listed here.
4359 case OMPD_parallel:
4361 Loc, CurScope, CR_OpenMP,
4362 getParallelRegionParams(SemaRef, LoopBoundSharing), Level);
4363 break;
4364 case OMPD_teams:
4365 SemaRef.ActOnCapturedRegionStart(Loc, CurScope, CR_OpenMP,
4366 getTeamsRegionParams(SemaRef), Level);
4367 break;
4368 case OMPD_task:
4369 SemaRef.ActOnCapturedRegionStart(Loc, CurScope, CR_OpenMP,
4370 getTaskRegionParams(SemaRef), Level);
4371 // Mark this captured region as inlined, because we don't use outlined
4372 // function directly.
4373 MarkAsInlined(SemaRef.getCurCapturedRegion());
4374 break;
4375 case OMPD_taskloop:
4376 SemaRef.ActOnCapturedRegionStart(Loc, CurScope, CR_OpenMP,
4377 getTaskloopRegionParams(SemaRef), Level);
4378 // Mark this captured region as inlined, because we don't use outlined
4379 // function directly.
4380 MarkAsInlined(SemaRef.getCurCapturedRegion());
4381 break;
4382 case OMPD_target:
4383 SemaRef.ActOnCapturedRegionStart(Loc, CurScope, CR_OpenMP,
4384 getTargetRegionParams(SemaRef), Level);
4385 break;
4386 case OMPD_unknown:
4387 SemaRef.ActOnCapturedRegionStart(Loc, CurScope, CR_OpenMP,
4388 getUnknownRegionParams(SemaRef));
4389 break;
4390 case OMPD_metadirective:
4391 case OMPD_nothing:
4392 default:
4393 llvm_unreachable("Unexpected capture region");
4394 }
4395 }
4396}
4397
4399 Scope *CurScope) {
4400 switch (DKind) {
4401 case OMPD_atomic:
4402 case OMPD_critical:
4403 case OMPD_masked:
4404 case OMPD_master:
4405 case OMPD_section:
4406 case OMPD_tile:
4407 case OMPD_unroll:
4408 break;
4409 default:
4410 processCapturedRegions(SemaRef, DKind, CurScope,
4411 DSAStack->getConstructLoc());
4412 break;
4413 }
4414
4415 DSAStack->setContext(SemaRef.CurContext);
4416 handleDeclareVariantConstructTrait(DSAStack, DKind, /*ScopeEntry=*/true);
4417}
4418
4419int SemaOpenMP::getNumberOfConstructScopes(unsigned Level) const {
4420 return getOpenMPCaptureLevels(DSAStack->getDirective(Level));
4421}
4422
// NOTE(review): the signature and the local `CaptureRegions` declaration
// (original lines 4423-4424) were lost in the extraction; only the tail of
// this function is visible. It fills the capture-region list for the
// directive and returns how many capture regions it has.
 4425 getOpenMPCaptureRegions(CaptureRegions, DKind);
 4426 return CaptureRegions.size();
 4427}
4428
// NOTE(review): the first signature line (original line 4429, carrying the
// return type, `Sema &S` and the identifier parameter) was lost in the
// extraction. Builds an OMPCapturedExprDecl that captures the value of
// CaptureExpr; returns nullptr if the address-of expression cannot be built.
 4430 Expr *CaptureExpr, bool WithInit,
 4431 DeclContext *CurContext,
 4432 bool AsExpression) {
 4433 assert(CaptureExpr);
 4434 ASTContext &C = S.getASTContext();
 4435 Expr *Init = AsExpression ? CaptureExpr : CaptureExpr->IgnoreImpCasts();
 4436 QualType Ty = Init->getType();
// For an ordinary glvalue, capture by reference: an lvalue reference in C++,
// or a pointer (initialized with &Init) in C, where references don't exist.
 4437 if (CaptureExpr->getObjectKind() == OK_Ordinary && CaptureExpr->isGLValue()) {
 4438 if (S.getLangOpts().CPlusPlus) {
 4439 Ty = C.getLValueReferenceType(Ty);
 4440 } else {
 4441 Ty = C.getPointerType(Ty);
 4442 ExprResult Res =
 4443 S.CreateBuiltinUnaryOp(CaptureExpr->getExprLoc(), UO_AddrOf, Init);
 4444 if (!Res.isUsable())
 4445 return nullptr;
 4446 Init = Res.get();
 4447 }
// A reference/pointer capture always needs an initializer.
 4448 WithInit = true;
 4449 }
 4450 auto *CED = OMPCapturedExprDecl::Create(C, CurContext, Id, Ty,
 4451 CaptureExpr->getBeginLoc());
// Mark the decl so later phases know no initialization is required.
 4452 if (!WithInit)
 4453 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(C));
 4454 CurContext->addHiddenDecl(CED);
// NOTE(review): original line 4455 was lost in the extraction here.
 4456 S.AddInitializerToDecl(CED, Init, /*DirectInit=*/false);
 4457 return CED;
 4458}
4459
// Builds (or reuses) the capture declaration for value decl D and returns a
// DeclRefExpr to it, typed with the non-reference type of the capture.
4460static DeclRefExpr *buildCapture(Sema &S, ValueDecl *D, Expr *CaptureExpr,
 4461 bool WithInit) {
// NOTE(review): original line 4462 (presumably the declaration of `CD`) was
// lost in the extraction.
// Reuse an existing captured decl if D was already captured; otherwise build
// a fresh one named after D itself.
 4463 if (VarDecl *VD = S.OpenMP().isOpenMPCapturedDecl(D))
 4464 CD = cast<OMPCapturedExprDecl>(VD);
 4465 else
 4466 CD = buildCaptureDecl(S, D->getIdentifier(), CaptureExpr, WithInit,
 4467 S.CurContext,
 4468 /*AsExpression=*/false);
 4469 return buildDeclRefExpr(S, CD, CD->getType().getNonReferenceType(),
 4470 CaptureExpr->getExprLoc());
 4471}
4472
// Captures CaptureExpr as a named expression. On first use (Ref == nullptr),
// builds the capture declaration under the given Name and fills Ref; returns
// the lvalue-converted expression referring to the capture.
4473static ExprResult buildCapture(Sema &S, Expr *CaptureExpr, DeclRefExpr *&Ref,
 4474 StringRef Name) {
 4475 CaptureExpr = S.DefaultLvalueConversion(CaptureExpr).get();
 4476 if (!Ref) {
// NOTE(review): original line 4477 (presumably `auto *CD = buildCaptureDecl(`)
// was lost in the extraction.
 4478 S, &S.getASTContext().Idents.get(Name), CaptureExpr,
 4479 /*WithInit=*/true, S.CurContext, /*AsExpression=*/true);
 4480 Ref = buildDeclRefExpr(S, CD, CD->getType().getNonReferenceType(),
 4481 CaptureExpr->getExprLoc());
 4482 }
 4483 ExprResult Res = Ref;
// In C, glvalues were captured through a pointer (see buildCaptureDecl), so
// dereference the capture to get back an lvalue of the original type.
 4484 if (!S.getLangOpts().CPlusPlus &&
 4485 CaptureExpr->getObjectKind() == OK_Ordinary && CaptureExpr->isGLValue() &&
 4486 Ref->getType()->isPointerType()) {
 4487 Res = S.CreateBuiltinUnaryOp(CaptureExpr->getExprLoc(), UO_Deref, Ref);
 4488 if (!Res.isUsable())
 4489 return ExprError();
 4490 }
 4491 return S.DefaultLvalueConversion(Res.get());
 4492}
4493
4494namespace {
4495// OpenMP directives parsed in this section are represented as a
4496// CapturedStatement with an associated statement. If a syntax error
4497// is detected during the parsing of the associated statement, the
4498// compiler must abort processing and close the CapturedStatement.
4499//
4500// Combined directives such as 'target parallel' have more than one
4501// nested CapturedStatements. This RAII ensures that we unwind out
4502// of all the nested CapturedStatements when an error is found.
4503class CaptureRegionUnwinderRAII {
4504private:
 4505 Sema &S;
// Observed (not owned) error flag; read at destruction time so that errors
// reported after construction still trigger the unwind.
 4506 bool &ErrorFound;
 4507 OpenMPDirectiveKind DKind = OMPD_unknown;
 4508
4509public:
 4510 CaptureRegionUnwinderRAII(Sema &S, bool &ErrorFound,
 4511 OpenMPDirectiveKind DKind)
 4512 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
// On scope exit with an error, pop every capture level the directive opened.
 4513 ~CaptureRegionUnwinderRAII() {
 4514 if (ErrorFound) {
 4515 int ThisCaptureLevel = S.OpenMP().getOpenMPCaptureLevels(DKind);
 4516 while (--ThisCaptureLevel >= 0)
// NOTE(review): original line 4517 (the loop body closing each captured
// region) was lost in the extraction.
 4518 }
 4519 }
4520};
4521} // namespace
4522
// NOTE(review): the function header (original line 4523) was lost in the
// extraction; the visible body takes a value decl `V` and runs inside
// SemaOpenMP (uses SemaRef/DSAStack).
 4524 // Capture variables captured by reference in lambdas for target-based
 4525 // directives.
// NOTE(review): original lines 4526 and 4528 (the rest of this condition)
// were lost in the extraction.
 4527 (isOpenMPTargetExecutionDirective(DSAStack->getCurrentDirective()) ||
 4529 DSAStack->getCurrentDirective()))) {
 4530 QualType Type = V->getType();
 4531 if (const auto *RD = Type.getCanonicalType()
 4532 .getNonReferenceType()
 4533 ->getAsCXXRecordDecl()) {
// Temporarily force by-reference capture while we walk the record; the
// previous setting is restored below.
 4534 bool SavedForceCaptureByReferenceInTargetExecutable =
 4535 DSAStack->isForceCaptureByReferenceInTargetExecutable();
 4536 DSAStack->setForceCaptureByReferenceInTargetExecutable(
 4537 /*V=*/true);
 4538 if (RD->isLambda()) {
 4539 llvm::DenseMap<const ValueDecl *, FieldDecl *> Captures;
 4540 FieldDecl *ThisCapture;
 4541 RD->getCaptureFields(Captures, ThisCapture);
 4542 for (const LambdaCapture &LC : RD->captures()) {
 4543 if (LC.getCaptureKind() == LCK_ByRef) {
 4544 VarDecl *VD = cast<VarDecl>(LC.getCapturedVar());
 4545 DeclContext *VDC = VD->getDeclContext();
// Only re-reference variables declared in a context enclosing the current
// one; captures of unrelated scopes are left alone.
 4546 if (!VDC->Encloses(SemaRef.CurContext))
 4547 continue;
 4548 SemaRef.MarkVariableReferenced(LC.getLocation(), VD);
 4549 } else if (LC.getCaptureKind() == LCK_This) {
// NOTE(review): original line 4550 (presumably computing `ThisTy`) was lost
// in the extraction.
 4551 if (!ThisTy.isNull() && getASTContext().typesAreCompatible(
 4552 ThisTy, ThisCapture->getType()))
 4553 SemaRef.CheckCXXThisCapture(LC.getLocation());
 4554 }
 4555 }
 4556 }
 4557 DSAStack->setForceCaptureByReferenceInTargetExecutable(
 4558 SavedForceCaptureByReferenceInTargetExecutable);
 4559 }
 4560 }
4561}
4562
// NOTE(review): the first signature line (original line 4563) was lost in the
// extraction. Diagnoses the combination of an 'ordered' clause with
// 'order(concurrent)' on the same directive; returns true if an error was
// emitted.
 4564 const ArrayRef<OMPClause *> Clauses) {
 4565 const OMPOrderedClause *Ordered = nullptr;
 4566 const OMPOrderClause *Order = nullptr;
 4567
// Scan for the first 'ordered' clause and the first 'order(concurrent)'
// clause; other 'order' kinds are ignored. Stop once both are found.
 4568 for (const OMPClause *Clause : Clauses) {
 4569 if (Clause->getClauseKind() == OMPC_ordered)
 4570 Ordered = cast<OMPOrderedClause>(Clause);
 4571 else if (Clause->getClauseKind() == OMPC_order) {
 4572 Order = cast<OMPOrderClause>(Clause);
 4573 if (Order->getKind() != OMPC_ORDER_concurrent)
 4574 Order = nullptr;
 4575 }
 4576 if (Ordered && Order)
 4577 break;
 4578 }
 4579
// Both present: incompatible per OpenMP 5.0 worksharing-loop restrictions.
 4580 if (Ordered && Order) {
 4581 S.Diag(Order->getKindKwLoc(),
 4582 diag::err_omp_simple_clause_incompatible_with_ordered)
 4583 << getOpenMPClauseName(OMPC_order)
 4584 << getOpenMPSimpleClauseTypeName(OMPC_order, OMPC_ORDER_concurrent)
 4585 << SourceRange(Order->getBeginLoc(), Order->getEndLoc());
 4586 S.Diag(Ordered->getBeginLoc(), diag::note_omp_ordered_param)
 4587 << 0 << SourceRange(Ordered->getBeginLoc(), Ordered->getEndLoc());
 4588 return true;
 4589 }
 4590 return false;
 4591}
4592
// NOTE(review): the first signature line (original line 4593) was lost in the
// extraction; this is the region-end handler that takes the associated
// statement `S` plus the directive's clauses, verifies clause compatibility,
// and closes each nested captured region of a (possibly combined) directive.
 4594 ArrayRef<OMPClause *> Clauses) {
// NOTE(review): original line 4595 was lost in the extraction; line 4596 is
// the tail of a call made with /*ScopeEntry=*/false (region exit).
 4596 /* ScopeEntry */ false);
 4597 if (!isOpenMPCapturingDirective(DSAStack->getCurrentDirective()))
 4598 return S;
 4599
// The RAII below closes all nested captured regions if an error is recorded
// by the time this function returns.
 4600 bool ErrorFound = false;
 4601 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
 4602 SemaRef, ErrorFound, DSAStack->getCurrentDirective());
 4603 if (!S.isUsable()) {
 4604 ErrorFound = true;
 4605 return StmtError();
 4606 }
 4607
// NOTE(review): original line 4608 (presumably the CaptureRegions vector
// declaration) was lost in the extraction.
 4609 getOpenMPCaptureRegions(CaptureRegions, DSAStack->getCurrentDirective());
 4610 OMPOrderedClause *OC = nullptr;
 4611 OMPScheduleClause *SC = nullptr;
// NOTE(review): original lines 4612-4613 (presumably the LCs/PICs vector
// declarations and a comment) were lost in the extraction.
 4614 // This is required for proper codegen.
 4615 for (OMPClause *Clause : Clauses) {
 4616 if (!getLangOpts().OpenMPSimd &&
 4617 (isOpenMPTaskingDirective(DSAStack->getCurrentDirective()) ||
 4618 DSAStack->getCurrentDirective() == OMPD_target) &&
 4619 Clause->getClauseKind() == OMPC_in_reduction) {
 4620 // Capture taskgroup task_reduction descriptors inside the tasking regions
 4621 // with the corresponding in_reduction items.
 4622 auto *IRC = cast<OMPInReductionClause>(Clause);
 4623 for (Expr *E : IRC->taskgroup_descriptors())
 4624 if (E)
// NOTE(review): original line 4625 (the capture of E) was lost here.
 4626 }
 4627 if (isOpenMPPrivate(Clause->getClauseKind()) ||
 4628 Clause->getClauseKind() == OMPC_copyprivate ||
 4629 (getLangOpts().OpenMPUseTLS &&
 4630 getASTContext().getTargetInfo().isTLSSupported() &&
 4631 Clause->getClauseKind() == OMPC_copyin)) {
// Force capture only for copyin variables (TLS); all other private-like
// clauses just mark their variables as referenced.
 4632 DSAStack->setForceVarCapturing(Clause->getClauseKind() == OMPC_copyin);
 4633 // Mark all variables in private list clauses as used in inner region.
 4634 for (Stmt *VarRef : Clause->children()) {
 4635 if (auto *E = cast_or_null<Expr>(VarRef)) {
// NOTE(review): original line 4636 (the capture of E) was lost here.
 4637 }
 4638 }
 4639 DSAStack->setForceVarCapturing(/*V=*/false);
 4640 } else if (CaptureRegions.size() > 1 ||
 4641 CaptureRegions.back() != OMPD_unknown) {
 4642 if (auto *C = OMPClauseWithPreInit::get(Clause))
 4643 PICs.push_back(C);
 4644 if (auto *C = OMPClauseWithPostUpdate::get(Clause)) {
 4645 if (Expr *E = C->getPostUpdateExpr())
// NOTE(review): original line 4646 (the capture of E) was lost here.
 4647 }
 4648 }
// Remember schedule/ordered/linear clauses for the cross-clause checks below.
 4649 if (Clause->getClauseKind() == OMPC_schedule)
 4650 SC = cast<OMPScheduleClause>(Clause);
 4651 else if (Clause->getClauseKind() == OMPC_ordered)
 4652 OC = cast<OMPOrderedClause>(Clause);
 4653 else if (Clause->getClauseKind() == OMPC_linear)
 4654 LCs.push_back(cast<OMPLinearClause>(Clause));
 4655 }
 4656 // Capture allocator expressions if used.
 4657 for (Expr *E : DSAStack->getInnerAllocators())
// NOTE(review): original line 4658 (the capture of E) was lost here.
 4659 // OpenMP, 2.7.1 Loop Construct, Restrictions
 4660 // The nonmonotonic modifier cannot be specified if an ordered clause is
 4661 // specified.
 4662 if (SC &&
 4663 (SC->getFirstScheduleModifier() == OMPC_SCHEDULE_MODIFIER_nonmonotonic ||
// NOTE(review): original line 4664 (the second-modifier check) was lost here.
 4665 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
 4666 OC) {
 4667 Diag(SC->getFirstScheduleModifier() == OMPC_SCHEDULE_MODIFIER_nonmonotonic
// NOTE(review): original lines 4668-4669 (the two location operands of this
// conditional) were lost here.
 4670 diag::err_omp_simple_clause_incompatible_with_ordered)
 4671 << getOpenMPClauseName(OMPC_schedule)
 4672 << getOpenMPSimpleClauseTypeName(OMPC_schedule,
 4673 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
 4674 << SourceRange(OC->getBeginLoc(), OC->getEndLoc());
 4675 ErrorFound = true;
 4676 }
 4677 // OpenMP 5.0, 2.9.2 Worksharing-Loop Construct, Restrictions.
 4678 // If an order(concurrent) clause is present, an ordered clause may not appear
 4679 // on the same directive.
 4680 if (checkOrderedOrderSpecified(SemaRef, Clauses))
 4681 ErrorFound = true;
// linear clauses are incompatible with ordered(n).
 4682 if (!LCs.empty() && OC && OC->getNumForLoops()) {
 4683 for (const OMPLinearClause *C : LCs) {
 4684 Diag(C->getBeginLoc(), diag::err_omp_linear_ordered)
 4685 << SourceRange(OC->getBeginLoc(), OC->getEndLoc());
 4686 }
 4687 ErrorFound = true;
 4688 }
// ordered(n) is not allowed on worksharing simd directives.
 4689 if (isOpenMPWorksharingDirective(DSAStack->getCurrentDirective()) &&
 4690 isOpenMPSimdDirective(DSAStack->getCurrentDirective()) && OC &&
 4691 OC->getNumForLoops()) {
 4692 Diag(OC->getBeginLoc(), diag::err_omp_ordered_simd)
 4693 << getOpenMPDirectiveName(DSAStack->getCurrentDirective());
 4694 ErrorFound = true;
 4695 }
 4696 if (ErrorFound) {
 4697 return StmtError();
 4698 }
// Close the capture regions innermost-first, re-marking clause-generated
// variables as used in the region each belongs to.
 4699 StmtResult SR = S;
 4700 unsigned CompletedRegions = 0;
 4701 for (OpenMPDirectiveKind ThisCaptureRegion : llvm::reverse(CaptureRegions)) {
 4702 // Mark all variables in private list clauses as used in inner region.
 4703 // Required for proper codegen of combined directives.
 4704 // TODO: add processing for other clauses.
 4705 if (ThisCaptureRegion != OMPD_unknown) {
 4706 for (const clang::OMPClauseWithPreInit *C : PICs) {
 4707 OpenMPDirectiveKind CaptureRegion = C->getCaptureRegion();
 4708 // Find the particular capture region for the clause if the
 4709 // directive is a combined one with multiple capture regions.
 4710 // If the directive is not a combined one, the capture region
 4711 // associated with the clause is OMPD_unknown and is generated
 4712 // only once.
 4713 if (CaptureRegion == ThisCaptureRegion ||
 4714 CaptureRegion == OMPD_unknown) {
 4715 if (auto *DS = cast_or_null<DeclStmt>(C->getPreInitStmt())) {
 4716 for (Decl *D : DS->decls())
// NOTE(review): original line 4717 (the MarkVariableReferenced call head)
// was lost here.
 4718 cast<VarDecl>(D));
 4719 }
 4720 }
 4721 }
 4722 }
 4723 if (ThisCaptureRegion == OMPD_target) {
 4724 // Capture allocator traits in the target region. They are used implicitly
 4725 // and, thus, are not captured by default.
 4726 for (OMPClause *C : Clauses) {
 4727 if (const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(C)) {
 4728 for (unsigned I = 0, End = UAC->getNumberOfAllocators(); I < End;
 4729 ++I) {
 4730 OMPUsesAllocatorsClause::Data D = UAC->getAllocatorData(I);
 4731 if (Expr *E = D.AllocatorTraits)
// NOTE(review): original line 4732 (the capture of E) was lost here.
 4733 }
 4734 continue;
 4735 }
 4736 }
 4737 }
 4738 if (ThisCaptureRegion == OMPD_parallel) {
 4739 // Capture temp arrays for inscan reductions and locals in aligned
 4740 // clauses.
 4741 for (OMPClause *C : Clauses) {
 4742 if (auto *RC = dyn_cast<OMPReductionClause>(C)) {
 4743 if (RC->getModifier() != OMPC_REDUCTION_inscan)
 4744 continue;
 4745 for (Expr *E : RC->copy_array_temps())
 4746 if (E)
// NOTE(review): original line 4747 (the capture of E) was lost here.
 4748 }
 4749 if (auto *AC = dyn_cast<OMPAlignedClause>(C)) {
 4750 for (Expr *E : AC->varlists())
// NOTE(review): original line 4751 (the capture of E) was lost here.
 4752 }
 4753 }
 4754 }
 4755 if (++CompletedRegions == CaptureRegions.size())
 4756 DSAStack->setBodyComplete();
// NOTE(review): original line 4757 (presumably closing the captured region
// and updating SR) was lost here.
 4758 }
 4759 return SR;
 4760}
4761
4762static bool checkCancelRegion(Sema &SemaRef, OpenMPDirectiveKind CurrentRegion,
4763 OpenMPDirectiveKind CancelRegion,
4764 SourceLocation StartLoc) {
4765 // CancelRegion is only needed for cancel and cancellation_point.
4766 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4767 return false;
4768
4769 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4770 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4771 return false;
4772
4773 SemaRef.Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4774 << getOpenMPDirectiveName(CancelRegion);
4775 return true;
4776}
4777
// Enforces the OpenMP "Nesting of Regions" rules for CurrentRegion against
// the parent directive recorded on the DSA stack. Returns true (after
// emitting a diagnostic) when the nesting is prohibited.
4778static bool checkNestingOfRegions(Sema &SemaRef, const DSAStackTy *Stack,
 4779 OpenMPDirectiveKind CurrentRegion,
 4780 const DeclarationNameInfo &CurrentName,
 4781 OpenMPDirectiveKind CancelRegion,
 4782 OpenMPBindClauseKind BindKind,
 4783 SourceLocation StartLoc) {
 4784 if (!Stack->getCurScope())
 4785 return false;
 4786
 4787 OpenMPDirectiveKind ParentRegion = Stack->getParentDirective();
 4788 OpenMPDirectiveKind OffendingRegion = ParentRegion;
 4789 bool NestingProhibited = false;
 4790 bool CloseNesting = true;
 4791 bool OrphanSeen = false;
// Index of the diagnostic hint ("should be in a ... region") selected below.
 4792 enum {
 4793 NoRecommend,
 4794 ShouldBeInParallelRegion,
 4795 ShouldBeInOrderedRegion,
 4796 ShouldBeInTargetRegion,
 4797 ShouldBeInTeamsRegion,
 4798 ShouldBeInLoopSimdRegion,
 4799 } Recommend = NoRecommend;
// OpenMP >= 5.1: only a limited set of constructs may be nested inside a
// region with order(concurrent).
 4800 if (SemaRef.LangOpts.OpenMP >= 51 && Stack->isParentOrderConcurrent() &&
 4801 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_loop &&
 4802 CurrentRegion != OMPD_parallel &&
 4803 !isOpenMPCombinedParallelADirective(CurrentRegion)) {
 4804 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region_order)
 4805 << getOpenMPDirectiveName(CurrentRegion);
 4806 return true;
 4807 }
 4808 if (isOpenMPSimdDirective(ParentRegion) &&
 4809 ((SemaRef.LangOpts.OpenMP <= 45 && CurrentRegion != OMPD_ordered) ||
 4810 (SemaRef.LangOpts.OpenMP >= 50 && CurrentRegion != OMPD_ordered &&
 4811 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
 4812 CurrentRegion != OMPD_scan))) {
 4813 // OpenMP [2.16, Nesting of Regions]
 4814 // OpenMP constructs may not be nested inside a simd region.
 4815 // OpenMP [2.8.1,simd Construct, Restrictions]
 4816 // An ordered construct with the simd clause is the only OpenMP
 4817 // construct that can appear in the simd region.
 4818 // Allowing a SIMD construct nested in another SIMD construct is an
 4819 // extension. The OpenMP 4.5 spec does not allow it. Issue a warning
 4820 // message.
 4821 // OpenMP 5.0 [2.9.3.1, simd Construct, Restrictions]
 4822 // The only OpenMP constructs that can be encountered during execution of
 4823 // a simd region are the atomic construct, the loop construct, the simd
 4824 // construct and the ordered construct with the simd clause.
 4825 SemaRef.Diag(StartLoc, (CurrentRegion != OMPD_simd)
 4826 ? diag::err_omp_prohibited_region_simd
 4827 : diag::warn_omp_nesting_simd)
 4828 << (SemaRef.LangOpts.OpenMP >= 50 ? 1 : 0);
// simd-in-simd is only a warning, so do not report an error for it.
 4829 return CurrentRegion != OMPD_simd;
 4830 }
 4831 if (ParentRegion == OMPD_atomic) {
 4832 // OpenMP [2.16, Nesting of Regions]
 4833 // OpenMP constructs may not be nested inside an atomic region.
 4834 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
 4835 return true;
 4836 }
 4837 if (CurrentRegion == OMPD_section) {
 4838 // OpenMP [2.7.2, sections Construct, Restrictions]
 4839 // Orphaned section directives are prohibited. That is, the section
 4840 // directives must appear within the sections construct and must not be
 4841 // encountered elsewhere in the sections region.
 4842 if (ParentRegion != OMPD_sections &&
 4843 ParentRegion != OMPD_parallel_sections) {
 4844 SemaRef.Diag(StartLoc, diag::err_omp_orphaned_section_directive)
 4845 << (ParentRegion != OMPD_unknown)
 4846 << getOpenMPDirectiveName(ParentRegion)
 4847 return true;
 4848 }
 4849 return false;
 4850 }
 4851 // Allow some constructs (except teams and cancellation constructs) to be
 4852 // orphaned (they could be used in functions, called from OpenMP regions
 4853 // with the required preconditions).
 4854 if (ParentRegion == OMPD_unknown &&
 4855 !isOpenMPNestingTeamsDirective(CurrentRegion) &&
 4856 CurrentRegion != OMPD_cancellation_point &&
 4857 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
 4858 return false;
 4859 // Checks needed for mapping "loop" construct. Please check mapLoopConstruct
 4860 // for a detailed explanation
 4861 if (SemaRef.LangOpts.OpenMP >= 50 && CurrentRegion == OMPD_loop &&
 4862 (BindKind == OMPC_BIND_parallel || BindKind == OMPC_BIND_teams) &&
 4863 (isOpenMPWorksharingDirective(ParentRegion) ||
 4864 ParentRegion == OMPD_loop)) {
 4865 int ErrorMsgNumber = (BindKind == OMPC_BIND_parallel) ? 1 : 4;
 4866 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region)
 4867 << true << getOpenMPDirectiveName(ParentRegion) << ErrorMsgNumber
 4868 << getOpenMPDirectiveName(CurrentRegion);
 4869 return true;
 4870 }
 4871 if (CurrentRegion == OMPD_cancellation_point ||
 4872 CurrentRegion == OMPD_cancel) {
 4873 // OpenMP [2.16, Nesting of Regions]
 4874 // A cancellation point construct for which construct-type-clause is
 4875 // taskgroup must be nested inside a task construct. A cancellation
 4876 // point construct for which construct-type-clause is not taskgroup must
 4877 // be closely nested inside an OpenMP construct that matches the type
 4878 // specified in construct-type-clause.
 4879 // A cancel construct for which construct-type-clause is taskgroup must be
 4880 // nested inside a task construct. A cancel construct for which
 4881 // construct-type-clause is not taskgroup must be closely nested inside an
 4882 // OpenMP construct that matches the type specified in
 4883 // construct-type-clause.
 4884 NestingProhibited =
 4885 !((CancelRegion == OMPD_parallel &&
 4886 (ParentRegion == OMPD_parallel ||
 4887 ParentRegion == OMPD_target_parallel)) ||
 4888 (CancelRegion == OMPD_for &&
 4889 (ParentRegion == OMPD_for || ParentRegion == OMPD_parallel_for ||
 4890 ParentRegion == OMPD_target_parallel_for ||
 4891 ParentRegion == OMPD_distribute_parallel_for ||
 4892 ParentRegion == OMPD_teams_distribute_parallel_for ||
 4893 ParentRegion == OMPD_target_teams_distribute_parallel_for)) ||
 4894 (CancelRegion == OMPD_taskgroup &&
 4895 (ParentRegion == OMPD_task ||
 4896 (SemaRef.getLangOpts().OpenMP >= 50 &&
 4897 (ParentRegion == OMPD_taskloop ||
 4898 ParentRegion == OMPD_master_taskloop ||
 4899 ParentRegion == OMPD_masked_taskloop ||
 4900 ParentRegion == OMPD_parallel_masked_taskloop ||
 4901 ParentRegion == OMPD_parallel_master_taskloop)))) ||
 4902 (CancelRegion == OMPD_sections &&
 4903 (ParentRegion == OMPD_section || ParentRegion == OMPD_sections ||
 4904 ParentRegion == OMPD_parallel_sections)));
 4905 OrphanSeen = ParentRegion == OMPD_unknown;
 4906 } else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
 4907 // OpenMP 5.1 [2.22, Nesting of Regions]
 4908 // A masked region may not be closely nested inside a worksharing, loop,
 4909 // atomic, task, or taskloop region.
 4910 NestingProhibited = isOpenMPWorksharingDirective(ParentRegion) ||
 4911 isOpenMPGenericLoopDirective(ParentRegion) ||
 4912 isOpenMPTaskingDirective(ParentRegion);
 4913 } else if (CurrentRegion == OMPD_critical && CurrentName.getName()) {
 4914 // OpenMP [2.16, Nesting of Regions]
 4915 // A critical region may not be nested (closely or otherwise) inside a
 4916 // critical region with the same name. Note that this restriction is not
 4917 // sufficient to prevent deadlock.
 4918 SourceLocation PreviousCriticalLoc;
 4919 bool DeadLock = Stack->hasDirective(
 4920 [CurrentName, &PreviousCriticalLoc](OpenMPDirectiveKind K,
 4921 const DeclarationNameInfo &DNI,
// NOTE(review): original line 4922 (the lambda's SourceLocation parameter)
// was lost in the extraction.
 4923 if (K == OMPD_critical && DNI.getName() == CurrentName.getName()) {
 4924 PreviousCriticalLoc = Loc;
 4925 return true;
 4926 }
 4927 return false;
 4928 },
 4929 false /* skip top directive */);
 4930 if (DeadLock) {
 4931 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region_critical_same_name)
 4932 << CurrentName.getName();
 4933 if (PreviousCriticalLoc.isValid())
 4934 SemaRef.Diag(PreviousCriticalLoc,
 4935 diag::note_omp_previous_critical_region);
 4936 return true;
 4937 }
 4938 } else if (CurrentRegion == OMPD_barrier || CurrentRegion == OMPD_scope) {
 4939 // OpenMP 5.1 [2.22, Nesting of Regions]
 4940 // A scope region may not be closely nested inside a worksharing, loop,
 4941 // task, taskloop, critical, ordered, atomic, or masked region.
 4942 // OpenMP 5.1 [2.22, Nesting of Regions]
 4943 // A barrier region may not be closely nested inside a worksharing, loop,
 4944 // task, taskloop, critical, ordered, atomic, or masked region.
 4945 NestingProhibited =
 4946 isOpenMPWorksharingDirective(ParentRegion) ||
 4947 isOpenMPGenericLoopDirective(ParentRegion) ||
 4948 isOpenMPTaskingDirective(ParentRegion) || ParentRegion == OMPD_master ||
 4949 ParentRegion == OMPD_masked || ParentRegion == OMPD_parallel_master ||
 4950 ParentRegion == OMPD_parallel_masked || ParentRegion == OMPD_critical ||
 4951 ParentRegion == OMPD_ordered;
 4952 } else if (isOpenMPWorksharingDirective(CurrentRegion) &&
 4953 !isOpenMPParallelDirective(CurrentRegion) &&
 4954 !isOpenMPTeamsDirective(CurrentRegion)) {
 4955 // OpenMP 5.1 [2.22, Nesting of Regions]
 4956 // A loop region that binds to a parallel region or a worksharing region
 4957 // may not be closely nested inside a worksharing, loop, task, taskloop,
 4958 // critical, ordered, atomic, or masked region.
 4959 NestingProhibited =
 4960 isOpenMPWorksharingDirective(ParentRegion) ||
 4961 isOpenMPGenericLoopDirective(ParentRegion) ||
 4962 isOpenMPTaskingDirective(ParentRegion) || ParentRegion == OMPD_master ||
 4963 ParentRegion == OMPD_masked || ParentRegion == OMPD_parallel_master ||
 4964 ParentRegion == OMPD_parallel_masked || ParentRegion == OMPD_critical ||
 4965 ParentRegion == OMPD_ordered;
 4966 Recommend = ShouldBeInParallelRegion;
 4967 } else if (CurrentRegion == OMPD_ordered) {
 4968 // OpenMP [2.16, Nesting of Regions]
 4969 // An ordered region may not be closely nested inside a critical,
 4970 // atomic, or explicit task region.
 4971 // An ordered region must be closely nested inside a loop region (or
 4972 // parallel loop region) with an ordered clause.
 4973 // OpenMP [2.8.1,simd Construct, Restrictions]
 4974 // An ordered construct with the simd clause is the only OpenMP construct
 4975 // that can appear in the simd region.
 4976 NestingProhibited = ParentRegion == OMPD_critical ||
 4977 isOpenMPTaskingDirective(ParentRegion) ||
 4978 !(isOpenMPSimdDirective(ParentRegion) ||
 4979 Stack->isParentOrderedRegion());
 4980 Recommend = ShouldBeInOrderedRegion;
 4981 } else if (isOpenMPNestingTeamsDirective(CurrentRegion)) {
 4982 // OpenMP [2.16, Nesting of Regions]
 4983 // If specified, a teams construct must be contained within a target
 4984 // construct.
 4985 NestingProhibited =
 4986 (SemaRef.LangOpts.OpenMP <= 45 && ParentRegion != OMPD_target) ||
 4987 (SemaRef.LangOpts.OpenMP >= 50 && ParentRegion != OMPD_unknown &&
 4988 ParentRegion != OMPD_target);
 4989 OrphanSeen = ParentRegion == OMPD_unknown;
 4990 Recommend = ShouldBeInTargetRegion;
 4991 } else if (CurrentRegion == OMPD_scan) {
 4992 if (SemaRef.LangOpts.OpenMP >= 50) {
// NOTE(review): original line 4993 (presumably the LeafOrComposite vector
// declaration) was lost in the extraction.
 4994 std::ignore = getLeafOrCompositeConstructs(ParentRegion, LeafOrComposite);
 4995 // OpenMP spec 5.0 and 5.1 require scan to be directly enclosed by for,
 4996 // simd, or for simd. This has to take into account combined directives.
 4997 // In 5.2 this seems to be implied by the fact that the specified
 4998 // separated constructs are do, for, and simd.
 4999 OpenMPDirectiveKind Enclosing = LeafOrComposite.back();
 5000 NestingProhibited = Enclosing != OMPD_for && Enclosing != OMPD_simd &&
 5001 Enclosing != OMPD_for_simd;
 5002 } else {
 5003 NestingProhibited = true;
 5004 }
 5005 OrphanSeen = ParentRegion == OMPD_unknown;
 5006 Recommend = ShouldBeInLoopSimdRegion;
 5007 }
 5008 if (!NestingProhibited && !isOpenMPTargetExecutionDirective(CurrentRegion) &&
 5009 !isOpenMPTargetDataManagementDirective(CurrentRegion) &&
 5010 (ParentRegion == OMPD_teams || ParentRegion == OMPD_target_teams)) {
 5011 // OpenMP [5.1, 2.22, Nesting of Regions]
 5012 // distribute, distribute simd, distribute parallel worksharing-loop,
 5013 // distribute parallel worksharing-loop SIMD, loop, parallel regions,
 5014 // including any parallel regions arising from combined constructs,
 5015 // omp_get_num_teams() regions, and omp_get_team_num() regions are the
 5016 // only OpenMP regions that may be strictly nested inside the teams
 5017 // region.
 5018 //
 5019 // As an extension, we permit atomic within teams as well.
 5020 NestingProhibited = !isOpenMPParallelDirective(CurrentRegion) &&
 5021 !isOpenMPDistributeDirective(CurrentRegion) &&
 5022 CurrentRegion != OMPD_loop &&
 5023 !(SemaRef.getLangOpts().OpenMPExtensions &&
 5024 CurrentRegion == OMPD_atomic);
 5025 Recommend = ShouldBeInParallelRegion;
 5026 }
 5027 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
 5028 // OpenMP [5.1, 2.11.7, loop Construct, Restrictions]
 5029 // If the bind clause is present on the loop construct and binding is
 5030 // teams then the corresponding loop region must be strictly nested inside
 5031 // a teams region.
 5032 NestingProhibited = BindKind == OMPC_BIND_teams &&
 5033 ParentRegion != OMPD_teams &&
 5034 ParentRegion != OMPD_target_teams;
 5035 Recommend = ShouldBeInTeamsRegion;
 5036 }
 5037 if (!NestingProhibited && isOpenMPNestingDistributeDirective(CurrentRegion)) {
 5038 // OpenMP 4.5 [2.17 Nesting of Regions]
 5039 // The region associated with the distribute construct must be strictly
 5040 // nested inside a teams region
 5041 NestingProhibited =
 5042 (ParentRegion != OMPD_teams && ParentRegion != OMPD_target_teams);
 5043 Recommend = ShouldBeInTeamsRegion;
 5044 }
 5045 if (!NestingProhibited &&
 5046 (isOpenMPTargetExecutionDirective(CurrentRegion) ||
 5047 isOpenMPTargetDataManagementDirective(CurrentRegion))) {
 5048 // OpenMP 4.5 [2.17 Nesting of Regions]
 5049 // If a target, target update, target data, target enter data, or
 5050 // target exit data construct is encountered during execution of a
 5051 // target region, the behavior is unspecified.
 5052 NestingProhibited = Stack->hasDirective(
 5053 [&OffendingRegion](OpenMPDirectiveKind K, const DeclarationNameInfo &,
// NOTE(review): original lines 5054-5055 (the lambda's SourceLocation
// parameter and the enclosing-target check) were lost in the extraction.
 5056 OffendingRegion = K;
 5057 return true;
 5058 }
 5059 return false;
 5060 },
 5061 false /* don't skip top directive */);
// The offending target region may be anywhere up the stack, not necessarily
// the closest enclosing region.
 5062 CloseNesting = false;
 5063 }
 5064 if (NestingProhibited) {
 5065 if (OrphanSeen) {
 5066 SemaRef.Diag(StartLoc, diag::err_omp_orphaned_device_directive)
 5067 << getOpenMPDirectiveName(CurrentRegion) << Recommend;
 5068 } else {
 5069 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region)
 5070 << CloseNesting << getOpenMPDirectiveName(OffendingRegion)
 5071 << Recommend << getOpenMPDirectiveName(CurrentRegion);
 5072 }
 5073 return true;
 5074 }
 5075 return false;
 5076}
5077
5080 unsigned operator()(argument_type DK) { return unsigned(DK); }
5081};
5083 ArrayRef<OMPClause *> Clauses,
5084 ArrayRef<OpenMPDirectiveKind> AllowedNameModifiers) {
5085 bool ErrorFound = false;
5086 unsigned NamedModifiersNumber = 0;
5087 llvm::IndexedMap<const OMPIfClause *, Kind2Unsigned> FoundNameModifiers;
5088 FoundNameModifiers.resize(llvm::omp::Directive_enumSize + 1);
5089 SmallVector<SourceLocation, 4> NameModifierLoc;
5090 for (const OMPClause *C : Clauses) {
5091 if (const auto *IC = dyn_cast_or_null<OMPIfClause>(C)) {
5092 // At most one if clause without a directive-name-modifier can appear on
5093 // the directive.
5094 OpenMPDirectiveKind CurNM = IC->getNameModifier();
5095 if (FoundNameModifiers[CurNM]) {
5096 S.Diag(C->getBeginLoc(), diag::err_omp_more_one_clause)
5097 << getOpenMPDirectiveName(Kind) << getOpenMPClauseName(OMPC_if)
5098 << (CurNM != OMPD_unknown) << getOpenMPDirectiveName(CurNM);
5099 ErrorFound = true;
5100 } else if (CurNM != OMPD_unknown) {
5101 NameModifierLoc.push_back(IC->getNameModifierLoc());
5102 ++NamedModifiersNumber;
5103 }
5104 FoundNameModifiers[CurNM] = IC;
5105 if (CurNM == OMPD_unknown)
5106 continue;
5107 // Check if the specified name modifier is allowed for the current
5108 // directive.
5109 // At most one if clause with the particular directive-name-modifier can
5110 // appear on the directive.
5111 if (!llvm::is_contained(AllowedNameModifiers, CurNM)) {
5112 S.Diag(IC->getNameModifierLoc(),
5113 diag::err_omp_wrong_if_directive_name_modifier)
5114 << getOpenMPDirectiveName(CurNM) << getOpenMPDirectiveName(Kind);
5115 ErrorFound = true;