1//===--- SemaOpenMP.cpp - Semantic Analysis for OpenMP constructs ---------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8/// \file
9/// This file implements semantic analysis for OpenMP directives and
10/// clauses.
11///
12//===----------------------------------------------------------------------===//
13
14#include "TreeTransform.h"
18#include "clang/AST/Decl.h"
19#include "clang/AST/DeclCXX.h"
22#include "clang/AST/StmtCXX.h"
32#include "clang/Sema/Lookup.h"
34#include "clang/Sema/Scope.h"
37#include "llvm/ADT/IndexedMap.h"
38#include "llvm/ADT/PointerEmbeddedInt.h"
39#include "llvm/ADT/STLExtras.h"
40#include "llvm/ADT/SmallSet.h"
41#include "llvm/ADT/StringExtras.h"
42#include "llvm/Frontend/OpenMP/OMPAssume.h"
43#include "llvm/Frontend/OpenMP/OMPConstants.h"
44#include <optional>
45#include <set>
46
47using namespace clang;
48using namespace llvm::omp;
49
50//===----------------------------------------------------------------------===//
51// Stack of data-sharing attributes for variables
52//===----------------------------------------------------------------------===//
53
54static ExprResult checkMapClauseExpressionBase(
55 Sema &SemaRef, Expr *E,
56 OMPClauseMappableExprCommon::MappableExprComponentListRef &CurComponents,
57 OpenMPClauseKind CKind, OpenMPDirectiveKind DKind, bool NoDiagnose);
58
59namespace {
60/// Default data sharing attributes, which can be applied to a directive.
61enum DefaultDataSharingAttributes {
62 DSA_unspecified = 0, /// Data sharing attribute not specified.
63 DSA_none = 1 << 0, /// Default data sharing attribute 'none'.
64 DSA_shared = 1 << 1, /// Default data sharing attribute 'shared'.
65 DSA_private = 1 << 2, /// Default data sharing attribute 'private'.
66 DSA_firstprivate = 1 << 3, /// Default data sharing attribute 'firstprivate'.
67};
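// Illustrative note (not part of the upstream file): because the values above
// are distinct powers of two, a default attribute can be tested against a set
// of kinds in a single step. The helper below is a hypothetical sketch, not an
// API of this file.
//
// \code
//   static bool isPrivatizingDefault(DefaultDataSharingAttributes Attr) {
//     // Both 'private' and 'firstprivate' defaults force a private copy.
//     return (Attr & (DSA_private | DSA_firstprivate)) != 0;
//   }
// \endcode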
68
69/// Stack for tracking declarations used in OpenMP directives and
70/// clauses and their data-sharing attributes.
71class DSAStackTy {
72public:
73 struct DSAVarData {
74 OpenMPDirectiveKind DKind = OMPD_unknown;
75 OpenMPClauseKind CKind = OMPC_unknown;
76 unsigned Modifier = 0;
77 const Expr *RefExpr = nullptr;
78 DeclRefExpr *PrivateCopy = nullptr;
79 SourceLocation ImplicitDSALoc;
80 bool AppliedToPointee = false;
81 DSAVarData() = default;
82 DSAVarData(OpenMPDirectiveKind DKind, OpenMPClauseKind CKind,
83 const Expr *RefExpr, DeclRefExpr *PrivateCopy,
84 SourceLocation ImplicitDSALoc, unsigned Modifier,
85 bool AppliedToPointee)
86 : DKind(DKind), CKind(CKind), Modifier(Modifier), RefExpr(RefExpr),
87 PrivateCopy(PrivateCopy), ImplicitDSALoc(ImplicitDSALoc),
88 AppliedToPointee(AppliedToPointee) {}
89 };
90 using OperatorOffsetTy =
91 llvm::SmallVector<std::pair<Expr *, OverloadedOperatorKind>, 4>;
92 using DoacrossClauseMapTy = llvm::DenseMap<OMPClause *, OperatorOffsetTy>;
93 /// Kind of the declaration used in the uses_allocators clauses.
94 enum class UsesAllocatorsDeclKind {
95 /// Predefined allocator
96 PredefinedAllocator,
97 /// User-defined allocator
98 UserDefinedAllocator,
99 /// The declaration that represents an allocator trait
100 AllocatorTrait,
101 };
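  // Illustrative mapping (not from the upstream file) of the kinds above to a
  // hypothetical 'uses_allocators' clause; 'my_alloc' and 'my_traits' are
  // made-up names.
  //
  // \code
  //   #pragma omp target uses_allocators(omp_default_mem_alloc)  // PredefinedAllocator
  //   #pragma omp target uses_allocators(my_alloc(my_traits))    // my_alloc -> UserDefinedAllocator,
  //                                                              // my_traits -> AllocatorTrait
  // \endcode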
102
103private:
104 struct DSAInfo {
105 OpenMPClauseKind Attributes = OMPC_unknown;
106 unsigned Modifier = 0;
107 /// Pointer to a reference expression and a flag which shows that the
108 /// variable is marked as lastprivate(true) or not (false).
109 llvm::PointerIntPair<const Expr *, 1, bool> RefExpr;
110 DeclRefExpr *PrivateCopy = nullptr;
111 /// true if the attribute is applied to the pointee, not the variable
112 /// itself.
113 bool AppliedToPointee = false;
114 };
115 using DeclSAMapTy = llvm::SmallDenseMap<const ValueDecl *, DSAInfo, 8>;
116 using UsedRefMapTy = llvm::SmallDenseMap<const ValueDecl *, const Expr *, 8>;
117 using LCDeclInfo = std::pair<unsigned, VarDecl *>;
118 using LoopControlVariablesMapTy =
119 llvm::SmallDenseMap<const ValueDecl *, LCDeclInfo, 8>;
120 /// Struct that associates a component with the clause kind where it is
121 /// found.
122 struct MappedExprComponentTy {
123 OMPClauseMappableExprCommon::MappableExprComponentLists Components;
124 OpenMPClauseKind Kind = OMPC_unknown;
125 };
126 using MappedExprComponentsTy =
127 llvm::DenseMap<const ValueDecl *, MappedExprComponentTy>;
128 using CriticalsWithHintsTy =
129 llvm::StringMap<std::pair<const OMPCriticalDirective *, llvm::APSInt>>;
130 struct ReductionData {
131 using BOKPtrType = llvm::PointerEmbeddedInt<BinaryOperatorKind, 16>;
132 SourceRange ReductionRange;
133 llvm::PointerUnion<const Expr *, BOKPtrType> ReductionOp;
134 ReductionData() = default;
135 void set(BinaryOperatorKind BO, SourceRange RR) {
136 ReductionRange = RR;
137 ReductionOp = BO;
138 }
139 void set(const Expr *RefExpr, SourceRange RR) {
140 ReductionRange = RR;
141 ReductionOp = RefExpr;
142 }
143 };
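  // A minimal sketch (not in the upstream file) of how the two set() overloads
  // are meant to be used; 'RR' and 'DeclareReductionRef' are assumed values.
  //
  // \code
  //   ReductionData RD;
  //   RD.set(BO_Add, RR);               // e.g. reduction(+ : x)
  //   RD.set(DeclareReductionRef, RR);  // e.g. reduction(my_add : x), where the
  //                                     // Expr* names the declare-reduction id
  // \endcode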
144 using DeclReductionMapTy =
145 llvm::SmallDenseMap<const ValueDecl *, ReductionData, 4>;
146 struct DefaultmapInfo {
147 OpenMPDefaultmapClauseModifier ImplicitBehavior =
148 OMPC_DEFAULTMAP_MODIFIER_unknown;
149 SourceLocation SLoc;
150 DefaultmapInfo() = default;
151 DefaultmapInfo(OpenMPDefaultmapClauseModifier M, SourceLocation Loc)
152 : ImplicitBehavior(M), SLoc(Loc) {}
153 };
154
155 struct SharingMapTy {
156 DeclSAMapTy SharingMap;
157 DeclReductionMapTy ReductionMap;
158 UsedRefMapTy AlignedMap;
159 UsedRefMapTy NontemporalMap;
160 MappedExprComponentsTy MappedExprComponents;
161 LoopControlVariablesMapTy LCVMap;
162 DefaultDataSharingAttributes DefaultAttr = DSA_unspecified;
163 SourceLocation DefaultAttrLoc;
164 DefaultmapInfo DefaultmapMap[OMPC_DEFAULTMAP_unknown];
165 OpenMPDirectiveKind Directive = OMPD_unknown;
166 /// GenericLoopDirective with a bind clause is mapped to another directive,
167 /// like for, distribute or simd. Presently, MappedDirective is set to
168 /// OMPLoop. This may also be used in a similar way for other constructs.
169 OpenMPDirectiveKind MappedDirective = OMPD_unknown;
170 DeclarationNameInfo DirectiveName;
171 Scope *CurScope = nullptr;
172 DeclContext *Context = nullptr;
173 SourceLocation ConstructLoc;
174 /// Set of 'depend' clauses with 'sink|source' dependence kind. Required to
175 /// get the data (loop counters etc.) about enclosing loop-based construct.
176 /// This data is required during codegen.
177 DoacrossClauseMapTy DoacrossDepends;
178 /// First argument (Expr *) contains the optional argument of the
179 /// 'ordered' clause, the second one is true if the region has an 'ordered'
180 /// clause, false otherwise.
181 std::optional<std::pair<const Expr *, OMPOrderedClause *>> OrderedRegion;
182 bool RegionHasOrderConcurrent = false;
183 unsigned AssociatedLoops = 1;
184 bool HasMutipleLoops = false;
185 const Decl *PossiblyLoopCounter = nullptr;
186 bool NowaitRegion = false;
187 bool UntiedRegion = false;
188 bool CancelRegion = false;
189 bool LoopStart = false;
190 bool BodyComplete = false;
191 SourceLocation PrevScanLocation;
192 SourceLocation PrevOrderedLocation;
193 SourceLocation InnerTeamsRegionLoc;
194 /// Reference to the taskgroup task_reduction reference expression.
195 Expr *TaskgroupReductionRef = nullptr;
196 llvm::DenseSet<QualType> MappedClassesQualTypes;
197 SmallVector<Expr *, 4> InnerUsedAllocators;
198 llvm::DenseSet<CanonicalDeclPtr<Decl>> ImplicitTaskFirstprivates;
199 /// List of globals marked as declare target link in this target region
200 /// (isOpenMPTargetExecutionDirective(Directive) == true).
201 llvm::SmallVector<DeclRefExpr *, 4> DeclareTargetLinkVarDecls;
202 /// List of decls used in inclusive/exclusive clauses of the scan directive.
203 llvm::DenseSet<CanonicalDeclPtr<Decl>> UsedInScanDirective;
204 llvm::DenseMap<CanonicalDeclPtr<const Decl>, UsesAllocatorsDeclKind>
205 UsesAllocatorsDecls;
206 /// Data required when creating capture fields for an implicit
207 /// default first|private clause.
208 struct ImplicitDefaultFDInfoTy {
209 /// Field decl.
210 const FieldDecl *FD = nullptr;
211 /// Nesting stack level
212 size_t StackLevel = 0;
213 /// Capture variable decl.
214 VarDecl *VD = nullptr;
215 ImplicitDefaultFDInfoTy(const FieldDecl *FD, size_t StackLevel,
216 VarDecl *VD)
217 : FD(FD), StackLevel(StackLevel), VD(VD) {}
218 };
219 /// List of captured fields
220 llvm::SmallVector<ImplicitDefaultFDInfoTy, 8>
221 ImplicitDefaultFirstprivateFDs;
222 Expr *DeclareMapperVar = nullptr;
223 SmallVector<VarDecl *, 16> IteratorVarDecls;
224 SharingMapTy(OpenMPDirectiveKind DKind, DeclarationNameInfo Name,
225 Scope *CurScope, SourceLocation Loc)
226 : Directive(DKind), DirectiveName(Name), CurScope(CurScope),
227 ConstructLoc(Loc) {}
228 SharingMapTy() = default;
229 };
230
231 using StackTy = SmallVector<SharingMapTy, 4>;
232
233 /// Stack of used declarations and their data-sharing attributes.
234 DeclSAMapTy Threadprivates;
235 const FunctionScopeInfo *CurrentNonCapturingFunctionScope = nullptr;
236 SmallVector<std::pair<StackTy, const FunctionScopeInfo *>, 4> Stack;
237 /// true, if the check for DSA must be done against the parent directive,
238 /// false, if against the current directive.
239 OpenMPClauseKind ClauseKindMode = OMPC_unknown;
240 Sema &SemaRef;
241 bool ForceCapturing = false;
242 /// true if all the variables in the target executable directives must be
243 /// captured by reference.
244 bool ForceCaptureByReferenceInTargetExecutable = false;
245 CriticalsWithHintsTy Criticals;
246 unsigned IgnoredStackElements = 0;
247
248 /// Iterators over the stack iterate in order from innermost to outermost
249 /// directive.
250 using const_iterator = StackTy::const_reverse_iterator;
251 const_iterator begin() const {
252 return Stack.empty() ? const_iterator()
253 : Stack.back().first.rbegin() + IgnoredStackElements;
254 }
255 const_iterator end() const {
256 return Stack.empty() ? const_iterator() : Stack.back().first.rend();
257 }
258 using iterator = StackTy::reverse_iterator;
259 iterator begin() {
260 return Stack.empty() ? iterator()
261 : Stack.back().first.rbegin() + IgnoredStackElements;
262 }
263 iterator end() {
264 return Stack.empty() ? iterator() : Stack.back().first.rend();
265 }
266
267 // Convenience operations to get at the elements of the stack.
268
269 bool isStackEmpty() const {
270 return Stack.empty() ||
271 Stack.back().second != CurrentNonCapturingFunctionScope ||
272 Stack.back().first.size() <= IgnoredStackElements;
273 }
274 size_t getStackSize() const {
275 return isStackEmpty() ? 0
276 : Stack.back().first.size() - IgnoredStackElements;
277 }
278
279 SharingMapTy *getTopOfStackOrNull() {
280 size_t Size = getStackSize();
281 if (Size == 0)
282 return nullptr;
283 return &Stack.back().first[Size - 1];
284 }
285 const SharingMapTy *getTopOfStackOrNull() const {
286 return const_cast<DSAStackTy &>(*this).getTopOfStackOrNull();
287 }
288 SharingMapTy &getTopOfStack() {
289 assert(!isStackEmpty() && "no current directive");
290 return *getTopOfStackOrNull();
291 }
292 const SharingMapTy &getTopOfStack() const {
293 return const_cast<DSAStackTy &>(*this).getTopOfStack();
294 }
295
296 SharingMapTy *getSecondOnStackOrNull() {
297 size_t Size = getStackSize();
298 if (Size <= 1)
299 return nullptr;
300 return &Stack.back().first[Size - 2];
301 }
302 const SharingMapTy *getSecondOnStackOrNull() const {
303 return const_cast<DSAStackTy &>(*this).getSecondOnStackOrNull();
304 }
305
306 /// Get the stack element at a certain level (previously returned by
307 /// \c getNestingLevel).
308 ///
309 /// Note that nesting levels count from outermost to innermost, and this is
310 /// the reverse of our iteration order where new inner levels are pushed at
311 /// the front of the stack.
312 SharingMapTy &getStackElemAtLevel(unsigned Level) {
313 assert(Level < getStackSize() && "no such stack element");
314 return Stack.back().first[Level];
315 }
316 const SharingMapTy &getStackElemAtLevel(unsigned Level) const {
317 return const_cast<DSAStackTy &>(*this).getStackElemAtLevel(Level);
318 }
319
320 DSAVarData getDSA(const_iterator &Iter, ValueDecl *D) const;
321
322 /// Checks if the variable is local for the OpenMP region.
323 bool isOpenMPLocal(VarDecl *D, const_iterator Iter) const;
324
325 /// Vector of previously declared requires directives
326 SmallVector<const OMPRequiresDecl *, 2> RequiresDecls;
327 /// omp_allocator_handle_t type.
328 QualType OMPAllocatorHandleT;
329 /// omp_depend_t type.
330 QualType OMPDependT;
331 /// omp_event_handle_t type.
332 QualType OMPEventHandleT;
333 /// omp_alloctrait_t type.
334 QualType OMPAlloctraitT;
335 /// Expression for the predefined allocators.
336 Expr *OMPPredefinedAllocators[OMPAllocateDeclAttr::OMPUserDefinedMemAlloc] = {
337 nullptr};
338 /// Vector of previously encountered target directives
339 SmallVector<SourceLocation, 2> TargetLocations;
340 SourceLocation AtomicLocation;
341 /// Vector of declare variant construct traits.
342 SmallVector<llvm::omp::TraitProperty, 8> ConstructTraits;
343
344public:
345 explicit DSAStackTy(Sema &S) : SemaRef(S) {}
346
347 /// Sets omp_allocator_handle_t type.
348 void setOMPAllocatorHandleT(QualType Ty) { OMPAllocatorHandleT = Ty; }
349 /// Gets omp_allocator_handle_t type.
350 QualType getOMPAllocatorHandleT() const { return OMPAllocatorHandleT; }
351 /// Sets omp_alloctrait_t type.
352 void setOMPAlloctraitT(QualType Ty) { OMPAlloctraitT = Ty; }
353 /// Gets omp_alloctrait_t type.
354 QualType getOMPAlloctraitT() const { return OMPAlloctraitT; }
355 /// Sets the given default allocator.
356 void setAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
357 Expr *Allocator) {
358 OMPPredefinedAllocators[AllocatorKind] = Allocator;
359 }
360 /// Returns the specified default allocator.
361 Expr *getAllocator(OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind) const {
362 return OMPPredefinedAllocators[AllocatorKind];
363 }
364 /// Sets omp_depend_t type.
365 void setOMPDependT(QualType Ty) { OMPDependT = Ty; }
366 /// Gets omp_depend_t type.
367 QualType getOMPDependT() const { return OMPDependT; }
368
369 /// Sets omp_event_handle_t type.
370 void setOMPEventHandleT(QualType Ty) { OMPEventHandleT = Ty; }
371 /// Gets omp_event_handle_t type.
372 QualType getOMPEventHandleT() const { return OMPEventHandleT; }
373
374 bool isClauseParsingMode() const { return ClauseKindMode != OMPC_unknown; }
375 OpenMPClauseKind getClauseParsingMode() const {
376 assert(isClauseParsingMode() && "Must be in clause parsing mode.");
377 return ClauseKindMode;
378 }
379 void setClauseParsingMode(OpenMPClauseKind K) { ClauseKindMode = K; }
380
381 bool isBodyComplete() const {
382 const SharingMapTy *Top = getTopOfStackOrNull();
383 return Top && Top->BodyComplete;
384 }
385 void setBodyComplete() { getTopOfStack().BodyComplete = true; }
386
387 bool isForceVarCapturing() const { return ForceCapturing; }
388 void setForceVarCapturing(bool V) { ForceCapturing = V; }
389
390 void setForceCaptureByReferenceInTargetExecutable(bool V) {
391 ForceCaptureByReferenceInTargetExecutable = V;
392 }
393 bool isForceCaptureByReferenceInTargetExecutable() const {
394 return ForceCaptureByReferenceInTargetExecutable;
395 }
396
397 void push(OpenMPDirectiveKind DKind, const DeclarationNameInfo &DirName,
398 Scope *CurScope, SourceLocation Loc) {
399 assert(!IgnoredStackElements &&
400 "cannot change stack while ignoring elements");
401 if (Stack.empty() ||
402 Stack.back().second != CurrentNonCapturingFunctionScope)
403 Stack.emplace_back(StackTy(), CurrentNonCapturingFunctionScope);
404 Stack.back().first.emplace_back(DKind, DirName, CurScope, Loc);
405 Stack.back().first.back().DefaultAttrLoc = Loc;
406 }
407
408 void pop() {
409 assert(!IgnoredStackElements &&
410 "cannot change stack while ignoring elements");
411 assert(!Stack.back().first.empty() &&
412 "Data-sharing attributes stack is empty!");
413 Stack.back().first.pop_back();
414 }
415
416 /// RAII object to temporarily leave the scope of a directive when we want to
417 /// logically operate in its parent.
418 class ParentDirectiveScope {
419 DSAStackTy &Self;
420 bool Active;
421
422 public:
423 ParentDirectiveScope(DSAStackTy &Self, bool Activate)
424 : Self(Self), Active(false) {
425 if (Activate)
426 enable();
427 }
428 ~ParentDirectiveScope() { disable(); }
429 void disable() {
430 if (Active) {
431 --Self.IgnoredStackElements;
432 Active = false;
433 }
434 }
435 void enable() {
436 if (!Active) {
437 ++Self.IgnoredStackElements;
438 Active = true;
439 }
440 }
441 };
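  // Usage sketch (not part of the upstream file); 'Stack' is an assumed
  // DSAStackTy pointer. While the RAII object is active, queries behave as if
  // the parent directive were on top of the stack.
  //
  // \code
  //   {
  //     DSAStackTy::ParentDirectiveScope ParentScope(*Stack, /*Activate=*/true);
  //     // DSA queries here resolve against the parent directive.
  //   } // Destructor re-enables the innermost directive.
  // \endcode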
442
443 /// Marks that we have started loop parsing.
444 void loopInit() {
445 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
446 "Expected loop-based directive.");
447 getTopOfStack().LoopStart = true;
448 }
449 /// Start capturing of the variables in the loop context.
450 void loopStart() {
451 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
452 "Expected loop-based directive.");
453 getTopOfStack().LoopStart = false;
454 }
455 /// true, if variables are captured, false otherwise.
456 bool isLoopStarted() const {
457 assert(isOpenMPLoopDirective(getCurrentDirective()) &&
458 "Expected loop-based directive.");
459 return !getTopOfStack().LoopStart;
460 }
461 /// Marks (or clears) the declaration as a possible loop counter.
462 void resetPossibleLoopCounter(const Decl *D = nullptr) {
463 getTopOfStack().PossiblyLoopCounter = D ? D->getCanonicalDecl() : D;
464 }
465 /// Gets the possible loop counter decl.
466 const Decl *getPossiblyLoopCunter() const {
467 return getTopOfStack().PossiblyLoopCounter;
468 }
469 /// Start new OpenMP region stack in new non-capturing function.
470 void pushFunction() {
471 assert(!IgnoredStackElements &&
472 "cannot change stack while ignoring elements");
473 const FunctionScopeInfo *CurFnScope = SemaRef.getCurFunction();
474 assert(!isa<CapturingScopeInfo>(CurFnScope));
475 CurrentNonCapturingFunctionScope = CurFnScope;
476 }
477 /// Pop region stack for non-capturing function.
478 void popFunction(const FunctionScopeInfo *OldFSI) {
479 assert(!IgnoredStackElements &&
480 "cannot change stack while ignoring elements");
481 if (!Stack.empty() && Stack.back().second == OldFSI) {
482 assert(Stack.back().first.empty());
483 Stack.pop_back();
484 }
485 CurrentNonCapturingFunctionScope = nullptr;
486 for (const FunctionScopeInfo *FSI : llvm::reverse(SemaRef.FunctionScopes)) {
487 if (!isa<CapturingScopeInfo>(FSI)) {
488 CurrentNonCapturingFunctionScope = FSI;
489 break;
490 }
491 }
492 }
493
494 void addCriticalWithHint(const OMPCriticalDirective *D, llvm::APSInt Hint) {
495 Criticals.try_emplace(D->getDirectiveName().getAsString(), D, Hint);
496 }
497 const std::pair<const OMPCriticalDirective *, llvm::APSInt>
498 getCriticalWithHint(const DeclarationNameInfo &Name) const {
499 auto I = Criticals.find(Name.getAsString());
500 if (I != Criticals.end())
501 return I->second;
502 return std::make_pair(nullptr, llvm::APSInt());
503 }
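  // Context note (an assumption based on the OpenMP rules, not stated in this
  // file's comments): 'critical' constructs with the same name must agree on
  // their 'hint' value, which is what this name-to-hint map lets Sema check.
  //
  // \code
  //   #pragma omp critical (lock1) hint(omp_sync_hint_contended)   // recorded via addCriticalWithHint
  //   #pragma omp critical (lock1) hint(omp_sync_hint_uncontended) // later lookup reveals the mismatch
  // \endcode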
504 /// If 'aligned' declaration for given variable \a D was not seen yet,
505 /// add it and return NULL; otherwise return previous occurrence's expression
506 /// for diagnostics.
507 const Expr *addUniqueAligned(const ValueDecl *D, const Expr *NewDE);
508 /// If 'nontemporal' declaration for given variable \a D was not seen yet,
509 /// add it and return NULL; otherwise return previous occurrence's expression
510 /// for diagnostics.
511 const Expr *addUniqueNontemporal(const ValueDecl *D, const Expr *NewDE);
512
513 /// Register specified variable as loop control variable.
514 void addLoopControlVariable(const ValueDecl *D, VarDecl *Capture);
515 /// Check if the specified variable is a loop control variable for
516 /// current region.
517 /// \return The index of the loop control variable in the list of associated
518 /// for-loops (from outer to inner).
519 const LCDeclInfo isLoopControlVariable(const ValueDecl *D) const;
520 /// Check if the specified variable is a loop control variable for
521 /// parent region.
522 /// \return The index of the loop control variable in the list of associated
523 /// for-loops (from outer to inner).
524 const LCDeclInfo isParentLoopControlVariable(const ValueDecl *D) const;
525 /// Check if the specified variable is a loop control variable for
526 /// current region.
527 /// \return The index of the loop control variable in the list of associated
528 /// for-loops (from outer to inner).
529 const LCDeclInfo isLoopControlVariable(const ValueDecl *D,
530 unsigned Level) const;
531 /// Get the loop control variable for the I-th loop (or nullptr) in
532 /// parent directive.
533 const ValueDecl *getParentLoopControlVariable(unsigned I) const;
534
535 /// Marks the specified decl \p D as used in scan directive.
536 void markDeclAsUsedInScanDirective(ValueDecl *D) {
537 if (SharingMapTy *Stack = getSecondOnStackOrNull())
538 Stack->UsedInScanDirective.insert(D);
539 }
540
541 /// Checks if the specified declaration was used in the inner scan directive.
542 bool isUsedInScanDirective(ValueDecl *D) const {
543 if (const SharingMapTy *Stack = getTopOfStackOrNull())
544 return Stack->UsedInScanDirective.contains(D);
545 return false;
546 }
547
548 /// Adds explicit data sharing attribute to the specified declaration.
549 void addDSA(const ValueDecl *D, const Expr *E, OpenMPClauseKind A,
550 DeclRefExpr *PrivateCopy = nullptr, unsigned Modifier = 0,
551 bool AppliedToPointee = false);
552
553 /// Adds additional information for the reduction items with the reduction id
554 /// represented as an operator.
555 void addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
556 BinaryOperatorKind BOK);
557 /// Adds additional information for the reduction items with the reduction id
558 /// represented as reduction identifier.
559 void addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
560 const Expr *ReductionRef);
561 /// Returns the location and reduction operation from the innermost parent
562 /// region for the given \p D.
563 const DSAVarData
564 getTopMostTaskgroupReductionData(const ValueDecl *D, SourceRange &SR,
565 BinaryOperatorKind &BOK,
566 Expr *&TaskgroupDescriptor) const;
567 /// Returns the location and reduction operation from the innermost parent
568 /// region for the given \p D.
569 const DSAVarData
570 getTopMostTaskgroupReductionData(const ValueDecl *D, SourceRange &SR,
571 const Expr *&ReductionRef,
572 Expr *&TaskgroupDescriptor) const;
573 /// Return reduction reference expression for the current taskgroup or
574 /// parallel/worksharing directives with task reductions.
575 Expr *getTaskgroupReductionRef() const {
576 assert((getTopOfStack().Directive == OMPD_taskgroup ||
577 ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
578 isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
579 !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
580 "taskgroup reference expression requested for non taskgroup or "
581 "parallel/worksharing directive.");
582 return getTopOfStack().TaskgroupReductionRef;
583 }
584 /// Checks if the given \p VD declaration is actually a taskgroup reduction
585 /// descriptor variable at the \p Level of OpenMP regions.
586 bool isTaskgroupReductionRef(const ValueDecl *VD, unsigned Level) const {
587 return getStackElemAtLevel(Level).TaskgroupReductionRef &&
588 cast<DeclRefExpr>(getStackElemAtLevel(Level).TaskgroupReductionRef)
589 ->getDecl() == VD;
590 }
591
592 /// Returns data sharing attributes from top of the stack for the
593 /// specified declaration.
594 const DSAVarData getTopDSA(ValueDecl *D, bool FromParent);
595 /// Returns data-sharing attributes for the specified declaration.
596 const DSAVarData getImplicitDSA(ValueDecl *D, bool FromParent) const;
597 /// Returns data-sharing attributes for the specified declaration.
598 const DSAVarData getImplicitDSA(ValueDecl *D, unsigned Level) const;
599 /// Checks if the specified variable has data-sharing attributes which
600 /// match specified \a CPred predicate in any directive which matches \a DPred
601 /// predicate.
602 const DSAVarData
603 hasDSA(ValueDecl *D,
604 const llvm::function_ref<bool(OpenMPClauseKind, bool,
605 DefaultDataSharingAttributes)>
606 CPred,
607 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
608 bool FromParent) const;
609 /// Checks if the specified variable has data-sharing attributes which
610 /// match specified \a CPred predicate in any innermost directive which
611 /// matches \a DPred predicate.
612 const DSAVarData
613 hasInnermostDSA(ValueDecl *D,
614 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
615 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
616 bool FromParent) const;
617 /// Checks if the specified variable has explicit data-sharing
618 /// attributes which match specified \a CPred predicate at the specified
619 /// OpenMP region.
620 bool
621 hasExplicitDSA(const ValueDecl *D,
622 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
623 unsigned Level, bool NotLastprivate = false) const;
624
625 /// Returns true if the directive at level \a Level matches the
626 /// specified \a DPred predicate.
627 bool hasExplicitDirective(
628 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
629 unsigned Level) const;
630
631 /// Finds a directive which matches the specified \a DPred predicate.
632 bool hasDirective(
633 const llvm::function_ref<bool(
634 OpenMPDirectiveKind, const DeclarationNameInfo &, SourceLocation)>
635 DPred,
636 bool FromParent) const;
637
638 /// Returns currently analyzed directive.
639 OpenMPDirectiveKind getCurrentDirective() const {
640 const SharingMapTy *Top = getTopOfStackOrNull();
641 return Top ? Top->Directive : OMPD_unknown;
642 }
643 OpenMPDirectiveKind getMappedDirective() const {
644 const SharingMapTy *Top = getTopOfStackOrNull();
645 return Top ? Top->MappedDirective : OMPD_unknown;
646 }
647 void setCurrentDirective(OpenMPDirectiveKind NewDK) {
648 SharingMapTy *Top = getTopOfStackOrNull();
649 assert(Top &&
650 "Before calling setCurrentDirective Top of Stack not to be NULL.");
651 // Store the old into MappedDirective & assign argument NewDK to Directive.
652 Top->Directive = NewDK;
653 }
654 void setMappedDirective(OpenMPDirectiveKind NewDK) {
655 SharingMapTy *Top = getTopOfStackOrNull();
656 assert(Top &&
657 "Before calling setMappedDirective Top of Stack not to be NULL.");
658 // Store the old into MappedDirective & assign argument NewDK to Directive.
659 Top->MappedDirective = NewDK;
660 }
661 /// Returns directive kind at specified level.
662 OpenMPDirectiveKind getDirective(unsigned Level) const {
663 assert(!isStackEmpty() && "No directive at specified level.");
664 return getStackElemAtLevel(Level).Directive;
665 }
666 /// Returns the capture region at the specified level.
667 OpenMPDirectiveKind getCaptureRegion(unsigned Level,
668 unsigned OpenMPCaptureLevel) const {
669 SmallVector<OpenMPDirectiveKind, 4> CaptureRegions;
670 getOpenMPCaptureRegions(CaptureRegions, getDirective(Level));
671 return CaptureRegions[OpenMPCaptureLevel];
672 }
673 /// Returns parent directive.
674 OpenMPDirectiveKind getParentDirective() const {
675 const SharingMapTy *Parent = getSecondOnStackOrNull();
676 return Parent ? Parent->Directive : OMPD_unknown;
677 }
678
679 /// Add requires decl to internal vector
680 void addRequiresDecl(OMPRequiresDecl *RD) { RequiresDecls.push_back(RD); }
681
682 /// Checks if the defined 'requires' directive has the specified type of clause.
683 template <typename ClauseType> bool hasRequiresDeclWithClause() const {
684 return llvm::any_of(RequiresDecls, [](const OMPRequiresDecl *D) {
685 return llvm::any_of(D->clauselists(), [](const OMPClause *C) {
686 return isa<ClauseType>(C);
687 });
688 });
689 }
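  // Usage sketch (not from the upstream file); 'Stack' is an assumed
  // DSAStackTy pointer and the clause class is just one example of a
  // 'requires' clause AST node.
  //
  // \code
  //   if (Stack->hasRequiresDeclWithClause<OMPUnifiedSharedMemoryClause>()) {
  //     // A '#pragma omp requires unified_shared_memory' was seen earlier.
  //   }
  // \endcode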
690
691 /// Checks for a duplicate clause among previously declared 'requires'
692 /// directives.
693 bool hasDuplicateRequiresClause(ArrayRef<OMPClause *> ClauseList) const {
694 bool IsDuplicate = false;
695 for (OMPClause *CNew : ClauseList) {
696 for (const OMPRequiresDecl *D : RequiresDecls) {
697 for (const OMPClause *CPrev : D->clauselists()) {
698 if (CNew->getClauseKind() == CPrev->getClauseKind()) {
699 SemaRef.Diag(CNew->getBeginLoc(),
700 diag::err_omp_requires_clause_redeclaration)
701 << getOpenMPClauseName(CNew->getClauseKind());
702 SemaRef.Diag(CPrev->getBeginLoc(),
703 diag::note_omp_requires_previous_clause)
704 << getOpenMPClauseName(CPrev->getClauseKind());
705 IsDuplicate = true;
706 }
707 }
708 }
709 }
710 return IsDuplicate;
711 }
712
713 /// Add location of previously encountered target to internal vector
714 void addTargetDirLocation(SourceLocation LocStart) {
715 TargetLocations.push_back(LocStart);
716 }
717
718 /// Add location for the first encountered atomic directive.
719 void addAtomicDirectiveLoc(SourceLocation Loc) {
720 if (AtomicLocation.isInvalid())
721 AtomicLocation = Loc;
722 }
723
724 /// Returns the location of the first encountered atomic directive in the
725 /// module.
726 SourceLocation getAtomicDirectiveLoc() const { return AtomicLocation; }
727
728 // Return previously encountered target region locations.
729 ArrayRef<SourceLocation> getEncounteredTargetLocs() const {
730 return TargetLocations;
731 }
732
733 /// Set default data sharing attribute to none.
734 void setDefaultDSANone(SourceLocation Loc) {
735 getTopOfStack().DefaultAttr = DSA_none;
736 getTopOfStack().DefaultAttrLoc = Loc;
737 }
738 /// Set default data sharing attribute to shared.
739 void setDefaultDSAShared(SourceLocation Loc) {
740 getTopOfStack().DefaultAttr = DSA_shared;
741 getTopOfStack().DefaultAttrLoc = Loc;
742 }
743 /// Set default data sharing attribute to private.
744 void setDefaultDSAPrivate(SourceLocation Loc) {
745 getTopOfStack().DefaultAttr = DSA_private;
746 getTopOfStack().DefaultAttrLoc = Loc;
747 }
748 /// Set default data sharing attribute to firstprivate.
749 void setDefaultDSAFirstPrivate(SourceLocation Loc) {
750 getTopOfStack().DefaultAttr = DSA_firstprivate;
751 getTopOfStack().DefaultAttrLoc = Loc;
752 }
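  // Illustrative mapping (not in the upstream file) between the spelling of a
  // 'default' clause and the setter that records it; 'Loc' is assumed to be
  // the clause location.
  //
  // \code
  //   // #pragma omp parallel default(none)         -> setDefaultDSANone(Loc)
  //   // #pragma omp parallel default(shared)       -> setDefaultDSAShared(Loc)
  //   // #pragma omp parallel default(private)      -> setDefaultDSAPrivate(Loc)
  //   // #pragma omp parallel default(firstprivate) -> setDefaultDSAFirstPrivate(Loc)
  // \endcode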
753 /// Set default data mapping attribute to Modifier:Kind
754 void setDefaultDMAAttr(OpenMPDefaultmapClauseModifier M,
755 OpenMPDefaultmapClauseKind Kind, SourceLocation Loc) {
756 DefaultmapInfo &DMI = getTopOfStack().DefaultmapMap[Kind];
757 DMI.ImplicitBehavior = M;
758 DMI.SLoc = Loc;
759 }
760 /// Check whether the implicit-behavior has been set in defaultmap
761 bool checkDefaultmapCategory(OpenMPDefaultmapClauseKind VariableCategory) {
762 if (VariableCategory == OMPC_DEFAULTMAP_unknown)
763 return getTopOfStack()
764 .DefaultmapMap[OMPC_DEFAULTMAP_aggregate]
765 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown ||
766 getTopOfStack()
767 .DefaultmapMap[OMPC_DEFAULTMAP_scalar]
768 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown ||
769 getTopOfStack()
770 .DefaultmapMap[OMPC_DEFAULTMAP_pointer]
771 .ImplicitBehavior != OMPC_DEFAULTMAP_MODIFIER_unknown;
772 return getTopOfStack().DefaultmapMap[VariableCategory].ImplicitBehavior !=
773 OMPC_DEFAULTMAP_MODIFIER_unknown;
774 }
775
776 ArrayRef<llvm::omp::TraitProperty> getConstructTraits() {
777 return ConstructTraits;
778 }
779 void handleConstructTrait(ArrayRef<llvm::omp::TraitProperty> Traits,
780 bool ScopeEntry) {
781 if (ScopeEntry)
782 ConstructTraits.append(Traits.begin(), Traits.end());
783 else
784 for (llvm::omp::TraitProperty Trait : llvm::reverse(Traits)) {
785 llvm::omp::TraitProperty Top = ConstructTraits.pop_back_val();
786 assert(Top == Trait && "Something left a trait on the stack!");
787 (void)Trait;
788 (void)Top;
789 }
790 }
791
792 DefaultDataSharingAttributes getDefaultDSA(unsigned Level) const {
793 return getStackSize() <= Level ? DSA_unspecified
794 : getStackElemAtLevel(Level).DefaultAttr;
795 }
796 DefaultDataSharingAttributes getDefaultDSA() const {
797 return isStackEmpty() ? DSA_unspecified : getTopOfStack().DefaultAttr;
798 }
799 SourceLocation getDefaultDSALocation() const {
800 return isStackEmpty() ? SourceLocation() : getTopOfStack().DefaultAttrLoc;
801 }
802 OpenMPDefaultmapClauseModifier
803 getDefaultmapModifier(OpenMPDefaultmapClauseKind Kind) const {
804 return isStackEmpty()
805 ? OMPC_DEFAULTMAP_MODIFIER_unknown
806 : getTopOfStack().DefaultmapMap[Kind].ImplicitBehavior;
807 }
808 OpenMPDefaultmapClauseModifier
809 getDefaultmapModifierAtLevel(unsigned Level,
810 OpenMPDefaultmapClauseKind Kind) const {
811 return getStackElemAtLevel(Level).DefaultmapMap[Kind].ImplicitBehavior;
812 }
813 bool isDefaultmapCapturedByRef(unsigned Level,
814 OpenMPDefaultmapClauseKind Kind) const {
815 OpenMPDefaultmapClauseModifier M =
816 getDefaultmapModifierAtLevel(Level, Kind);
817 if (Kind == OMPC_DEFAULTMAP_scalar || Kind == OMPC_DEFAULTMAP_pointer) {
818 return (M == OMPC_DEFAULTMAP_MODIFIER_alloc) ||
819 (M == OMPC_DEFAULTMAP_MODIFIER_to) ||
820 (M == OMPC_DEFAULTMAP_MODIFIER_from) ||
821 (M == OMPC_DEFAULTMAP_MODIFIER_tofrom);
822 }
823 return true;
824 }
825 static bool mustBeFirstprivateBase(OpenMPDefaultmapClauseModifier M,
826 OpenMPDefaultmapClauseKind Kind) {
827 switch (Kind) {
828 case OMPC_DEFAULTMAP_scalar:
829 case OMPC_DEFAULTMAP_pointer:
830 return (M == OMPC_DEFAULTMAP_MODIFIER_unknown) ||
831 (M == OMPC_DEFAULTMAP_MODIFIER_firstprivate) ||
832 (M == OMPC_DEFAULTMAP_MODIFIER_default);
833 case OMPC_DEFAULTMAP_aggregate:
834 return M == OMPC_DEFAULTMAP_MODIFIER_firstprivate;
835 default:
836 break;
837 }
838 llvm_unreachable("Unexpected OpenMPDefaultmapClauseKind enum");
839 }
840 bool mustBeFirstprivateAtLevel(unsigned Level,
841 OpenMPDefaultmapClauseKind Kind) const {
842 OpenMPDefaultmapClauseModifier M =
843 getDefaultmapModifierAtLevel(Level, Kind);
844 return mustBeFirstprivateBase(M, Kind);
845 }
846 bool mustBeFirstprivate(OpenMPDefaultmapClauseKind Kind) const {
847 OpenMPDefaultmapClauseModifier M = getDefaultmapModifier(Kind);
848 return mustBeFirstprivateBase(M, Kind);
849 }
850
851 /// Checks if the specified variable is a threadprivate.
852 bool isThreadPrivate(VarDecl *D) {
853 const DSAVarData DVar = getTopDSA(D, false);
854 return isOpenMPThreadPrivate(DVar.CKind);
855 }
856
857 /// Marks current region as ordered (it has an 'ordered' clause).
858 void setOrderedRegion(bool IsOrdered, const Expr *Param,
859 OMPOrderedClause *Clause) {
860 if (IsOrdered)
861 getTopOfStack().OrderedRegion.emplace(Param, Clause);
862 else
863 getTopOfStack().OrderedRegion.reset();
864 }
865 /// Returns true, if region is ordered (has associated 'ordered' clause),
866 /// false - otherwise.
867 bool isOrderedRegion() const {
868 if (const SharingMapTy *Top = getTopOfStackOrNull())
869 return Top->OrderedRegion.has_value();
870 return false;
871 }
872 /// Returns optional parameter for the ordered region.
873 std::pair<const Expr *, OMPOrderedClause *> getOrderedRegionParam() const {
874 if (const SharingMapTy *Top = getTopOfStackOrNull())
875 if (Top->OrderedRegion)
876 return *Top->OrderedRegion;
877 return std::make_pair(nullptr, nullptr);
878 }
879 /// Returns true, if parent region is ordered (has associated
880 /// 'ordered' clause), false - otherwise.
881 bool isParentOrderedRegion() const {
882 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
883 return Parent->OrderedRegion.has_value();
884 return false;
885 }
886 /// Returns optional parameter for the ordered region.
887 std::pair<const Expr *, OMPOrderedClause *>
888 getParentOrderedRegionParam() const {
889 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
890 if (Parent->OrderedRegion)
891 return *Parent->OrderedRegion;
892 return std::make_pair(nullptr, nullptr);
893 }
894 /// Marks current region as having an 'order' clause.
895 void setRegionHasOrderConcurrent(bool HasOrderConcurrent) {
896 getTopOfStack().RegionHasOrderConcurrent = HasOrderConcurrent;
897 }
898 /// Returns true, if parent region has an associated 'order' clause,
899 /// false - otherwise.
900 bool isParentOrderConcurrent() const {
901 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
902 return Parent->RegionHasOrderConcurrent;
903 return false;
904 }
905 /// Marks current region as nowait (it has a 'nowait' clause).
906 void setNowaitRegion(bool IsNowait = true) {
907 getTopOfStack().NowaitRegion = IsNowait;
908 }
909 /// Returns true, if parent region is nowait (has associated
910 /// 'nowait' clause), false - otherwise.
911 bool isParentNowaitRegion() const {
912 if (const SharingMapTy *Parent = getSecondOnStackOrNull())
913 return Parent->NowaitRegion;
914 return false;
915 }
916 /// Marks current region as untied (it has an 'untied' clause).
917 void setUntiedRegion(bool IsUntied = true) {
918 getTopOfStack().UntiedRegion = IsUntied;
919 }
920 /// Return true if current region is untied.
921 bool isUntiedRegion() const {
922 const SharingMapTy *Top = getTopOfStackOrNull();
923 return Top ? Top->UntiedRegion : false;
924 }
925 /// Marks parent region as cancel region.
926 void setParentCancelRegion(bool Cancel = true) {
927 if (SharingMapTy *Parent = getSecondOnStackOrNull())
928 Parent->CancelRegion |= Cancel;
929 }
930 /// Return true if current region has inner cancel construct.
931 bool isCancelRegion() const {
932 const SharingMapTy *Top = getTopOfStackOrNull();
933 return Top ? Top->CancelRegion : false;
934 }
935
936 /// Mark that parent region already has scan directive.
937 void setParentHasScanDirective(SourceLocation Loc) {
938 if (SharingMapTy *Parent = getSecondOnStackOrNull())
939 Parent->PrevScanLocation = Loc;
940 }
941 /// Return true if the parent region already has a scan directive.
942 bool doesParentHasScanDirective() const {
943 const SharingMapTy *Top = getSecondOnStackOrNull();
944 return Top ? Top->PrevScanLocation.isValid() : false;
945 }
946 /// Returns the location of the scan directive in the parent region.
947 SourceLocation getParentScanDirectiveLoc() const {
948 const SharingMapTy *Top = getSecondOnStackOrNull();
949 return Top ? Top->PrevScanLocation : SourceLocation();
950 }
951 /// Mark that parent region already has ordered directive.
952 void setParentHasOrderedDirective(SourceLocation Loc) {
953 if (SharingMapTy *Parent = getSecondOnStackOrNull())
954 Parent->PrevOrderedLocation = Loc;
955 }
956 /// Return true if the parent region already has an ordered directive.
957 bool doesParentHasOrderedDirective() const {
958 const SharingMapTy *Top = getSecondOnStackOrNull();
959 return Top ? Top->PrevOrderedLocation.isValid() : false;
960 }
961 /// Returns the location of the previously specified ordered directive.
962 SourceLocation getParentOrderedDirectiveLoc() const {
963 const SharingMapTy *Top = getSecondOnStackOrNull();
964 return Top ? Top->PrevOrderedLocation : SourceLocation();
965 }
966
967 /// Set collapse value for the region.
968 void setAssociatedLoops(unsigned Val) {
969 getTopOfStack().AssociatedLoops = Val;
970 if (Val > 1)
971 getTopOfStack().HasMutipleLoops = true;
972 }
973 /// Return collapse value for region.
974 unsigned getAssociatedLoops() const {
975 const SharingMapTy *Top = getTopOfStackOrNull();
976 return Top ? Top->AssociatedLoops : 0;
977 }
978 /// Returns true if the construct is associated with multiple loops.
979 bool hasMutipleLoops() const {
980 const SharingMapTy *Top = getTopOfStackOrNull();
981 return Top ? Top->HasMutipleLoops : false;
982 }
983
984 /// Marks current target region as one with closely nested teams
985 /// region.
986 void setParentTeamsRegionLoc(SourceLocation TeamsRegionLoc) {
987 if (SharingMapTy *Parent = getSecondOnStackOrNull())
988 Parent->InnerTeamsRegionLoc = TeamsRegionLoc;
989 }
990 /// Returns true, if current region has closely nested teams region.
991 bool hasInnerTeamsRegion() const {
992 return getInnerTeamsRegionLoc().isValid();
993 }
994 /// Returns location of the nested teams region (if any).
995 SourceLocation getInnerTeamsRegionLoc() const {
996 const SharingMapTy *Top = getTopOfStackOrNull();
997 return Top ? Top->InnerTeamsRegionLoc : SourceLocation();
998 }
999
1000 Scope *getCurScope() const {
1001 const SharingMapTy *Top = getTopOfStackOrNull();
1002 return Top ? Top->CurScope : nullptr;
1003 }
1004 void setContext(DeclContext *DC) { getTopOfStack().Context = DC; }
1005 SourceLocation getConstructLoc() const {
1006 const SharingMapTy *Top = getTopOfStackOrNull();
1007 return Top ? Top->ConstructLoc : SourceLocation();
1008 }
1009
1010 /// Do the check specified in \a Check to all component lists and return true
1011 /// if any issue is found.
1012 bool checkMappableExprComponentListsForDecl(
1013 const ValueDecl *VD, bool CurrentRegionOnly,
1014 const llvm::function_ref<
1015 bool(OMPClauseMappableExprCommon::MappableExprComponentListRef,
1016 OpenMPClauseKind)>
1017 Check) const {
1018 if (isStackEmpty())
1019 return false;
1020 auto SI = begin();
1021 auto SE = end();
1022
1023 if (SI == SE)
1024 return false;
1025
1026 if (CurrentRegionOnly)
1027 SE = std::next(SI);
1028 else
1029 std::advance(SI, 1);
1030
1031 for (; SI != SE; ++SI) {
1032 auto MI = SI->MappedExprComponents.find(VD);
1033 if (MI != SI->MappedExprComponents.end())
1034 for (OMPClauseMappableExprCommon::MappableExprComponentListRef L :
1035 MI->second.Components)
1036 if (Check(L, MI->second.Kind))
1037 return true;
1038 }
1039 return false;
1040 }
1041
1042 /// Do the check specified in \a Check to all component lists at a given level
1043 /// and return true if any issue is found.
1044 bool checkMappableExprComponentListsForDeclAtLevel(
1045 const ValueDecl *VD, unsigned Level,
1046 const llvm::function_ref<
1047 bool(OMPClauseMappableExprCommon::MappableExprComponentListRef,
1048 OpenMPClauseKind)>
1049 Check) const {
1050 if (getStackSize() <= Level)
1051 return false;
1052
1053 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1054 auto MI = StackElem.MappedExprComponents.find(VD);
1055 if (MI != StackElem.MappedExprComponents.end())
1056 for (OMPClauseMappableExprCommon::MappableExprComponentListRef L :
1057 MI->second.Components)
1058 if (Check(L, MI->second.Kind))
1059 return true;
1060 return false;
1061 }
1062
1063 /// Create a new mappable expression component list associated with a given
1064 /// declaration and initialize it with the provided list of components.
1065 void addMappableExpressionComponents(
1066 const ValueDecl *VD,
1067 OMPClauseMappableExprCommon::MappableExprComponentListRef Components,
1068 OpenMPClauseKind WhereFoundClauseKind) {
1069 MappedExprComponentTy &MEC = getTopOfStack().MappedExprComponents[VD];
1070 // Create new entry and append the new components there.
1071 MEC.Components.resize(MEC.Components.size() + 1);
1072 MEC.Components.back().append(Components.begin(), Components.end());
1073 MEC.Kind = WhereFoundClauseKind;
1074 }
1075
1076 unsigned getNestingLevel() const {
1077 assert(!isStackEmpty());
1078 return getStackSize() - 1;
1079 }
1080 void addDoacrossDependClause(OMPClause *C, const OperatorOffsetTy &OpsOffs) {
1081 SharingMapTy *Parent = getSecondOnStackOrNull();
1082 assert(Parent && isOpenMPWorksharingDirective(Parent->Directive));
1083 Parent->DoacrossDepends.try_emplace(C, OpsOffs);
1084 }
1085 llvm::iterator_range<DoacrossClauseMapTy::const_iterator>
1086 getDoacrossDependClauses() const {
1087 const SharingMapTy &StackElem = getTopOfStack();
1088 if (isOpenMPWorksharingDirective(StackElem.Directive)) {
1089 const DoacrossClauseMapTy &Ref = StackElem.DoacrossDepends;
1090 return llvm::make_range(Ref.begin(), Ref.end());
1091 }
1092 return llvm::make_range(StackElem.DoacrossDepends.end(),
1093 StackElem.DoacrossDepends.end());
1094 }
1095
1096 // Store types of classes which have been explicitly mapped
1097 void addMappedClassesQualTypes(QualType QT) {
1098 SharingMapTy &StackElem = getTopOfStack();
1099 StackElem.MappedClassesQualTypes.insert(QT);
1100 }
1101
1102 // Check whether the given class type was previously explicitly mapped.
1103 bool isClassPreviouslyMapped(QualType QT) const {
1104 const SharingMapTy &StackElem = getTopOfStack();
1105 return StackElem.MappedClassesQualTypes.contains(QT);
1106 }
1107
1108 /// Adds global declare target to the parent target region.
1109 void addToParentTargetRegionLinkGlobals(DeclRefExpr *E) {
1110 assert(*OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(
1111 E->getDecl()) == OMPDeclareTargetDeclAttr::MT_Link &&
1112 "Expected declare target link global.");
1113 for (auto &Elem : *this) {
1114 if (isOpenMPTargetExecutionDirective(Elem.Directive)) {
1115 Elem.DeclareTargetLinkVarDecls.push_back(E);
1116 return;
1117 }
1118 }
1119 }
1120
1121 /// Returns the list of globals with declare target link if current directive
1122 /// is target.
1123 ArrayRef<DeclRefExpr *> getLinkGlobals() const {
1124 assert(isOpenMPTargetExecutionDirective(getCurrentDirective()) &&
1125 "Expected target executable directive.");
1126 return getTopOfStack().DeclareTargetLinkVarDecls;
1127 }
1128
1129 /// Adds the allocator expression to the list of used allocators.
1130 void addInnerAllocatorExpr(Expr *E) {
1131 getTopOfStack().InnerUsedAllocators.push_back(E);
1132 }
1133 /// Return list of used allocators.
1134 ArrayRef<Expr *> getInnerAllocators() const {
1135 return getTopOfStack().InnerUsedAllocators;
1136 }
1137 /// Marks the declaration as implicitly firstprivate in the task-based
1138 /// regions.
1139 void addImplicitTaskFirstprivate(unsigned Level, Decl *D) {
1140 getStackElemAtLevel(Level).ImplicitTaskFirstprivates.insert(D);
1141 }
1142 /// Checks if the decl is implicitly firstprivate in the task-based region.
1143 bool isImplicitTaskFirstprivate(Decl *D) const {
1144 return getTopOfStack().ImplicitTaskFirstprivates.contains(D);
1145 }
1146
1147 /// Marks decl as used in uses_allocators clause as the allocator.
1148 void addUsesAllocatorsDecl(const Decl *D, UsesAllocatorsDeclKind Kind) {
1149 getTopOfStack().UsesAllocatorsDecls.try_emplace(D, Kind);
1150 }
1151 /// Checks if the specified decl is used in the uses_allocators clause as the
1152 /// allocator.
1153 std::optional<UsesAllocatorsDeclKind>
1154 isUsesAllocatorsDecl(unsigned Level, const Decl *D) const {
1155 const SharingMapTy &StackElem = getTopOfStack();
1156 auto I = StackElem.UsesAllocatorsDecls.find(D);
1157 if (I == StackElem.UsesAllocatorsDecls.end())
1158 return std::nullopt;
1159 return I->getSecond();
1160 }
1161 std::optional<UsesAllocatorsDeclKind>
1162 isUsesAllocatorsDecl(const Decl *D) const {
1163 const SharingMapTy &StackElem = getTopOfStack();
1164 auto I = StackElem.UsesAllocatorsDecls.find(D);
1165 if (I == StackElem.UsesAllocatorsDecls.end())
1166 return std::nullopt;
1167 return I->getSecond();
1168 }
1169
1170 void addDeclareMapperVarRef(Expr *Ref) {
1171 SharingMapTy &StackElem = getTopOfStack();
1172 StackElem.DeclareMapperVar = Ref;
1173 }
1174 const Expr *getDeclareMapperVarRef() const {
1175 const SharingMapTy *Top = getTopOfStackOrNull();
1176 return Top ? Top->DeclareMapperVar : nullptr;
1177 }
1178
1179 /// Add a new iterator variable.
1180 void addIteratorVarDecl(VarDecl *VD) {
1181 SharingMapTy &StackElem = getTopOfStack();
1182 StackElem.IteratorVarDecls.push_back(VD->getCanonicalDecl());
1183 }
1184 /// Check if variable declaration is an iterator VarDecl.
1185 bool isIteratorVarDecl(const VarDecl *VD) const {
1186 const SharingMapTy *Top = getTopOfStackOrNull();
1187 if (!Top)
1188 return false;
1189
1190 return llvm::is_contained(Top->IteratorVarDecls, VD->getCanonicalDecl());
1191 }
1192 /// Get the captured field from ImplicitDefaultFirstprivateFDs.
1193 VarDecl *getImplicitFDCapExprDecl(const FieldDecl *FD) const {
1194 const_iterator I = begin();
1195 const_iterator EndI = end();
1196 size_t StackLevel = getStackSize();
1197 for (; I != EndI; ++I) {
1198 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1199 break;
1200 StackLevel--;
1201 }
1202 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1203 if (I == EndI)
1204 return nullptr;
1205 for (const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1206 if (IFD.FD == FD && IFD.StackLevel == StackLevel)
1207 return IFD.VD;
1208 return nullptr;
1209 }
1210 /// Check if the captured decl is a field stored in ImplicitDefaultFirstprivateFDs.
1211 bool isImplicitDefaultFirstprivateFD(VarDecl *VD) const {
1212 const_iterator I = begin();
1213 const_iterator EndI = end();
1214 for (; I != EndI; ++I)
1215 if (I->DefaultAttr == DSA_firstprivate || I->DefaultAttr == DSA_private)
1216 break;
1217 if (I == EndI)
1218 return false;
1219 for (const auto &IFD : I->ImplicitDefaultFirstprivateFDs)
1220 if (IFD.VD == VD)
1221 return true;
1222 return false;
1223 }
1224 /// Store capture FD info in ImplicitDefaultFirstprivateFDs
1225 void addImplicitDefaultFirstprivateFD(const FieldDecl *FD, VarDecl *VD) {
1226 iterator I = begin();
1227 const_iterator EndI = end();
1228 size_t StackLevel = getStackSize();
1229 for (; I != EndI; ++I) {
1230 if (I->DefaultAttr == DSA_private || I->DefaultAttr == DSA_firstprivate) {
1231 I->ImplicitDefaultFirstprivateFDs.emplace_back(FD, StackLevel, VD);
1232 break;
1233 }
1234 StackLevel--;
1235 }
1236 assert((StackLevel > 0 && I != EndI) || (StackLevel == 0 && I == EndI));
1237 }
1238};
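// A minimal sketch (not part of the upstream file) of the intended push/pop
// discipline around a single directive; 'Stack', 'DirName', 'CurScope' and
// 'Loc' are assumed to be provided by the surrounding Sema entry points.
//
// \code
//   // On '#pragma omp parallel':
//   Stack->push(OMPD_parallel, DirName, CurScope, Loc);
//   // ... process clauses, query and record DSAs for referenced variables ...
//   Stack->pop(); // when the region has been fully analyzed
// \endcode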
1239
1240bool isImplicitTaskingRegion(OpenMPDirectiveKind DKind) {
1241 return isOpenMPParallelDirective(DKind) || isOpenMPTeamsDirective(DKind);
1242}
1243
1244bool isImplicitOrExplicitTaskingRegion(OpenMPDirectiveKind DKind) {
1245 return isImplicitTaskingRegion(DKind) || isOpenMPTaskingDirective(DKind) ||
1246 DKind == OMPD_unknown;
1247}
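// Illustration (not from the upstream file) of what the predicates above
// accept; OMPD_unknown covers references outside of any enclosing construct.
//
// \code
//   assert(isImplicitOrExplicitTaskingRegion(OMPD_parallel)); // implicit task
//   assert(isImplicitOrExplicitTaskingRegion(OMPD_task));     // explicit task
//   assert(!isImplicitOrExplicitTaskingRegion(OMPD_atomic));
// \endcode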
1248
1249} // namespace
1250
1251static const Expr *getExprAsWritten(const Expr *E) {
1252 if (const auto *FE = dyn_cast<FullExpr>(E))
1253 E = FE->getSubExpr();
1254
1255 if (const auto *MTE = dyn_cast<MaterializeTemporaryExpr>(E))
1256 E = MTE->getSubExpr();
1257
1258 while (const auto *Binder = dyn_cast<CXXBindTemporaryExpr>(E))
1259 E = Binder->getSubExpr();
1260
1261 if (const auto *ICE = dyn_cast<ImplicitCastExpr>(E))
1262 E = ICE->getSubExprAsWritten();
1263 return E->IgnoreParens();
1264}
1265
1266static Expr *getExprAsWritten(Expr *E) {
1267 return const_cast<Expr *>(getExprAsWritten(const_cast<const Expr *>(E)));
1268}
1269
1270static const ValueDecl *getCanonicalDecl(const ValueDecl *D) {
1271 if (const auto *CED = dyn_cast<OMPCapturedExprDecl>(D))
1272 if (const auto *ME = dyn_cast<MemberExpr>(getExprAsWritten(CED->getInit())))
1273 D = ME->getMemberDecl();
1274 const auto *VD = dyn_cast<VarDecl>(D);
1275 const auto *FD = dyn_cast<FieldDecl>(D);
1276 if (VD != nullptr) {
1277 VD = VD->getCanonicalDecl();
1278 D = VD;
1279 } else {
1280 assert(FD);
1281 FD = FD->getCanonicalDecl();
1282 D = FD;
1283 }
1284 return D;
1285}
1286
1287static ValueDecl *getCanonicalDecl(ValueDecl *D) {
1288 return const_cast<ValueDecl *>(
1289 getCanonicalDecl(const_cast<const ValueDecl *>(D)));
1290}
1291
1292DSAStackTy::DSAVarData DSAStackTy::getDSA(const_iterator &Iter,
1293 ValueDecl *D) const {
1294 D = getCanonicalDecl(D);
1295 auto *VD = dyn_cast<VarDecl>(D);
1296 const auto *FD = dyn_cast<FieldDecl>(D);
1297 DSAVarData DVar;
1298 if (Iter == end()) {
1299 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1300 // in a region but not in construct]
1301 // File-scope or namespace-scope variables referenced in called routines
1302 // in the region are shared unless they appear in a threadprivate
1303 // directive.
1304 if (VD && !VD->isFunctionOrMethodVarDecl() && !isa<ParmVarDecl>(VD))
1305 DVar.CKind = OMPC_shared;
1306
1307 // OpenMP [2.9.1.2, Data-sharing Attribute Rules for Variables Referenced
1308 // in a region but not in construct]
1309 // Variables with static storage duration that are declared in called
1310 // routines in the region are shared.
1311 if (VD && VD->hasGlobalStorage())
1312 DVar.CKind = OMPC_shared;
1313
1314 // Non-static data members are shared by default.
1315 if (FD)
1316 DVar.CKind = OMPC_shared;
1317
1318 return DVar;
1319 }
1320
1321 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1322 // in a Construct, C/C++, predetermined, p.1]
1323 // Variables with automatic storage duration that are declared in a scope
1324 // inside the construct are private.
1325 if (VD && isOpenMPLocal(VD, Iter) && VD->isLocalVarDecl() &&
1326 (VD->getStorageClass() == SC_Auto || VD->getStorageClass() == SC_None)) {
1327 DVar.CKind = OMPC_private;
1328 return DVar;
1329 }
1330
1331 DVar.DKind = Iter->Directive;
1332 // Explicitly specified attributes and local variables with predetermined
1333 // attributes.
1334 if (Iter->SharingMap.count(D)) {
1335 const DSAInfo &Data = Iter->SharingMap.lookup(D);
1336 DVar.RefExpr = Data.RefExpr.getPointer();
1337 DVar.PrivateCopy = Data.PrivateCopy;
1338 DVar.CKind = Data.Attributes;
1339 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1340 DVar.Modifier = Data.Modifier;
1341 DVar.AppliedToPointee = Data.AppliedToPointee;
1342 return DVar;
1343 }
1344
1345 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1346 // in a Construct, C/C++, implicitly determined, p.1]
1347 // In a parallel or task construct, the data-sharing attributes of these
1348 // variables are determined by the default clause, if present.
1349 switch (Iter->DefaultAttr) {
1350 case DSA_shared:
1351 DVar.CKind = OMPC_shared;
1352 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1353 return DVar;
1354 case DSA_none:
1355 return DVar;
1356 case DSA_firstprivate:
1357 if (VD && VD->getStorageDuration() == SD_Static &&
1358 VD->getDeclContext()->isFileContext()) {
1359 DVar.CKind = OMPC_unknown;
1360 } else {
1361 DVar.CKind = OMPC_firstprivate;
1362 }
1363 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1364 return DVar;
1365 case DSA_private:
1366 // each variable with static storage duration that is declared
1367 // in a namespace or global scope and referenced in the construct,
1368 // and that does not have a predetermined data-sharing attribute
1369 if (VD && VD->getStorageDuration() == SD_Static &&
1370 VD->getDeclContext()->isFileContext()) {
1371 DVar.CKind = OMPC_unknown;
1372 } else {
1373 DVar.CKind = OMPC_private;
1374 }
1375 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1376 return DVar;
1377 case DSA_unspecified:
1378 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1379 // in a Construct, implicitly determined, p.2]
1380 // In a parallel construct, if no default clause is present, these
1381 // variables are shared.
1382 DVar.ImplicitDSALoc = Iter->DefaultAttrLoc;
1383 if ((isOpenMPParallelDirective(DVar.DKind) &&
1384 !isOpenMPTaskLoopDirective(DVar.DKind)) ||
1385 isOpenMPTeamsDirective(DVar.DKind)) {
1386 DVar.CKind = OMPC_shared;
1387 return DVar;
1388 }
1389
1390 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1391 // in a Construct, implicitly determined, p.4]
1392 // In a task construct, if no default clause is present, a variable that in
1393 // the enclosing context is determined to be shared by all implicit tasks
1394 // bound to the current team is shared.
1395 if (isOpenMPTaskingDirective(DVar.DKind)) {
1396 DSAVarData DVarTemp;
1397 const_iterator I = Iter, E = end();
1398 do {
1399 ++I;
1400 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables
1401 // Referenced in a Construct, implicitly determined, p.6]
1402 // In a task construct, if no default clause is present, a variable
1403 // whose data-sharing attribute is not determined by the rules above is
1404 // firstprivate.
1405 DVarTemp = getDSA(I, D);
1406 if (DVarTemp.CKind != OMPC_shared) {
1407 DVar.RefExpr = nullptr;
1408 DVar.CKind = OMPC_firstprivate;
1409 return DVar;
1410 }
1411 } while (I != E && !isImplicitTaskingRegion(I->Directive));
1412 DVar.CKind =
1413 (DVarTemp.CKind == OMPC_unknown) ? OMPC_firstprivate : OMPC_shared;
1414 return DVar;
1415 }
1416 }
1417 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1418 // in a Construct, implicitly determined, p.3]
1419 // For constructs other than task, if no default clause is present, these
1420 // variables inherit their data-sharing attributes from the enclosing
1421 // context.
1422 return getDSA(++Iter, D);
1423}
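// A worked illustration (assumption: standard OpenMP data-sharing rules, not
// text from the upstream file) of how getDSA resolves an implicit attribute:
//
// \code
//   // int x;                 // automatic storage, declared outside the region
//   // #pragma omp parallel   // no 'default' clause -> 'x' is implicitly shared
//   // {
//   //   #pragma omp task     // no 'default' clause
//   //   x = 1;               // 'x' is shared in the enclosing context, so it
//   //                        // stays shared on the task; otherwise the task
//   //                        // would make it firstprivate (rules p.4/p.6 above)
//   // }
// \endcode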
1424
1425const Expr *DSAStackTy::addUniqueAligned(const ValueDecl *D,
1426 const Expr *NewDE) {
1427 assert(!isStackEmpty() && "Data sharing attributes stack is empty");
1428 D = getCanonicalDecl(D);
1429 SharingMapTy &StackElem = getTopOfStack();
1430 auto It = StackElem.AlignedMap.find(D);
1431 if (It == StackElem.AlignedMap.end()) {
1432 assert(NewDE && "Unexpected nullptr expr to be added into aligned map");
1433 StackElem.AlignedMap[D] = NewDE;
1434 return nullptr;
1435 }
1436 assert(It->second && "Unexpected nullptr expr in the aligned map");
1437 return It->second;
1438}
1439
1440const Expr *DSAStackTy::addUniqueNontemporal(const ValueDecl *D,
1441 const Expr *NewDE) {
1442 assert(!isStackEmpty() && "Data sharing attributes stack is empty");
1443 D = getCanonicalDecl(D);
1444 SharingMapTy &StackElem = getTopOfStack();
1445 auto It = StackElem.NontemporalMap.find(D);
1446 if (It == StackElem.NontemporalMap.end()) {
1447 assert(NewDE && "Unexpected nullptr expr to be added into aligned map");
1448 StackElem.NontemporalMap[D] = NewDE;
1449 return nullptr;
1450 }
1451 assert(It->second && "Unexpected nullptr expr in the aligned map");
1452 return It->second;
1453}
1454
1455void DSAStackTy::addLoopControlVariable(const ValueDecl *D, VarDecl *Capture) {
1456 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1457 D = getCanonicalDecl(D);
1458 SharingMapTy &StackElem = getTopOfStack();
1459 StackElem.LCVMap.try_emplace(
1460 D, LCDeclInfo(StackElem.LCVMap.size() + 1, Capture));
1461}
1462
1463const DSAStackTy::LCDeclInfo
1464DSAStackTy::isLoopControlVariable(const ValueDecl *D) const {
1465 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1466 D = getCanonicalDecl(D);
1467 const SharingMapTy &StackElem = getTopOfStack();
1468 auto It = StackElem.LCVMap.find(D);
1469 if (It != StackElem.LCVMap.end())
1470 return It->second;
1471 return {0, nullptr};
1472}
1473
1474const DSAStackTy::LCDeclInfo
1475DSAStackTy::isLoopControlVariable(const ValueDecl *D, unsigned Level) const {
1476 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1477 D = getCanonicalDecl(D);
1478 for (unsigned I = Level + 1; I > 0; --I) {
1479 const SharingMapTy &StackElem = getStackElemAtLevel(I - 1);
1480 auto It = StackElem.LCVMap.find(D);
1481 if (It != StackElem.LCVMap.end())
1482 return It->second;
1483 }
1484 return {0, nullptr};
1485}
1486
1487const DSAStackTy::LCDeclInfo
1488DSAStackTy::isParentLoopControlVariable(const ValueDecl *D) const {
1489 const SharingMapTy *Parent = getSecondOnStackOrNull();
1490 assert(Parent && "Data-sharing attributes stack is empty");
1491 D = getCanonicalDecl(D);
1492 auto It = Parent->LCVMap.find(D);
1493 if (It != Parent->LCVMap.end())
1494 return It->second;
1495 return {0, nullptr};
1496}
1497
1498const ValueDecl *DSAStackTy::getParentLoopControlVariable(unsigned I) const {
1499 const SharingMapTy *Parent = getSecondOnStackOrNull();
1500 assert(Parent && "Data-sharing attributes stack is empty");
1501 if (Parent->LCVMap.size() < I)
1502 return nullptr;
1503 for (const auto &Pair : Parent->LCVMap)
1504 if (Pair.second.first == I)
1505 return Pair.first;
1506 return nullptr;
1507}
1508
1509void DSAStackTy::addDSA(const ValueDecl *D, const Expr *E, OpenMPClauseKind A,
1510 DeclRefExpr *PrivateCopy, unsigned Modifier,
1511 bool AppliedToPointee) {
1512 D = getCanonicalDecl(D);
1513 if (A == OMPC_threadprivate) {
1514 DSAInfo &Data = Threadprivates[D];
1515 Data.Attributes = A;
1516 Data.RefExpr.setPointer(E);
1517 Data.PrivateCopy = nullptr;
1518 Data.Modifier = Modifier;
1519 } else {
1520 DSAInfo &Data = getTopOfStack().SharingMap[D];
1521 assert(Data.Attributes == OMPC_unknown || (A == Data.Attributes) ||
1522 (A == OMPC_firstprivate && Data.Attributes == OMPC_lastprivate) ||
1523 (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) ||
1524 (isLoopControlVariable(D).first && A == OMPC_private));
1525 Data.Modifier = Modifier;
1526 if (A == OMPC_lastprivate && Data.Attributes == OMPC_firstprivate) {
1527 Data.RefExpr.setInt(/*IntVal=*/true);
1528 return;
1529 }
1530 const bool IsLastprivate =
1531 A == OMPC_lastprivate || Data.Attributes == OMPC_lastprivate;
1532 Data.Attributes = A;
1533 Data.RefExpr.setPointerAndInt(E, IsLastprivate);
1534 Data.PrivateCopy = PrivateCopy;
1535 Data.AppliedToPointee = AppliedToPointee;
1536 if (PrivateCopy) {
1537 DSAInfo &Data = getTopOfStack().SharingMap[PrivateCopy->getDecl()];
1538 Data.Modifier = Modifier;
1539 Data.Attributes = A;
1540 Data.RefExpr.setPointerAndInt(PrivateCopy, IsLastprivate);
1541 Data.PrivateCopy = nullptr;
1542 Data.AppliedToPointee = AppliedToPointee;
1543 }
1544 }
1545}
1546
1547/// Build a variable declaration for OpenMP loop iteration variable.
1548static VarDecl *buildVarDecl(Sema &SemaRef, SourceLocation Loc, QualType Type,
1549 StringRef Name, const AttrVec *Attrs = nullptr,
1550 DeclRefExpr *OrigRef = nullptr) {
1551 DeclContext *DC = SemaRef.CurContext;
1552 IdentifierInfo *II = &SemaRef.PP.getIdentifierTable().get(Name);
1553 TypeSourceInfo *TInfo = SemaRef.Context.getTrivialTypeSourceInfo(Type, Loc);
1554 auto *Decl =
1555 VarDecl::Create(SemaRef.Context, DC, Loc, Loc, II, Type, TInfo, SC_None);
1556 if (Attrs) {
1557 for (specific_attr_iterator<AlignedAttr> I(Attrs->begin()), E(Attrs->end());
1558 I != E; ++I)
1559 Decl->addAttr(*I);
1560 }
1561 Decl->setImplicit();
1562 if (OrigRef) {
1563 Decl->addAttr(
1564 OMPReferencedVarAttr::CreateImplicit(SemaRef.Context, OrigRef));
1565 }
1566 return Decl;
1567}
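// Typical (illustrative) use: build an implicit loop iteration variable, e.g.
//   buildVarDecl(SemaRef, Loc, /*loop counter type*/ Type, ".omp.iv");
// the declaration is marked implicit and, when OrigRef is given, tagged with
// OMPReferencedVarAttr so later phases can relate it back to the original
// variable.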
1568
1569static DeclRefExpr *buildDeclRefExpr(Sema &S, VarDecl *D, QualType Ty,
1570 SourceLocation Loc,
1571 bool RefersToCapture = false) {
1572 D->setReferenced();
1573 D->markUsed(S.Context);
1574 return DeclRefExpr::Create(S.getASTContext(), NestedNameSpecifierLoc(),
1575 SourceLocation(), D, RefersToCapture, Loc, Ty,
1576 VK_LValue);
1577}
1578
1579void DSAStackTy::addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
1580 BinaryOperatorKind BOK) {
1581 D = getCanonicalDecl(D);
1582 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1583 assert(
1584 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1585 "Additional reduction info may be specified only for reduction items.");
1586 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1587 assert(ReductionData.ReductionRange.isInvalid() &&
1588 (getTopOfStack().Directive == OMPD_taskgroup ||
1589 ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
1590 isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
1591 !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
1592 "Additional reduction info may be specified only once for reduction "
1593 "items.");
1594 ReductionData.set(BOK, SR);
1595 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1596 if (!TaskgroupReductionRef) {
1597 VarDecl *VD = buildVarDecl(SemaRef, SR.getBegin(),
1598 SemaRef.Context.VoidPtrTy, ".task_red.");
1599 TaskgroupReductionRef =
1600 buildDeclRefExpr(SemaRef, VD, SemaRef.Context.VoidPtrTy, SR.getBegin());
1601 }
1602}
1603
1604void DSAStackTy::addTaskgroupReductionData(const ValueDecl *D, SourceRange SR,
1605 const Expr *ReductionRef) {
1606 D = getCanonicalDecl(D);
1607 assert(!isStackEmpty() && "Data-sharing attributes stack is empty");
1608 assert(
1609 getTopOfStack().SharingMap[D].Attributes == OMPC_reduction &&
1610 "Additional reduction info may be specified only for reduction items.");
1611 ReductionData &ReductionData = getTopOfStack().ReductionMap[D];
1612 assert(ReductionData.ReductionRange.isInvalid() &&
1613 (getTopOfStack().Directive == OMPD_taskgroup ||
1614 ((isOpenMPParallelDirective(getTopOfStack().Directive) ||
1615 isOpenMPWorksharingDirective(getTopOfStack().Directive)) &&
1616 !isOpenMPSimdDirective(getTopOfStack().Directive))) &&
1617 "Additional reduction info may be specified only once for reduction "
1618 "items.");
1619 ReductionData.set(ReductionRef, SR);
1620 Expr *&TaskgroupReductionRef = getTopOfStack().TaskgroupReductionRef;
1621 if (!TaskgroupReductionRef) {
1622 VarDecl *VD = buildVarDecl(SemaRef, SR.getBegin(),
1623 SemaRef.Context.VoidPtrTy, ".task_red.");
1624 TaskgroupReductionRef =
1625 buildDeclRefExpr(SemaRef, VD, SemaRef.Context.VoidPtrTy, SR.getBegin());
1626 }
1627}
1628
1629const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1630 const ValueDecl *D, SourceRange &SR, BinaryOperatorKind &BOK,
1631 Expr *&TaskgroupDescriptor) const {
1632 D = getCanonicalDecl(D);
1633 assert(!isStackEmpty() && "Data-sharing attributes stack is empty.");
1634 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1635 const DSAInfo &Data = I->SharingMap.lookup(D);
1636 if (Data.Attributes != OMPC_reduction ||
1637 Data.Modifier != OMPC_REDUCTION_task)
1638 continue;
1639 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1640 if (!ReductionData.ReductionOp ||
1641 ReductionData.ReductionOp.is<const Expr *>())
1642 return DSAVarData();
1643 SR = ReductionData.ReductionRange;
1644 BOK = ReductionData.ReductionOp.get<ReductionData::BOKPtrType>();
1645 assert(I->TaskgroupReductionRef && "taskgroup reduction reference "
1646 "expression for the descriptor is not "
1647 "set.");
1648 TaskgroupDescriptor = I->TaskgroupReductionRef;
1649 return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
1650 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1651 /*AppliedToPointee=*/false);
1652 }
1653 return DSAVarData();
1654}
1655
1656const DSAStackTy::DSAVarData DSAStackTy::getTopMostTaskgroupReductionData(
1657 const ValueDecl *D, SourceRange &SR, const Expr *&ReductionRef,
1658 Expr *&TaskgroupDescriptor) const {
1659 D = getCanonicalDecl(D);
1660 assert(!isStackEmpty() && "Data-sharing attributes stack is empty.");
1661 for (const_iterator I = begin() + 1, E = end(); I != E; ++I) {
1662 const DSAInfo &Data = I->SharingMap.lookup(D);
1663 if (Data.Attributes != OMPC_reduction ||
1664 Data.Modifier != OMPC_REDUCTION_task)
1665 continue;
1666 const ReductionData &ReductionData = I->ReductionMap.lookup(D);
1667 if (!ReductionData.ReductionOp ||
1668 !ReductionData.ReductionOp.is<const Expr *>())
1669 return DSAVarData();
1670 SR = ReductionData.ReductionRange;
1671 ReductionRef = ReductionData.ReductionOp.get<const Expr *>();
1672 assert(I->TaskgroupReductionRef && "taskgroup reduction reference "
1673 "expression for the descriptor is not "
1674 "set.");
1675 TaskgroupDescriptor = I->TaskgroupReductionRef;
1676 return DSAVarData(I->Directive, OMPC_reduction, Data.RefExpr.getPointer(),
1677 Data.PrivateCopy, I->DefaultAttrLoc, OMPC_REDUCTION_task,
1678 /*AppliedToPointee=*/false);
1679 }
1680 return DSAVarData();
1681}
1682
1683bool DSAStackTy::isOpenMPLocal(VarDecl *D, const_iterator I) const {
1684 D = D->getCanonicalDecl();
1685 for (const_iterator E = end(); I != E; ++I) {
1686 if (isImplicitOrExplicitTaskingRegion(I->Directive) ||
1687 isOpenMPTargetExecutionDirective(I->Directive)) {
1688 if (I->CurScope) {
1689 Scope *TopScope = I->CurScope->getParent();
1690 Scope *CurScope = getCurScope();
1691 while (CurScope && CurScope != TopScope && !CurScope->isDeclScope(D))
1692 CurScope = CurScope->getParent();
1693 return CurScope != TopScope;
1694 }
1695 for (DeclContext *DC = D->getDeclContext(); DC; DC = DC->getParent())
1696 if (I->Context == DC)
1697 return true;
1698 return false;
1699 }
1700 }
1701 return false;
1702}
1703
1704static bool isConstNotMutableType(Sema &SemaRef, QualType Type,
1705 bool AcceptIfMutable = true,
1706 bool *IsClassType = nullptr) {
1707 ASTContext &Context = SemaRef.getASTContext();
1708 Type = Type.getNonReferenceType().getCanonicalType();
1709 bool IsConstant = Type.isConstant(Context);
1710 Type = Context.getBaseElementType(Type);
1711 const CXXRecordDecl *RD = AcceptIfMutable && SemaRef.getLangOpts().CPlusPlus
1712 ? Type->getAsCXXRecordDecl()
1713 : nullptr;
1714 if (const auto *CTSD = dyn_cast_or_null<ClassTemplateSpecializationDecl>(RD))
1715 if (const ClassTemplateDecl *CTD = CTSD->getSpecializedTemplate())
1716 RD = CTD->getTemplatedDecl();
1717 if (IsClassType)
1718 *IsClassType = RD;
1719 return IsConstant && !(SemaRef.getLangOpts().CPlusPlus && RD &&
1720 RD->hasDefinition() && RD->hasMutableFields());
1721}
1722
1723static bool rejectConstNotMutableType(Sema &SemaRef, const ValueDecl *D,
1724 QualType Type, OpenMPClauseKind CKind,
1725 SourceLocation ELoc,
1726 bool AcceptIfMutable = true,
1727 bool ListItemNotVar = false) {
1728 ASTContext &Context = SemaRef.getASTContext();
1729 bool IsClassType;
1730 if (isConstNotMutableType(SemaRef, Type, AcceptIfMutable, &IsClassType)) {
1731 unsigned Diag = ListItemNotVar ? diag::err_omp_const_list_item
1732 : IsClassType ? diag::err_omp_const_not_mutable_variable
1733 : diag::err_omp_const_variable;
1734 SemaRef.Diag(ELoc, Diag) << getOpenMPClauseName(CKind);
1735 if (!ListItemNotVar && D) {
1736 const VarDecl *VD = dyn_cast<VarDecl>(D);
1737 bool IsDecl = !VD || VD->isThisDeclarationADefinition(Context) ==
1738 VarDecl::DeclarationOnly;
1739 SemaRef.Diag(D->getLocation(),
1740 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
1741 << D;
1742 }
1743 return true;
1744 }
1745 return false;
1746}
1747
1748const DSAStackTy::DSAVarData DSAStackTy::getTopDSA(ValueDecl *D,
1749 bool FromParent) {
1750 D = getCanonicalDecl(D);
1751 DSAVarData DVar;
1752
1753 auto *VD = dyn_cast<VarDecl>(D);
1754 auto TI = Threadprivates.find(D);
1755 if (TI != Threadprivates.end()) {
1756 DVar.RefExpr = TI->getSecond().RefExpr.getPointer();
1757 DVar.CKind = OMPC_threadprivate;
1758 DVar.Modifier = TI->getSecond().Modifier;
1759 return DVar;
1760 }
1761 if (VD && VD->hasAttr<OMPThreadPrivateDeclAttr>()) {
1762 DVar.RefExpr = buildDeclRefExpr(
1763 SemaRef, VD, D->getType().getNonReferenceType(),
1764 VD->getAttr<OMPThreadPrivateDeclAttr>()->getLocation());
1765 DVar.CKind = OMPC_threadprivate;
1766 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1767 return DVar;
1768 }
1769 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1770 // in a Construct, C/C++, predetermined, p.1]
1771 // Variables appearing in threadprivate directives are threadprivate.
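 // Illustrative example: a C++11 'thread_local int t;' (or '__thread int t;')
 // is treated as threadprivate here even without an explicit
 // '#pragma omp threadprivate(t)'.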
1772 if ((VD && VD->getTLSKind() != VarDecl::TLS_None &&
1773 !(VD->hasAttr<OMPThreadPrivateDeclAttr>() &&
1774 SemaRef.getLangOpts().OpenMPUseTLS &&
1775 SemaRef.getASTContext().getTargetInfo().isTLSSupported())) ||
1776 (VD && VD->getStorageClass() == SC_Register &&
1777 VD->hasAttr<AsmLabelAttr>() && !VD->isLocalVarDecl())) {
1778 DVar.RefExpr = buildDeclRefExpr(
1779 SemaRef, VD, D->getType().getNonReferenceType(), D->getLocation());
1780 DVar.CKind = OMPC_threadprivate;
1781 addDSA(D, DVar.RefExpr, OMPC_threadprivate);
1782 return DVar;
1783 }
1784 if (SemaRef.getLangOpts().OpenMPCUDAMode && VD &&
1785 VD->isLocalVarDeclOrParm() && !isStackEmpty() &&
1786 !isLoopControlVariable(D).first) {
1787 const_iterator IterTarget =
1788 std::find_if(begin(), end(), [](const SharingMapTy &Data) {
1789 return isOpenMPTargetExecutionDirective(Data.Directive);
1790 });
1791 if (IterTarget != end()) {
1792 const_iterator ParentIterTarget = IterTarget + 1;
1793 for (const_iterator Iter = begin(); Iter != ParentIterTarget; ++Iter) {
1794 if (isOpenMPLocal(VD, Iter)) {
1795 DVar.RefExpr =
1796 buildDeclRefExpr(SemaRef, VD, D->getType().getNonReferenceType(),
1797 D->getLocation());
1798 DVar.CKind = OMPC_threadprivate;
1799 return DVar;
1800 }
1801 }
1802 if (!isClauseParsingMode() || IterTarget != begin()) {
1803 auto DSAIter = IterTarget->SharingMap.find(D);
1804 if (DSAIter != IterTarget->SharingMap.end() &&
1805 isOpenMPPrivate(DSAIter->getSecond().Attributes)) {
1806 DVar.RefExpr = DSAIter->getSecond().RefExpr.getPointer();
1807 DVar.CKind = OMPC_threadprivate;
1808 return DVar;
1809 }
1810 const_iterator End = end();
1811 if (!SemaRef.isOpenMPCapturedByRef(D,
1812 std::distance(ParentIterTarget, End),
1813 /*OpenMPCaptureLevel=*/0)) {
1814 DVar.RefExpr =
1815 buildDeclRefExpr(SemaRef, VD, D->getType().getNonReferenceType(),
1816 IterTarget->ConstructLoc);
1817 DVar.CKind = OMPC_threadprivate;
1818 return DVar;
1819 }
1820 }
1821 }
1822 }
1823
1824 if (isStackEmpty())
1825 // Not in OpenMP execution region and top scope was already checked.
1826 return DVar;
1827
1828 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1829 // in a Construct, C/C++, predetermined, p.4]
1830 // Static data members are shared.
1831 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1832 // in a Construct, C/C++, predetermined, p.7]
1833 // Variables with static storage duration that are declared in a scope
1834 // inside the construct are shared.
1835 if (VD && VD->isStaticDataMember()) {
1836 // Check for explicitly specified attributes.
1837 const_iterator I = begin();
1838 const_iterator EndI = end();
1839 if (FromParent && I != EndI)
1840 ++I;
1841 if (I != EndI) {
1842 auto It = I->SharingMap.find(D);
1843 if (It != I->SharingMap.end()) {
1844 const DSAInfo &Data = It->getSecond();
1845 DVar.RefExpr = Data.RefExpr.getPointer();
1846 DVar.PrivateCopy = Data.PrivateCopy;
1847 DVar.CKind = Data.Attributes;
1848 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1849 DVar.DKind = I->Directive;
1850 DVar.Modifier = Data.Modifier;
1851 DVar.AppliedToPointee = Data.AppliedToPointee;
1852 return DVar;
1853 }
1854 }
1855
1856 DVar.CKind = OMPC_shared;
1857 return DVar;
1858 }
1859
1860 auto &&MatchesAlways = [](OpenMPDirectiveKind) { return true; };
1861 // The predetermined shared attribute for const-qualified types having no
1862 // mutable members was removed after OpenMP 3.1.
1863 if (SemaRef.LangOpts.OpenMP <= 31) {
1864 // OpenMP [2.9.1.1, Data-sharing Attribute Rules for Variables Referenced
1865 // in a Construct, C/C++, predetermined, p.6]
1866 // Variables with const qualified type having no mutable member are
1867 // shared.
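 // Illustrative example: with -fopenmp-version=31, a 'const int c = 1;'
 // referenced in a parallel region is predetermined shared; from OpenMP 4.0
 // onwards it instead follows the normal implicit data-sharing rules.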
1868 if (isConstNotMutableType(SemaRef, D->getType())) {
1869 // Variables with const-qualified type having no mutable member may be
1870 // listed in a firstprivate clause, even if they are static data members.
1871 DSAVarData DVarTemp = hasInnermostDSA(
1872 D,
1873 [](OpenMPClauseKind C, bool) {
1874 return C == OMPC_firstprivate || C == OMPC_shared;
1875 },
1876 MatchesAlways, FromParent);
1877 if (DVarTemp.CKind != OMPC_unknown && DVarTemp.RefExpr)
1878 return DVarTemp;
1879
1880 DVar.CKind = OMPC_shared;
1881 return DVar;
1882 }
1883 }
1884
1885 // Explicitly specified attributes and local variables with predetermined
1886 // attributes.
1887 const_iterator I = begin();
1888 const_iterator EndI = end();
1889 if (FromParent && I != EndI)
1890 ++I;
1891 if (I == EndI)
1892 return DVar;
1893 auto It = I->SharingMap.find(D);
1894 if (It != I->SharingMap.end()) {
1895 const DSAInfo &Data = It->getSecond();
1896 DVar.RefExpr = Data.RefExpr.getPointer();
1897 DVar.PrivateCopy = Data.PrivateCopy;
1898 DVar.CKind = Data.Attributes;
1899 DVar.ImplicitDSALoc = I->DefaultAttrLoc;
1900 DVar.DKind = I->Directive;
1901 DVar.Modifier = Data.Modifier;
1902 DVar.AppliedToPointee = Data.AppliedToPointee;
1903 }
1904
1905 return DVar;
1906}
1907
1908const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1909 bool FromParent) const {
1910 if (isStackEmpty()) {
1911 const_iterator I;
1912 return getDSA(I, D);
1913 }
1914 D = getCanonicalDecl(D);
1915 const_iterator StartI = begin();
1916 const_iterator EndI = end();
1917 if (FromParent && StartI != EndI)
1918 ++StartI;
1919 return getDSA(StartI, D);
1920}
1921
1922const DSAStackTy::DSAVarData DSAStackTy::getImplicitDSA(ValueDecl *D,
1923 unsigned Level) const {
1924 if (getStackSize() <= Level)
1925 return DSAVarData();
1926 D = getCanonicalDecl(D);
1927 const_iterator StartI = std::next(begin(), getStackSize() - 1 - Level);
1928 return getDSA(StartI, D);
1929}
1930
1931const DSAStackTy::DSAVarData
1932DSAStackTy::hasDSA(ValueDecl *D,
1933 const llvm::function_ref<bool(OpenMPClauseKind, bool,
1934 DefaultDataSharingAttributes)>
1935 CPred,
1936 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
1937 bool FromParent) const {
1938 if (isStackEmpty())
1939 return {};
1940 D = getCanonicalDecl(D);
1941 const_iterator I = begin();
1942 const_iterator EndI = end();
1943 if (FromParent && I != EndI)
1944 ++I;
1945 for (; I != EndI; ++I) {
1946 if (!DPred(I->Directive) &&
1947 !isImplicitOrExplicitTaskingRegion(I->Directive))
1948 continue;
1949 const_iterator NewI = I;
1950 DSAVarData DVar = getDSA(NewI, D);
1951 if (I == NewI && CPred(DVar.CKind, DVar.AppliedToPointee, I->DefaultAttr))
1952 return DVar;
1953 }
1954 return {};
1955}
1956
1957const DSAStackTy::DSAVarData DSAStackTy::hasInnermostDSA(
1958 ValueDecl *D, const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
1959 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
1960 bool FromParent) const {
1961 if (isStackEmpty())
1962 return {};
1963 D = getCanonicalDecl(D);
1964 const_iterator StartI = begin();
1965 const_iterator EndI = end();
1966 if (FromParent && StartI != EndI)
1967 ++StartI;
1968 if (StartI == EndI || !DPred(StartI->Directive))
1969 return {};
1970 const_iterator NewI = StartI;
1971 DSAVarData DVar = getDSA(NewI, D);
1972 return (NewI == StartI && CPred(DVar.CKind, DVar.AppliedToPointee))
1973 ? DVar
1974 : DSAVarData();
1975}
1976
1977bool DSAStackTy::hasExplicitDSA(
1978 const ValueDecl *D,
1979 const llvm::function_ref<bool(OpenMPClauseKind, bool)> CPred,
1980 unsigned Level, bool NotLastprivate) const {
1981 if (getStackSize() <= Level)
1982 return false;
1983 D = getCanonicalDecl(D);
1984 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
1985 auto I = StackElem.SharingMap.find(D);
1986 if (I != StackElem.SharingMap.end() && I->getSecond().RefExpr.getPointer() &&
1987 CPred(I->getSecond().Attributes, I->getSecond().AppliedToPointee) &&
1988 (!NotLastprivate || !I->getSecond().RefExpr.getInt()))
1989 return true;
1990 // Check predetermined rules for the loop control variables.
1991 auto LI = StackElem.LCVMap.find(D);
1992 if (LI != StackElem.LCVMap.end())
1993 return CPred(OMPC_private, /*AppliedToPointee=*/false);
1994 return false;
1995}
1996
1997bool DSAStackTy::hasExplicitDirective(
1998 const llvm::function_ref<bool(OpenMPDirectiveKind)> DPred,
1999 unsigned Level) const {
2000 if (getStackSize() <= Level)
2001 return false;
2002 const SharingMapTy &StackElem = getStackElemAtLevel(Level);
2003 return DPred(StackElem.Directive);
2004}
2005
2006bool DSAStackTy::hasDirective(
2007 const llvm::function_ref<bool(OpenMPDirectiveKind,
2008 const DeclarationNameInfo &, SourceLocation)>
2009 DPred,
2010 bool FromParent) const {
2011 // We look only in the enclosing region.
2012 size_t Skip = FromParent ? 2 : 1;
2013 for (const_iterator I = begin() + std::min(Skip, getStackSize()), E = end();
2014 I != E; ++I) {
2015 if (DPred(I->Directive, I->DirectiveName, I->ConstructLoc))
2016 return true;
2017 }
2018 return false;
2019}
2020
2021void Sema::InitDataSharingAttributesStack() {
2022 VarDataSharingAttributesStack = new DSAStackTy(*this);
2023}
2024
2025#define DSAStack static_cast<DSAStackTy *>(VarDataSharingAttributesStack)
2026
2027void Sema::pushOpenMPFunctionRegion() { DSAStack->pushFunction(); }
2028
2029void Sema::popOpenMPFunctionRegion(const FunctionScopeInfo *OldFSI) {
2030 DSAStack->popFunction(OldFSI);
2031}
2032
2033static bool isOpenMPDeviceDelayedContext(Sema &S) {
2034 assert(S.LangOpts.OpenMP && S.LangOpts.OpenMPIsTargetDevice &&
2035 "Expected OpenMP device compilation.");
2036 return !S.isInOpenMPTargetExecutionDirective();
2037}
2038
2039namespace {
2040/// Status of the function emission on the host/device.
2041enum class FunctionEmissionStatus {
2042 Emitted,
2043 Discarded,
2044 Unknown,
2045};
2046} // anonymous namespace
2047
2048Sema::SemaDiagnosticBuilder
2049Sema::diagIfOpenMPDeviceCode(SourceLocation Loc, unsigned DiagID,
2050 const FunctionDecl *FD) {
2051 assert(LangOpts.OpenMP && LangOpts.OpenMPIsTargetDevice &&
2052 "Expected OpenMP device compilation.");
2053
2054 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2055 if (FD) {
2056 FunctionEmissionStatus FES = getEmissionStatus(FD);
2057 switch (FES) {
2058 case FunctionEmissionStatus::Emitted:
2059 Kind = SemaDiagnosticBuilder::K_ImmediateWithCallStack;
2060 break;
2061 case FunctionEmissionStatus::Unknown:
2062 // TODO: We should always delay diagnostics here in case a target
2063 // region is in a function we do not emit. However, as the
2064 // current diagnostics are associated with the function containing
2065 // the target region and we do not emit that one, we would miss out
2066 // on diagnostics for the target region itself. We need to anchor
2067 // the diagnostics with the new generated function *or* ensure we
2068 // emit diagnostics associated with the surrounding function.
2069 Kind = isOpenMPDeviceDelayedContext(*this)
2070 ? SemaDiagnosticBuilder::K_Deferred
2071 : SemaDiagnosticBuilder::K_Immediate;
2072 break;
2073 case FunctionEmissionStatus::TemplateDiscarded:
2074 case FunctionEmissionStatus::OMPDiscarded:
2075 Kind = SemaDiagnosticBuilder::K_Nop;
2076 break;
2077 case FunctionEmissionStatus::CUDADiscarded:
2078 llvm_unreachable("CUDADiscarded unexpected in OpenMP device compilation");
2079 break;
2080 }
2081 }
2082
2083 return SemaDiagnosticBuilder(Kind, Loc, DiagID, FD, *this);
2084}
2085
2086Sema::SemaDiagnosticBuilder Sema::diagIfOpenMPHostCode(SourceLocation Loc,
2087 unsigned DiagID,
2088 const FunctionDecl *FD) {
2089 assert(LangOpts.OpenMP && !LangOpts.OpenMPIsTargetDevice &&
2090 "Expected OpenMP host compilation.");
2091
2092 SemaDiagnosticBuilder::Kind Kind = SemaDiagnosticBuilder::K_Nop;
2093 if (FD) {
2094 FunctionEmissionStatus FES = getEmissionStatus(FD);
2095 switch (FES) {
2096 case FunctionEmissionStatus::Emitted:
2097 Kind = SemaDiagnosticBuilder::K_ImmediateWithCallStack;
2098 break;
2099 case FunctionEmissionStatus::Unknown:
2100 Kind = SemaDiagnosticBuilder::K_Deferred;
2101 break;
2102 case FunctionEmissionStatus::TemplateDiscarded:
2103 case FunctionEmissionStatus::OMPDiscarded:
2104 case FunctionEmissionStatus::CUDADiscarded:
2105 Kind = SemaDiagnosticBuilder::K_Nop;
2106 break;
2107 }
2108 }
2109
2110 return SemaDiagnosticBuilder(Kind, Loc, DiagID, FD, *this);
2111}
2112
2113static OpenMPDefaultmapClauseKind
2114getVariableCategoryFromDecl(const LangOptions &LO, const ValueDecl *VD) {
2115 if (LO.OpenMP <= 45) {
2116 if (VD->getType().getNonReferenceType()->isScalarType())
2117 return OMPC_DEFAULTMAP_scalar;
2118 return OMPC_DEFAULTMAP_aggregate;
2119 }
2120 if (VD->getType().getNonReferenceType()->isAnyPointerType())
2121 return OMPC_DEFAULTMAP_pointer;
2122 if (VD->getType().getNonReferenceType()->isScalarType())
2123 return OMPC_DEFAULTMAP_scalar;
2124 return OMPC_DEFAULTMAP_aggregate;
2125}
2126
2127bool Sema::isOpenMPCapturedByRef(const ValueDecl *D, unsigned Level,
2128 unsigned OpenMPCaptureLevel) const {
2129 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2130
2131 ASTContext &Ctx = getASTContext();
2132 bool IsByRef = true;
2133
2134 // Find the directive that is associated with the provided scope.
2135 D = cast<ValueDecl>(D->getCanonicalDecl());
2136 QualType Ty = D->getType();
2137
2138 bool IsVariableUsedInMapClause = false;
2139 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective, Level)) {
2140 // This table summarizes how a given variable should be passed to the device
2141 // given its type and the clauses where it appears. This table is based on
2142 // the description in OpenMP 4.5 [2.10.4, target Construct] and
2143 // OpenMP 4.5 [2.15.5, Data-mapping Attribute Rules and Clauses].
2144 //
2145 // =========================================================================
2146 // | type | defaultmap | pvt | first | is_device_ptr | map | res. |
2147 // | |(tofrom:scalar)| | pvt | |has_dv_adr| |
2148 // =========================================================================
2149 // | scl | | | | - | | bycopy|
2150 // | scl | | - | x | - | - | bycopy|
2151 // | scl | | x | - | - | - | null |
2152 // | scl | x | | | - | | byref |
2153 // | scl | x | - | x | - | - | bycopy|
2154 // | scl | x | x | - | - | - | null |
2155 // | scl | | - | - | - | x | byref |
2156 // | scl | x | - | - | - | x | byref |
2157 //
2158 // | agg | n.a. | | | - | | byref |
2159 // | agg | n.a. | - | x | - | - | byref |
2160 // | agg | n.a. | x | - | - | - | null |
2161 // | agg | n.a. | - | - | - | x | byref |
2162 // | agg | n.a. | - | - | - | x[] | byref |
2163 //
2164 // | ptr | n.a. | | | - | | bycopy|
2165 // | ptr | n.a. | - | x | - | - | bycopy|
2166 // | ptr | n.a. | x | - | - | - | null |
2167 // | ptr | n.a. | - | - | - | x | byref |
2168 // | ptr | n.a. | - | - | - | x[] | bycopy|
2169 // | ptr | n.a. | - | - | x | | bycopy|
2170 // | ptr | n.a. | - | - | x | x | bycopy|
2171 // | ptr | n.a. | - | - | x | x[] | bycopy|
2172 // =========================================================================
2173 // Legend:
2174 // scl - scalar
2175 // ptr - pointer
2176 // agg - aggregate
2177 // x - applies
2178 // - - invalid in this combination
2179 // [] - mapped with an array section
2180 // byref - should be mapped by reference
2181 // bycopy - should be mapped by copy (passed by value)
2182 // null - initialize a local variable to null on the device
2183 //
2184 // Observations:
2185 // - All scalar declarations that show up in a map clause have to be passed
2186 // by reference, because they may have been mapped in the enclosing data
2187 // environment.
2188 // - If the scalar value does not fit the size of uintptr, it has to be
2189 // passed by reference, regardless of the result in the table above.
2190 // - For pointers mapped by value that have either an implicit map or an
2191 // array section, the runtime library may pass the NULL value to the
2192 // device instead of the value passed to it by the compiler.
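 // Illustrative example of the table: for 'int s;' used in
 //   #pragma omp target map(tofrom: s)
 // the scalar appears in a map clause and is therefore captured by reference,
 // while the same scalar captured only implicitly would be passed by copy.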
2193
2194 if (Ty->isReferenceType())
2195 Ty = Ty->castAs<ReferenceType>()->getPointeeType();
2196
2197 // Locate map clauses and see if the variable being captured is referred to
2198 // in any of those clauses. Here we only care about variables, not fields,
2199 // because fields are part of aggregates.
2200 bool IsVariableAssociatedWithSection = false;
2201
2202 DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2203 D, Level,
2204 [&IsVariableUsedInMapClause, &IsVariableAssociatedWithSection,
2205 D](OMPClauseMappableExprCommon::MappableExprComponentListRef
2206 MapExprComponents,
2207 OpenMPClauseKind WhereFoundClauseKind) {
2208 // Both map and has_device_addr clauses information influences how a
2209 // variable is captured. E.g. is_device_ptr does not require changing
2210 // the default behavior.
2211 if (WhereFoundClauseKind != OMPC_map &&
2212 WhereFoundClauseKind != OMPC_has_device_addr)
2213 return false;
2214
2215 auto EI = MapExprComponents.rbegin();
2216 auto EE = MapExprComponents.rend();
2217
2218 assert(EI != EE && "Invalid map expression!");
2219
2220 if (isa<DeclRefExpr>(EI->getAssociatedExpression()))
2221 IsVariableUsedInMapClause |= EI->getAssociatedDeclaration() == D;
2222
2223 ++EI;
2224 if (EI == EE)
2225 return false;
2226 auto Last = std::prev(EE);
2227 const auto *UO =
2228 dyn_cast<UnaryOperator>(Last->getAssociatedExpression());
2229 if ((UO && UO->getOpcode() == UO_Deref) ||
2230 isa<ArraySubscriptExpr>(Last->getAssociatedExpression()) ||
2231 isa<OMPArraySectionExpr>(Last->getAssociatedExpression()) ||
2232 isa<MemberExpr>(EI->getAssociatedExpression()) ||
2233 isa<OMPArrayShapingExpr>(Last->getAssociatedExpression())) {
2234 IsVariableAssociatedWithSection = true;
2235 // There is nothing more we need to know about this variable.
2236 return true;
2237 }
2238
2239 // Keep looking for more map info.
2240 return false;
2241 });
2242
2243 if (IsVariableUsedInMapClause) {
2244 // If variable is identified in a map clause it is always captured by
2245 // reference except if it is a pointer that is dereferenced somehow.
2246 IsByRef = !(Ty->isPointerType() && IsVariableAssociatedWithSection);
2247 } else {
2248 // By default, all the data that has a scalar type is mapped by copy
2249 // (except for reduction variables).
2250 // Defaultmap scalar is mutually exclusive with defaultmap pointer.
2251 IsByRef = (DSAStack->isForceCaptureByReferenceInTargetExecutable() &&
2252 !Ty->isAnyPointerType()) ||
2253 !Ty->isScalarType() ||
2254 DSAStack->isDefaultmapCapturedByRef(
2255 Level, getVariableCategoryFromDecl(LangOpts, D)) ||
2256 DSAStack->hasExplicitDSA(
2257 D,
2258 [](OpenMPClauseKind K, bool AppliedToPointee) {
2259 return K == OMPC_reduction && !AppliedToPointee;
2260 },
2261 Level);
2262 }
2263 }
2264
2265 if (IsByRef && Ty.getNonReferenceType()->isScalarType()) {
2266 IsByRef =
2267 ((IsVariableUsedInMapClause &&
2268 DSAStack->getCaptureRegion(Level, OpenMPCaptureLevel) ==
2269 OMPD_target) ||
2270 !(DSAStack->hasExplicitDSA(
2271 D,
2272 [](OpenMPClauseKind K, bool AppliedToPointee) -> bool {
2273 return K == OMPC_firstprivate ||
2274 (K == OMPC_reduction && AppliedToPointee);
2275 },
2276 Level, /*NotLastprivate=*/true) ||
2277 DSAStack->isUsesAllocatorsDecl(Level, D))) &&
2278 // If the variable is artificial and must be captured by value - try to
2279 // capture by value.
2280 !(isa<OMPCapturedExprDecl>(D) && !D->hasAttr<OMPCaptureNoInitAttr>() &&
2281 !cast<OMPCapturedExprDecl>(D)->getInit()->isGLValue()) &&
2282 // If the variable is implicitly firstprivate and scalar - capture by
2283 // copy
2284 !((DSAStack->getDefaultDSA() == DSA_firstprivate ||
2285 DSAStack->getDefaultDSA() == DSA_private) &&
2286 !DSAStack->hasExplicitDSA(
2287 D, [](OpenMPClauseKind K, bool) { return K != OMPC_unknown; },
2288 Level) &&
2289 !DSAStack->isLoopControlVariable(D, Level).first);
2290 }
2291
2292 // When passing data by copy, we need to make sure it fits the uintptr size
2293 // and alignment, because the runtime library only deals with uintptr types.
2294 // If it does not fit the uintptr size, we need to pass the data by reference
2295 // instead.
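 // Illustrative example: a scalar wider than uintptr (e.g. a 16-byte
 // 'long double' on x86-64) cannot travel through the runtime's pointer-sized
 // argument, so it is passed by reference even when the rules above would
 // select bycopy.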
2296 if (!IsByRef && (Ctx.getTypeSizeInChars(Ty) >
2299 Ctx.getTypeAlignInChars(Ctx.getUIntPtrType()))) {
2300 IsByRef = true;
2301 }
2302
2303 return IsByRef;
2304}
2305
2306unsigned Sema::getOpenMPNestingLevel() const {
2307 assert(getLangOpts().OpenMP);
2308 return DSAStack->getNestingLevel();
2309}
2310
2311bool Sema::isInOpenMPTaskUntiedContext() const {
2312 return isOpenMPTaskingDirective(DSAStack->getCurrentDirective()) &&
2313 DSAStack->isUntiedRegion();
2314}
2315
2316bool Sema::isInOpenMPTargetExecutionDirective() const {
2317 return (isOpenMPTargetExecutionDirective(DSAStack->getCurrentDirective()) &&
2318 !DSAStack->isClauseParsingMode()) ||
2319 DSAStack->hasDirective(
2320 [](OpenMPDirectiveKind K, const DeclarationNameInfo &,
2321 SourceLocation) -> bool {
2322 return isOpenMPTargetExecutionDirective(K);
2323 },
2324 false);
2325}
2326
2327bool Sema::isOpenMPRebuildMemberExpr(ValueDecl *D) {
2328 // Only rebuild for Field.
2329 if (!dyn_cast<FieldDecl>(D))
2330 return false;
2331 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2332 D,
2333 [](OpenMPClauseKind C, bool AppliedToPointee,
2334 DefaultDataSharingAttributes DefaultAttr) {
2335 return isOpenMPPrivate(C) && !AppliedToPointee &&
2336 (DefaultAttr == DSA_firstprivate || DefaultAttr == DSA_private);
2337 },
2338 [](OpenMPDirectiveKind) { return true; },
2339 DSAStack->isClauseParsingMode());
2340 if (DVarPrivate.CKind != OMPC_unknown)
2341 return true;
2342 return false;
2343}
2344
2345static OMPCapturedExprDecl *buildCaptureDecl(Sema &S, IdentifierInfo *Id,
2346 Expr *CaptureExpr, bool WithInit,
2347 DeclContext *CurContext,
2348 bool AsExpression);
2349
2350VarDecl *Sema::isOpenMPCapturedDecl(const ValueDecl *D, bool CheckScopeInfo,
2351 unsigned StopAt) {
2352 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2353 D = getCanonicalDecl(D);
2354
2355 auto *VD = dyn_cast<VarDecl>(D);
2356 // Do not capture constexpr variables.
2357 if (VD && VD->isConstexpr())
2358 return nullptr;
2359
2360 // If we want to determine whether the variable should be captured from the
2361 // perspective of the current capturing scope, and we've already left all the
2362 // capturing scopes of the top directive on the stack, check from the
2363 // perspective of its parent directive (if any) instead.
2364 DSAStackTy::ParentDirectiveScope InParentDirectiveRAII(
2365 *DSAStack, CheckScopeInfo && DSAStack->isBodyComplete());
2366
2367 // If we are attempting to capture a global variable in a directive with
2368 // 'target' we return true so that this global is also mapped to the device.
2369 //
2370 if (VD && !VD->hasLocalStorage() &&
2373 DSAStackTy::DSAVarData DVarTop =
2374 DSAStack->getTopDSA(D, DSAStack->isClauseParsingMode());
2375 if (DVarTop.CKind != OMPC_unknown && DVarTop.RefExpr)
2376 return VD;
2377 // If the declaration is enclosed in a 'declare target' directive,
2378 // then it should not be captured.
2379 //
2380 if (OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2381 return nullptr;
2382 CapturedRegionScopeInfo *CSI = nullptr;
2383 for (FunctionScopeInfo *FSI : llvm::drop_begin(
2384 llvm::reverse(FunctionScopes),
2385 CheckScopeInfo ? (FunctionScopes.size() - (StopAt + 1)) : 0)) {
2386 if (!isa<CapturingScopeInfo>(FSI))
2387 return nullptr;
2388 if (auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2389 if (RSI->CapRegionKind == CR_OpenMP) {
2390 CSI = RSI;
2391 break;
2392 }
2393 }
2394 assert(CSI && "Failed to find CapturedRegionScopeInfo");
2395 SmallVector<OpenMPDirectiveKind, 4> Regions;
2396 getOpenMPCaptureRegions(Regions,
2397 DSAStack->getDirective(CSI->OpenMPLevel));
2398 if (Regions[CSI->OpenMPCaptureLevel] != OMPD_task)
2399 return VD;
2400 }
2402 // Try to mark variable as declare target if it is used in capturing
2403 // regions.
2404 if (LangOpts.OpenMP <= 45 &&
2405 !OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD))
2406 checkDeclIsAllowedInOpenMPTarget(nullptr, VD);
2407 return nullptr;
2408 }
2409 }
2410
2411 if (CheckScopeInfo) {
2412 bool OpenMPFound = false;
2413 for (unsigned I = StopAt + 1; I > 0; --I) {
2414 FunctionScopeInfo *FSI = FunctionScopes[I - 1];
2415 if (!isa<CapturingScopeInfo>(FSI))
2416 return nullptr;
2417 if (auto *RSI = dyn_cast<CapturedRegionScopeInfo>(FSI))
2418 if (RSI->CapRegionKind == CR_OpenMP) {
2419 OpenMPFound = true;
2420 break;
2421 }
2422 }
2423 if (!OpenMPFound)
2424 return nullptr;
2425 }
2426
2427 if (DSAStack->getCurrentDirective() != OMPD_unknown &&
2428 (!DSAStack->isClauseParsingMode() ||
2429 DSAStack->getParentDirective() != OMPD_unknown)) {
2430 auto &&Info = DSAStack->isLoopControlVariable(D);
2431 if (Info.first ||
2432 (VD && VD->hasLocalStorage() &&
2433 isImplicitOrExplicitTaskingRegion(DSAStack->getCurrentDirective())) ||
2434 (VD && DSAStack->isForceVarCapturing()))
2435 return VD ? VD : Info.second;
2436 DSAStackTy::DSAVarData DVarTop =
2437 DSAStack->getTopDSA(D, DSAStack->isClauseParsingMode());
2438 if (DVarTop.CKind != OMPC_unknown && isOpenMPPrivate(DVarTop.CKind) &&
2439 (!VD || VD->hasLocalStorage() || !DVarTop.AppliedToPointee))
2440 return VD ? VD : cast<VarDecl>(DVarTop.PrivateCopy->getDecl());
2441 // Threadprivate variables must not be captured.
2442 if (isOpenMPThreadPrivate(DVarTop.CKind))
2443 return nullptr;
2444 // The variable is not private or it is the variable in the directive with
2445 // default(none) clause and not used in any clause.
2446 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2447 D,
2448 [](OpenMPClauseKind C, bool AppliedToPointee, bool) {
2449 return isOpenMPPrivate(C) && !AppliedToPointee;
2450 },
2451 [](OpenMPDirectiveKind) { return true; },
2452 DSAStack->isClauseParsingMode());
2453 // Global shared must not be captured.
2454 if (VD && !VD->hasLocalStorage() && DVarPrivate.CKind == OMPC_unknown &&
2455 ((DSAStack->getDefaultDSA() != DSA_none &&
2456 DSAStack->getDefaultDSA() != DSA_private &&
2457 DSAStack->getDefaultDSA() != DSA_firstprivate) ||
2458 DVarTop.CKind == OMPC_shared))
2459 return nullptr;
2460 auto *FD = dyn_cast<FieldDecl>(D);
2461 if (DVarPrivate.CKind != OMPC_unknown && !VD && FD &&
2462 !DVarPrivate.PrivateCopy) {
2463 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2464 D,
2465 [](OpenMPClauseKind C, bool AppliedToPointee,
2466 DefaultDataSharingAttributes DefaultAttr) {
2467 return isOpenMPPrivate(C) && !AppliedToPointee &&
2468 (DefaultAttr == DSA_firstprivate ||
2469 DefaultAttr == DSA_private);
2470 },
2471 [](OpenMPDirectiveKind) { return true; },
2472 DSAStack->isClauseParsingMode());
2473 if (DVarPrivate.CKind == OMPC_unknown)
2474 return nullptr;
2475
2476 VarDecl *VD = DSAStack->getImplicitFDCapExprDecl(FD);
2477 if (VD)
2478 return VD;
2479 if (getCurrentThisType().isNull())
2480 return nullptr;
2481 Expr *ThisExpr = BuildCXXThisExpr(SourceLocation(), getCurrentThisType(),
2482 /*IsImplicit=*/true);
2483 const CXXScopeSpec CS = CXXScopeSpec();
2484 Expr *ME = BuildMemberExpr(ThisExpr, /*IsArrow=*/true, SourceLocation(),
2487 /*HadMultipleCandidates=*/false,
2491 *this, FD->getIdentifier(), ME, DVarPrivate.CKind != OMPC_private,
2492 CurContext->getParent(), /*AsExpression=*/false);
2493 DeclRefExpr *VDPrivateRefExpr = buildDeclRefExpr(
2494 *this, CD, CD->getType().getNonReferenceType(), SourceLocation());
2495 VD = cast<VarDecl>(VDPrivateRefExpr->getDecl());
2496 DSAStack->addImplicitDefaultFirstprivateFD(FD, VD);
2497 return VD;
2498 }
2499 if (DVarPrivate.CKind != OMPC_unknown ||
2500 (VD && (DSAStack->getDefaultDSA() == DSA_none ||
2501 DSAStack->getDefaultDSA() == DSA_private ||
2502 DSAStack->getDefaultDSA() == DSA_firstprivate)))
2503 return VD ? VD : cast<VarDecl>(DVarPrivate.PrivateCopy->getDecl());
2504 }
2505 return nullptr;
2506}
2507
2508void Sema::adjustOpenMPTargetScopeIndex(unsigned &FunctionScopesIndex,
2509 unsigned Level) const {
2510 FunctionScopesIndex -= getOpenMPCaptureLevels(DSAStack->getDirective(Level));
2511}
2512
2513void Sema::startOpenMPLoop() {
2514 assert(LangOpts.OpenMP && "OpenMP must be enabled.");
2515 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective()))
2516 DSAStack->loopInit();
2517}
2518
2519void Sema::startOpenMPCXXRangeFor() {
2520 assert(LangOpts.OpenMP && "OpenMP must be enabled.");
2521 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective())) {
2522 DSAStack->resetPossibleLoopCounter();
2523 DSAStack->loopStart();
2524 }
2525}
2526
2527OpenMPClauseKind Sema::isOpenMPPrivateDecl(ValueDecl *D, unsigned Level,
2528 unsigned CapLevel) const {
2529 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2530 if (DSAStack->getCurrentDirective() != OMPD_unknown &&
2531 (!DSAStack->isClauseParsingMode() ||
2532 DSAStack->getParentDirective() != OMPD_unknown)) {
2533 DSAStackTy::DSAVarData DVarPrivate = DSAStack->hasDSA(
2534 D,
2535 [](OpenMPClauseKind C, bool AppliedToPointee,
2536 DefaultDataSharingAttributes DefaultAttr) {
2537 return isOpenMPPrivate(C) && !AppliedToPointee &&
2538 DefaultAttr == DSA_private;
2539 },
2540 [](OpenMPDirectiveKind) { return true; },
2541 DSAStack->isClauseParsingMode());
2542 if (DVarPrivate.CKind == OMPC_private && isa<OMPCapturedExprDecl>(D) &&
2543 DSAStack->isImplicitDefaultFirstprivateFD(cast<VarDecl>(D)) &&
2544 !DSAStack->isLoopControlVariable(D).first)
2545 return OMPC_private;
2546 }
2547 if (DSAStack->hasExplicitDirective(isOpenMPTaskingDirective, Level)) {
2548 bool IsTriviallyCopyable =
2549 D->getType().getNonReferenceType().isTriviallyCopyableType(Context) &&
2550 !D->getType()
2551 .getNonReferenceType()
2552 .getCanonicalType()
2553 ->getAsCXXRecordDecl();
2554 OpenMPDirectiveKind DKind = DSAStack->getDirective(Level);
2555 SmallVector<OpenMPDirectiveKind, 4> CaptureRegions;
2556 getOpenMPCaptureRegions(CaptureRegions, DKind);
2557 if (isOpenMPTaskingDirective(CaptureRegions[CapLevel]) &&
2558 (IsTriviallyCopyable ||
2559 !isOpenMPTaskLoopDirective(CaptureRegions[CapLevel]))) {
2560 if (DSAStack->hasExplicitDSA(
2561 D,
2562 [](OpenMPClauseKind K, bool) { return K == OMPC_firstprivate; },
2563 Level, /*NotLastprivate=*/true))
2564 return OMPC_firstprivate;
2565 DSAStackTy::DSAVarData DVar = DSAStack->getImplicitDSA(D, Level);
2566 if (DVar.CKind != OMPC_shared &&
2567 !DSAStack->isLoopControlVariable(D, Level).first && !DVar.RefExpr) {
2568 DSAStack->addImplicitTaskFirstprivate(Level, D);
2569 return OMPC_firstprivate;
2570 }
2571 }
2572 }
2573 if (isOpenMPLoopDirective(DSAStack->getCurrentDirective()) &&
2574 !isOpenMPLoopTransformationDirective(DSAStack->getCurrentDirective())) {
2575 if (DSAStack->getAssociatedLoops() > 0 && !DSAStack->isLoopStarted()) {
2576 DSAStack->resetPossibleLoopCounter(D);
2577 DSAStack->loopStart();
2578 return OMPC_private;
2579 }
2580 if ((DSAStack->getPossiblyLoopCunter() == D->getCanonicalDecl() ||
2581 DSAStack->isLoopControlVariable(D).first) &&
2582 !DSAStack->hasExplicitDSA(
2583 D, [](OpenMPClauseKind K, bool) { return K != OMPC_private; },
2584 Level) &&
2585 !isOpenMPSimdDirective(DSAStack->getCurrentDirective()))
2586 return OMPC_private;
2587 }
2588 if (const auto *VD = dyn_cast<VarDecl>(D)) {
2589 if (DSAStack->isThreadPrivate(const_cast<VarDecl *>(VD)) &&
2590 DSAStack->isForceVarCapturing() &&
2591 !DSAStack->hasExplicitDSA(
2592 D, [](OpenMPClauseKind K, bool) { return K == OMPC_copyin; },
2593 Level))
2594 return OMPC_private;
2595 }
2596 // User-defined allocators are private since they must be defined in the
2597 // context of target region.
2598 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective, Level) &&
2599 DSAStack->isUsesAllocatorsDecl(Level, D).value_or(
2600 DSAStackTy::UsesAllocatorsDeclKind::AllocatorTrait) ==
2601 DSAStackTy::UsesAllocatorsDeclKind::UserDefinedAllocator)
2602 return OMPC_private;
2603 return (DSAStack->hasExplicitDSA(
2604 D, [](OpenMPClauseKind K, bool) { return K == OMPC_private; },
2605 Level) ||
2606 (DSAStack->isClauseParsingMode() &&
2607 DSAStack->getClauseParsingMode() == OMPC_private) ||
2608 // Consider taskgroup reduction descriptor variable a private
2609 // to avoid possible capture in the region.
2610 (DSAStack->hasExplicitDirective(
2611 [](OpenMPDirectiveKind K) {
2612 return K == OMPD_taskgroup ||
2613 ((isOpenMPParallelDirective(K) ||
2614 isOpenMPWorksharingDirective(K)) &&
2615 !isOpenMPSimdDirective(K));
2616 },
2617 Level) &&
2618 DSAStack->isTaskgroupReductionRef(D, Level)))
2619 ? OMPC_private
2620 : OMPC_unknown;
2621}
2622
2623void Sema::setOpenMPCaptureKind(FieldDecl *FD, const ValueDecl *D,
2624 unsigned Level) {
2625 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2626 D = getCanonicalDecl(D);
2627 OpenMPClauseKind OMPC = OMPC_unknown;
2628 for (unsigned I = DSAStack->getNestingLevel() + 1; I > Level; --I) {
2629 const unsigned NewLevel = I - 1;
2630 if (DSAStack->hasExplicitDSA(
2631 D,
2632 [&OMPC](const OpenMPClauseKind K, bool AppliedToPointee) {
2633 if (isOpenMPPrivate(K) && !AppliedToPointee) {
2634 OMPC = K;
2635 return true;
2636 }
2637 return false;
2638 },
2639 NewLevel))
2640 break;
2641 if (DSAStack->checkMappableExprComponentListsForDeclAtLevel(
2642 D, NewLevel,
2643 [](OMPClauseMappableExprCommon::MappableExprComponentListRef,
2644 OpenMPClauseKind) { return true; })) {
2645 OMPC = OMPC_map;
2646 break;
2647 }
2648 if (DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective,
2649 NewLevel)) {
2650 OMPC = OMPC_map;
2651 if (DSAStack->mustBeFirstprivateAtLevel(
2652 NewLevel, getVariableCategoryFromDecl(LangOpts, D)))
2653 OMPC = OMPC_firstprivate;
2654 break;
2655 }
2656 }
2657 if (OMPC != OMPC_unknown)
2658 FD->addAttr(OMPCaptureKindAttr::CreateImplicit(Context, unsigned(OMPC)));
2659}
2660
2661bool Sema::isOpenMPTargetCapturedDecl(const ValueDecl *D, unsigned Level,
2662 unsigned CaptureLevel) const {
2663 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2664 // Return true if the current level is no longer enclosed in a target region.
2665
2667 getOpenMPCaptureRegions(Regions, DSAStack->getDirective(Level));
2668 const auto *VD = dyn_cast<VarDecl>(D);
2669 return VD && !VD->hasLocalStorage() &&
2670 DSAStack->hasExplicitDirective(isOpenMPTargetExecutionDirective,
2671 Level) &&
2672 Regions[CaptureLevel] != OMPD_task;
2673}
2674
2675bool Sema::isOpenMPGlobalCapturedDecl(ValueDecl *D, unsigned Level,
2676 unsigned CaptureLevel) const {
2677 assert(LangOpts.OpenMP && "OpenMP is not allowed");
2678 // Return true if the current level is no longer enclosed in a target region.
2679
2680 if (const auto *VD = dyn_cast<VarDecl>(D)) {
2681 if (!VD->hasLocalStorage()) {
2682 if (isInOpenMPTargetExecutionDirective())
2683 return true;
2684 DSAStackTy::DSAVarData TopDVar =
2685 DSAStack->getTopDSA(D, /*FromParent=*/false);
2686 unsigned NumLevels =
2687 getOpenMPCaptureLevels(DSAStack->getDirective(Level));
2688 if (Level == 0)
2689 // A non-file-scope static variable with default(firstprivate)
2690 // should be globally captured.
2691 return (NumLevels == CaptureLevel + 1 &&
2692 (TopDVar.CKind != OMPC_shared ||
2693 DSAStack->getDefaultDSA() == DSA_firstprivate));
2694 do {
2695 --Level;
2696 DSAStackTy::DSAVarData DVar = DSAStack->getImplicitDSA(D, Level);
2697 if (DVar.CKind != OMPC_shared)
2698 return true;
2699 } while (Level > 0);
2700 }
2701 }
2702 return true;
2703}
2704
2705void Sema::DestroyDataSharingAttributesStack() { delete DSAStack; }
2706
2707void Sema::ActOnOpenMPBeginDeclareVariant(SourceLocation Loc,
2708 OMPTraitInfo &TI) {
2709 OMPDeclareVariantScopes.push_back(OMPDeclareVariantScope(TI));
2710}
2711
2714 "Not in OpenMP declare variant scope!");
2715
2716 OMPDeclareVariantScopes.pop_back();
2717}
2718
2719void Sema::finalizeOpenMPDelayedAnalysis(const FunctionDecl *Caller,
2720 const FunctionDecl *Callee,
2721 SourceLocation Loc) {
2722 assert(LangOpts.OpenMP && "Expected OpenMP compilation mode.");
2723 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2724 OMPDeclareTargetDeclAttr::getDeviceType(Caller->getMostRecentDecl());
2725 // Ignore host functions during device analysis.
2726 if (LangOpts.OpenMPIsTargetDevice &&
2727 (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host))
2728 return;
2729 // Ignore nohost functions during host analysis.
2730 if (!LangOpts.OpenMPIsTargetDevice && DevTy &&
2731 *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost)
2732 return;
2733 const FunctionDecl *FD = Callee->getMostRecentDecl();
2734 DevTy = OMPDeclareTargetDeclAttr::getDeviceType(FD);
2735 if (LangOpts.OpenMPIsTargetDevice && DevTy &&
2736 *DevTy == OMPDeclareTargetDeclAttr::DT_Host) {
2737 // Diagnose host function called during device codegen.
2738 StringRef HostDevTy =
2739 getOpenMPSimpleClauseTypeName(OMPC_device_type, OMPC_DEVICE_TYPE_host);
2740 Diag(Loc, diag::err_omp_wrong_device_function_call) << HostDevTy << 0;
2741 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2742 diag::note_omp_marked_device_type_here)
2743 << HostDevTy;
2744 return;
2745 }
2746 if (!LangOpts.OpenMPIsTargetDevice && !LangOpts.OpenMPOffloadMandatory &&
2747 DevTy && *DevTy == OMPDeclareTargetDeclAttr::DT_NoHost) {
2748 // In OpenMP 5.2 or later, if the function has a host variant then allow
2749 // that to be called instead.
2750 auto &&HasHostAttr = [](const FunctionDecl *Callee) {
2751 for (OMPDeclareVariantAttr *A :
2752 Callee->specific_attrs<OMPDeclareVariantAttr>()) {
2753 auto *DeclRefVariant = cast<DeclRefExpr>(A->getVariantFuncRef());
2754 auto *VariantFD = cast<FunctionDecl>(DeclRefVariant->getDecl());
2755 std::optional<OMPDeclareTargetDeclAttr::DevTypeTy> DevTy =
2756 OMPDeclareTargetDeclAttr::getDeviceType(
2757 VariantFD->getMostRecentDecl());
2758 if (!DevTy || *DevTy == OMPDeclareTargetDeclAttr::DT_Host)
2759 return true;
2760 }
2761 return false;
2762 };
2763 if (getLangOpts().OpenMP >= 52 &&
2764 Callee->hasAttr<OMPDeclareVariantAttr>() && HasHostAttr(Callee))
2765 return;
2766 // Diagnose nohost function called during host codegen.
2767 StringRef NoHostDevTy = getOpenMPSimpleClauseTypeName(
2768 OMPC_device_type, OMPC_DEVICE_TYPE_nohost);
2769 Diag(Loc, diag::err_omp_wrong_device_function_call) << NoHostDevTy << 1;
2770 Diag(*OMPDeclareTargetDeclAttr::getLocation(FD),
2771 diag::note_omp_marked_device_type_here)
2772 << NoHostDevTy;
2773 }
2774}
2775
2776void Sema::StartOpenMPDSABlock(OpenMPDirectiveKind DKind,
2777 const DeclarationNameInfo &DirName,
2778 Scope *CurScope, SourceLocation Loc) {
2779 DSAStack->push(DKind, DirName, CurScope, Loc);
2780 PushExpressionEvaluationContext(
2781 ExpressionEvaluationContext::PotentiallyEvaluated);
2782}
2783
2784void Sema::StartOpenMPClause(OpenMPClauseKind K) {
2785 DSAStack->setClauseParsingMode(K);
2786}
2787
2788void Sema::EndOpenMPClause() {
2789 DSAStack->setClauseParsingMode(/*K=*/OMPC_unknown);
2790 CleanupVarDeclMarking();
2791}
2792
2793static std::pair<ValueDecl *, bool>
2794getPrivateItem(Sema &S, Expr *&RefExpr, SourceLocation &ELoc,
2795 SourceRange &ERange, bool AllowArraySection = false,
2796 StringRef DiagType = "");
2797
2798/// Check consistency of the reduction clauses.
2799static void checkReductionClauses(Sema &S, DSAStackTy *Stack,
2800 ArrayRef<OMPClause *> Clauses) {
2801 bool InscanFound = false;
2802 SourceLocation InscanLoc;
2803 // OpenMP 5.0, 2.19.5.4 reduction Clause, Restrictions.
2804 // A reduction clause without the inscan reduction-modifier may not appear on
2805 // a construct on which a reduction clause with the inscan reduction-modifier
2806 // appears.
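 // Illustrative example of the restriction: the following is diagnosed
 // because an inscan and a non-inscan reduction appear on the same construct:
 //   #pragma omp for reduction(inscan, +: a) reduction(+: b)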
2807 for (OMPClause *C : Clauses) {
2808 if (C->getClauseKind() != OMPC_reduction)
2809 continue;
2810 auto *RC = cast<OMPReductionClause>(C);
2811 if (RC->getModifier() == OMPC_REDUCTION_inscan) {
2812 InscanFound = true;
2813 InscanLoc = RC->getModifierLoc();
2814 continue;
2815 }
2816 if (RC->getModifier() == OMPC_REDUCTION_task) {
2817 // OpenMP 5.0, 2.19.5.4 reduction Clause.
2818 // A reduction clause with the task reduction-modifier may only appear on
2819 // a parallel construct, a worksharing construct or a combined or
2820 // composite construct for which any of the aforementioned constructs is a
2821 // constituent construct and simd or loop are not constituent constructs.
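 // Illustrative example: 'reduction(task, +: x)' is accepted on
 //   #pragma omp parallel for
 // but is diagnosed on a simd construct such as '#pragma omp for simd'.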
2822 OpenMPDirectiveKind CurDir = Stack->getCurrentDirective();
2823 if (!(isOpenMPParallelDirective(CurDir) ||
2824 isOpenMPWorksharingDirective(CurDir)) ||
2825 isOpenMPSimdDirective(CurDir))
2826 S.Diag(RC->getModifierLoc(),
2827 diag::err_omp_reduction_task_not_parallel_or_worksharing);
2828 continue;
2829 }
2830 }
2831 if (InscanFound) {
2832 for (OMPClause *C : Clauses) {
2833 if (C->getClauseKind() != OMPC_reduction)
2834 continue;
2835 auto *RC = cast<OMPReductionClause>(C);
2836 if (RC->getModifier() != OMPC_REDUCTION_inscan) {
2837 S.Diag(RC->getModifier() == OMPC_REDUCTION_unknown
2838 ? RC->getBeginLoc()
2839 : RC->getModifierLoc(),
2840 diag::err_omp_inscan_reduction_expected);
2841 S.Diag(InscanLoc, diag::note_omp_previous_inscan_reduction);
2842 continue;
2843 }
2844 for (Expr *Ref : RC->varlists()) {
2845 assert(Ref && "NULL expr in OpenMP nontemporal clause.");
2846 SourceLocation ELoc;
2847 SourceRange ERange;
2848 Expr *SimpleRefExpr = Ref;
2849 auto Res = getPrivateItem(S, SimpleRefExpr, ELoc, ERange,
2850 /*AllowArraySection=*/true);
2851 ValueDecl *D = Res.first;
2852 if (!D)
2853 continue;
2854 if (!Stack->isUsedInScanDirective(getCanonicalDecl(D))) {
2855 S.Diag(Ref->getExprLoc(),
2856 diag::err_omp_reduction_not_inclusive_exclusive)
2857 << Ref->getSourceRange();
2858 }
2859 }
2860 }
2861 }
2862}
2863
2864static void checkAllocateClauses(Sema &S, DSAStackTy *Stack,
2865 ArrayRef<OMPClause *> Clauses);
2866static DeclRefExpr *buildCapture(Sema &S, ValueDecl *D, Expr *CaptureExpr,
2867 bool WithInit);
2868
2869static void reportOriginalDsa(Sema &SemaRef, const DSAStackTy *Stack,
2870 const ValueDecl *D,
2871 const DSAStackTy::DSAVarData &DVar,
2872 bool IsLoopIterVar = false);
2873
2874void Sema::EndOpenMPDSABlock(Stmt *CurDirective) {
2875 // OpenMP [2.14.3.5, Restrictions, C/C++, p.1]
2876 // A variable of class type (or array thereof) that appears in a lastprivate
2877 // clause requires an accessible, unambiguous default constructor for the
2878 // class type, unless the list item is also specified in a firstprivate
2879 // clause.
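 // Illustrative example: for 'struct S { S(int); }; S s(0);', a directive with
 // 'lastprivate(s)' is diagnosed because S has no default constructor, unless
 // 's' also appears in a firstprivate clause on the same directive.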
2880 if (const auto *D = dyn_cast_or_null<OMPExecutableDirective>(CurDirective)) {
2881 for (OMPClause *C : D->clauses()) {
2882 if (auto *Clause = dyn_cast<OMPLastprivateClause>(C)) {
2883 SmallVector<Expr *, 8> PrivateCopies;
2884 for (Expr *DE : Clause->varlists()) {
2885 if (DE->isValueDependent() || DE->isTypeDependent()) {
2886 PrivateCopies.push_back(nullptr);
2887 continue;
2888 }
2889 auto *DRE = cast<DeclRefExpr>(DE->IgnoreParens());
2890 auto *VD = cast<VarDecl>(DRE->getDecl());
2891 QualType Type = VD->getType().getNonReferenceType();
2892 const DSAStackTy::DSAVarData DVar =
2893 DSAStack->getTopDSA(VD, /*FromParent=*/false);
2894 if (DVar.CKind == OMPC_lastprivate) {
2895 // Generate helper private variable and initialize it with the
2896 // default value. The address of the original variable is replaced
2897 // by the address of the new private variable in CodeGen. This new
2898 // variable is not added to IdResolver, so the code in the OpenMP
2899 // region uses original variable for proper diagnostics.
2900 VarDecl *VDPrivate = buildVarDecl(
2901 *this, DE->getExprLoc(), Type.getUnqualifiedType(),
2902 VD->getName(), VD->hasAttrs() ? &VD->getAttrs() : nullptr, DRE);
2903 ActOnUninitializedDecl(VDPrivate);
2904 if (VDPrivate->isInvalidDecl()) {
2905 PrivateCopies.push_back(nullptr);
2906 continue;
2907 }
2908 PrivateCopies.push_back(buildDeclRefExpr(
2909 *this, VDPrivate, DE->getType(), DE->getExprLoc()));
2910 } else {
2911 // The variable is also a firstprivate, so initialization sequence
2912 // for private copy is generated already.
2913 PrivateCopies.push_back(nullptr);
2914 }
2915 }
2916 Clause->setPrivateCopies(PrivateCopies);
2917 continue;
2918 }
2919 // Finalize nontemporal clause by handling private copies, if any.
2920 if (auto *Clause = dyn_cast<OMPNontemporalClause>(C)) {
2921 SmallVector<Expr *, 8> PrivateRefs;
2922 for (Expr *RefExpr : Clause->varlists()) {
2923 assert(RefExpr && "NULL expr in OpenMP nontemporal clause.");
2924 SourceLocation ELoc;
2925 SourceRange ERange;
2926 Expr *SimpleRefExpr = RefExpr;
2927 auto Res = getPrivateItem(*this, SimpleRefExpr, ELoc, ERange);
2928 if (Res.second)
2929 // It will be analyzed later.
2930 PrivateRefs.push_back(RefExpr);
2931 ValueDecl *D = Res.first;
2932 if (!D)
2933 continue;
2934
2935 const DSAStackTy::DSAVarData DVar =
2936 DSAStack->getTopDSA(D, /*FromParent=*/false);
2937 PrivateRefs.push_back(DVar.PrivateCopy ? DVar.PrivateCopy
2938 : SimpleRefExpr);
2939 }
2940 Clause->setPrivateRefs(PrivateRefs);
2941 continue;
2942 }
2943 if (auto *Clause = dyn_cast<OMPUsesAllocatorsClause>(C)) {
2944 for (unsigned I = 0, E = Clause->getNumberOfAllocators(); I < E; ++I) {
2945 OMPUsesAllocatorsClause::Data D = Clause->getAllocatorData(I);
2946 auto *DRE = dyn_cast<DeclRefExpr>(D.Allocator->IgnoreParenImpCasts());
2947 if (!DRE)
2948 continue;
2949 ValueDecl *VD = DRE->getDecl();
2950 if (!VD || !isa<VarDecl>(VD))
2951 continue;
2952 DSAStackTy::DSAVarData DVar =
2953 DSAStack->getTopDSA(VD, /*FromParent=*/false);
2954 // OpenMP [2.12.5, target Construct]
2955 // Memory allocators that appear in a uses_allocators clause cannot
2956 // appear in other data-sharing attribute clauses or data-mapping
2957 // attribute clauses in the same construct.
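 // Illustrative example (with a user-defined allocator name): the following
 // combination is diagnosed because the allocator also appears in a map
 // clause:
 //   #pragma omp target uses_allocators(my_alloc(my_traits)) map(to: my_alloc)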
2958 Expr *MapExpr = nullptr;
2959 if (DVar.RefExpr ||
2960 DSAStack->checkMappableExprComponentListsForDecl(
2961 VD, /*CurrentRegionOnly=*/true,
2962 [VD, &MapExpr](
2963 OMPClauseMappableExprCommon::MappableExprComponentListRef
2964 MapExprComponents,
2965 OpenMPClauseKind) {
2966 auto MI = MapExprComponents.rbegin();
2967 auto ME = MapExprComponents.rend();
2968 if (MI != ME &&
2969 MI->getAssociatedDeclaration()->getCanonicalDecl() ==
2970 VD->getCanonicalDecl()) {
2971 MapExpr = MI->getAssociatedExpression();
2972 return true;
2973 }
2974 return false;
2975 })) {
2976 Diag(D.Allocator->getExprLoc(),
2977 diag::err_omp_allocator_used_in_clauses)
2978 << D.Allocator->getSourceRange();
2979 if (DVar.RefExpr)
2980 reportOriginalDsa(*this, DSAStack, VD, DVar);
2981 else
2982 Diag(MapExpr->getExprLoc(), diag::note_used_here)
2983 << MapExpr->getSourceRange();
2984 }
2985 }
2986 continue;
2987 }
2988 }
2989 // Check allocate clauses.
2990 if (!CurContext->isDependentContext())
2991 checkAllocateClauses(*this, DSAStack, D->clauses());
2992 checkReductionClauses(*this, DSAStack, D->clauses());
2993 }
2994
2995 DSAStack->pop();
2996 DiscardCleanupsInEvaluationContext();
2997 PopExpressionEvaluationContext();
2998}
2999
3000static bool FinishOpenMPLinearClause(OMPLinearClause &Clause, DeclRefExpr *IV,
3001 Expr *NumIterations, Sema &SemaRef,
3002 Scope *S, DSAStackTy *Stack);
3003
3004namespace {
3005
3006class VarDeclFilterCCC final : public CorrectionCandidateCallback {
3007private:
3008 Sema &SemaRef;
3009
3010public:
3011 explicit VarDeclFilterCCC(Sema &S) : SemaRef(S) {}
3012 bool ValidateCandidate(const TypoCorrection &Candidate) override {
3013 NamedDecl *ND = Candidate.getCorrectionDecl();
3014 if (const auto *VD = dyn_cast_or_null<VarDecl>(ND)) {
3015 return VD->hasGlobalStorage() &&
3016 SemaRef.isDeclInScope(ND, SemaRef.getCurLexicalContext(),
3017 SemaRef.getCurScope());
3018 }
3019 return false;
3020 }
3021
3022 std::unique_ptr<CorrectionCandidateCallback> clone() override {
3023 return std::make_unique<VarDeclFilterCCC>(*this);
3024 }
3025};
3026
3027class VarOrFuncDeclFilterCCC final : public CorrectionCandidateCallback {
3028private:
3029 Sema &SemaRef;
3030
3031public:
3032 explicit VarOrFuncDeclFilterCCC(Sema &S) : SemaRef(S) {}
3033 bool ValidateCandidate(const TypoCorrection &Candidate) override {
3034 NamedDecl *ND = Candidate.getCorrectionDecl();
3035 if (ND && ((isa<VarDecl>(ND) && ND->getKind() == Decl::Var) ||
3036 isa<FunctionDecl>(ND))) {
3037 return SemaRef.isDeclInScope(ND, SemaRef.getCurLexicalContext(),
3038 SemaRef.getCurScope());
3039 }
3040 return false;
3041 }
3042
3043 std::unique_ptr<CorrectionCandidateCallback> clone() override {
3044 return std::make_unique<VarOrFuncDeclFilterCCC>(*this);
3045 }
3046};
3047
3048} // namespace
3049
3050ExprResult Sema::ActOnOpenMPIdExpression(Scope *CurScope,
3051 CXXScopeSpec &ScopeSpec,
3052 const DeclarationNameInfo &Id,
3053 OpenMPDirectiveKind Kind) {
3054 LookupResult Lookup(*this, Id, LookupOrdinaryName);
3055 LookupParsedName(Lookup, CurScope, &ScopeSpec, true);
3056
3057 if (Lookup.isAmbiguous())
3058 return ExprError();
3059
3060 VarDecl *VD;
3061 if (!Lookup.isSingleResult()) {
3062 VarDeclFilterCCC CCC(*this);
3063 if (TypoCorrection Corrected =
3064 CorrectTypo(Id, LookupOrdinaryName, CurScope, nullptr, CCC,
3065 CTK_ErrorRecovery)) {
3066 diagnoseTypo(Corrected,
3067 PDiag(Lookup.empty()
3068 ? diag::err_undeclared_var_use_suggest
3069 : diag::err_omp_expected_var_arg_suggest)
3070 << Id.getName());
3071 VD = Corrected.getCorrectionDeclAs<VarDecl>();
3072 } else {
3073 Diag(Id.getLoc(), Lookup.empty() ? diag::err_undeclared_var_use
3074 : diag::err_omp_expected_var_arg)
3075 << Id.getName();
3076 return ExprError();
3077 }
3078 } else if (!(VD = Lookup.getAsSingle<VarDecl>())) {
3079 Diag(Id.getLoc(), diag::err_omp_expected_var_arg) << Id.getName();
3080 Diag(Lookup.getFoundDecl()->getLocation(), diag::note_declared_at);
3081 return ExprError();
3082 }
3083 Lookup.suppressDiagnostics();
3084
3085 // OpenMP [2.9.2, Syntax, C/C++]
3086 // Variables must be file-scope, namespace-scope, or static block-scope.
3087 if (Kind == OMPD_threadprivate && !VD->hasGlobalStorage()) {
3088 Diag(Id.getLoc(), diag::err_omp_global_var_arg)
3089 << getOpenMPDirectiveName(Kind) << !VD->isStaticLocal();
3090 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3091 VarDecl::DeclarationOnly;
3092 Diag(VD->getLocation(),
3093 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3094 << VD;
3095 return ExprError();
3096 }
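  // Editorial illustration (added note, not part of the upstream source):
  // the check above rejects, e.g., an automatic block-scope variable:
  //   void foo() {
  //     int x;
  //   #pragma omp threadprivate(x) // diagnosed: 'x' is neither file-scope,
  //   }                            // namespace-scope, nor static block-scope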
3097
3098 VarDecl *CanonicalVD = VD->getCanonicalDecl();
3099 NamedDecl *ND = CanonicalVD;
3100 // OpenMP [2.9.2, Restrictions, C/C++, p.2]
3101 // A threadprivate directive for file-scope variables must appear outside
3102 // any definition or declaration.
3103 if (CanonicalVD->getDeclContext()->isTranslationUnit() &&
3104 !getCurLexicalContext()->isTranslationUnit()) {
3105 Diag(Id.getLoc(), diag::err_omp_var_scope)
3106 << getOpenMPDirectiveName(Kind) << VD;
3107 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3108 VarDecl::DeclarationOnly;
3109 Diag(VD->getLocation(),
3110 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3111 << VD;
3112 return ExprError();
3113 }
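  // Editorial illustration (added note, not part of the upstream source):
  //   int gv;
  //   void foo() {
  //   #pragma omp threadprivate(gv) // diagnosed: a directive for a file-scope
  //   }                             // variable must appear at file scope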
3114 // OpenMP [2.9.2, Restrictions, C/C++, p.3]
3115 // A threadprivate directive for static class member variables must appear
3116 // in the class definition, in the same scope in which the member
3117 // variables are declared.
3118 if (CanonicalVD->isStaticDataMember() &&
3119 !CanonicalVD->getDeclContext()->Equals(getCurLexicalContext())) {
3120 Diag(Id.getLoc(), diag::err_omp_var_scope)
3121 << getOpenMPDirectiveName(Kind) << VD;
3122 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3123 VarDecl::DeclarationOnly;
3124 Diag(VD->getLocation(),
3125 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3126 << VD;
3127 return ExprError();
3128 }
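  // Editorial illustration (added note, not part of the upstream source):
  //   struct S { static int sm; };
  //   #pragma omp threadprivate(S::sm) // diagnosed: must appear inside the
  //                                    // definition of S, in the same scope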
3129 // OpenMP [2.9.2, Restrictions, C/C++, p.4]
3130 // A threadprivate directive for namespace-scope variables must appear
3131 // outside any definition or declaration other than the namespace
3132 // definition itself.
3133 if (CanonicalVD->getDeclContext()->isNamespace() &&
3134 (!getCurLexicalContext()->isFileContext() ||
3135 !getCurLexicalContext()->Encloses(CanonicalVD->getDeclContext()))) {
3136 Diag(Id.getLoc(), diag::err_omp_var_scope)
3137 << getOpenMPDirectiveName(Kind) << VD;
3138 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3139 VarDecl::DeclarationOnly;
3140 Diag(VD->getLocation(),
3141 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3142 << VD;
3143 return ExprError();
3144 }
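  // Editorial illustration (added note, not part of the upstream source):
  //   namespace N { int nv; }
  //   void foo() {
  //   #pragma omp threadprivate(N::nv) // diagnosed: must appear at file scope
  //   }                                // or within namespace N itself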
3145 // OpenMP [2.9.2, Restrictions, C/C++, p.6]
3146 // A threadprivate directive for static block-scope variables must appear
3147 // in the scope of the variable and not in a nested scope.
3148 if (CanonicalVD->isLocalVarDecl() && CurScope &&
3149 !isDeclInScope(ND, getCurLexicalContext(), CurScope)) {
3150 Diag(Id.getLoc(), diag::err_omp_var_scope)
3151 << getOpenMPDirectiveName(Kind) << VD;
3152 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3153 VarDecl::DeclarationOnly;
3154 Diag(VD->getLocation(),
3155 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3156 << VD;
3157 return ExprError();
3158 }
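  // Editorial illustration (added note, not part of the upstream source):
  //   void foo() {
  //     static int sl;
  //     {
  //   #pragma omp threadprivate(sl) // diagnosed: nested scope, not the scope
  //     }                           // in which 'sl' is declared
  //   }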
3159
3160 // OpenMP [2.9.2, Restrictions, C/C++, p.2-6]
3161 // A threadprivate directive must lexically precede all references to any
3162 // of the variables in its list.
3163 if (Kind == OMPD_threadprivate && VD->isUsed() &&
3164 !DSAStack->isThreadPrivate(VD)) {
3165 Diag(Id.getLoc(), diag::err_omp_var_used)
3166 << getOpenMPDirectiveName(Kind) << VD;
3167 return ExprError();
3168 }
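  // Editorial illustration (added note, not part of the upstream source):
  //   int gv;
  //   int use() { return gv; }
  //   #pragma omp threadprivate(gv) // diagnosed: 'gv' was already referenced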
3169
3170 QualType ExprType = VD->getType().getNonReferenceType();
3172 SourceLocation(), VD,
3173 /*RefersToEnclosingVariableOrCapture=*/false,
3174 Id.getLoc(), ExprType, VK_LValue);
3175}
3176
3179 ArrayRef<Expr *> VarList) {
3180 if (OMPThreadPrivateDecl *D = CheckOMPThreadPrivateDecl(Loc, VarList)) {
3181 CurContext->addDecl(D);
3183 }
3184 return nullptr;
3185}
3186
3187namespace {
3188class LocalVarRefChecker final
3189 : public ConstStmtVisitor<LocalVarRefChecker, bool> {
3190 Sema &SemaRef;
3191
3192public:
3193 bool VisitDeclRefExpr(const DeclRefExpr *E) {
3194 if (const auto *VD = dyn_cast<VarDecl>(E->getDecl())) {
3195 if (VD->hasLocalStorage()) {
3196 SemaRef.Diag(E->getBeginLoc(),
3197 diag::err_omp_local_var_in_threadprivate_init)
3198 << E->getSourceRange();
3199 SemaRef.Diag(VD->getLocation(), diag::note_defined_here)
3200 << VD << VD->getSourceRange();
3201 return true;
3202 }
3203 }
3204 return false;
3205 }
3206 bool VisitStmt(const Stmt *S) {
3207 for (const Stmt *Child : S->children()) {
3208 if (Child && Visit(Child))
3209 return true;
3210 }
3211 return false;
3212 }
3213 explicit LocalVarRefChecker(Sema &SemaRef) : SemaRef(SemaRef) {}
3214};
3215} // namespace
3216
3220 for (Expr *RefExpr : VarList) {
3221 auto *DE = cast<DeclRefExpr>(RefExpr);
3222 auto *VD = cast<VarDecl>(DE->getDecl());
3223 SourceLocation ILoc = DE->getExprLoc();
3224
3225 // Mark variable as used.
3226 VD->setReferenced();
3227 VD->markUsed(Context);
3228
3229 QualType QType = VD->getType();
3230 if (QType->isDependentType() || QType->isInstantiationDependentType()) {
3231 // It will be analyzed later.
3232 Vars.push_back(DE);
3233 continue;
3234 }
3235
3236 // OpenMP [2.9.2, Restrictions, C/C++, p.10]
3237 // A threadprivate variable must not have an incomplete type.
3238 if (RequireCompleteType(ILoc, VD->getType(),
3239 diag::err_omp_threadprivate_incomplete_type)) {
3240 continue;
3241 }
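    // Editorial illustration (added note, not part of the upstream source):
    //   struct Incomplete;
    //   extern struct Incomplete i;
    //   #pragma omp threadprivate(i) // diagnosed: incomplete type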
3242
3243 // OpenMP [2.9.2, Restrictions, C/C++, p.10]
3244 // A threadprivate variable must not have a reference type.
3245 if (VD->getType()->isReferenceType()) {
3246 Diag(ILoc, diag::err_omp_ref_type_arg)
3247 << getOpenMPDirectiveName(OMPD_threadprivate) << VD->getType();
3248 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3249 VarDecl::DeclarationOnly;
3250 Diag(VD->getLocation(),
3251 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3252 << VD;
3253 continue;
3254 }
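    // Editorial illustration (added note, not part of the upstream source,
    // C++ only):
    //   int x;
    //   int &r = x;
    //   #pragma omp threadprivate(r) // diagnosed: reference type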
3255
3256 // Check if this is a TLS variable. If TLS is not supported, produce
3257 // the corresponding diagnostic.
3258 if ((VD->getTLSKind() != VarDecl::TLS_None &&
3259 !(VD->hasAttr<OMPThreadPrivateDeclAttr>() &&
3260 getLangOpts().OpenMPUseTLS &&
3261 getASTContext().getTargetInfo().isTLSSupported())) ||
3262 (VD->getStorageClass() == SC_Register && VD->hasAttr<AsmLabelAttr>() &&
3263 !VD->isLocalVarDecl())) {
3264 Diag(ILoc, diag::err_omp_var_thread_local)
3265 << VD << ((VD->getTLSKind() != VarDecl::TLS_None) ? 0 : 1);
3266 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3267 VarDecl::DeclarationOnly;
3268 Diag(VD->getLocation(),
3269 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3270 << VD;
3271 continue;
3272 }
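    // Editorial illustration (added note, not part of the upstream source):
    // depending on target TLS support and the OpenMPUseTLS option, an
    // already thread-local variable may be rejected here, e.g.
    //   static __thread int t;
    //   #pragma omp threadprivate(t)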
3273
3274 // Check if the initial value of the threadprivate variable references a
3275 // variable with local storage (this is not supported by the runtime).
3276 if (const Expr *Init = VD->getAnyInitializer()) {
3277 LocalVarRefChecker Checker(*this);
3278 if (Checker.Visit(Init))
3279 continue;
3280 }
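    // Editorial illustration (added note, not part of the upstream source,
    // C++ only):
    //   void foo(int Arg) {
    //     static int s = Arg; // initializer refers to local storage
    //   #pragma omp threadprivate(s) // diagnosed by the checker above
    //   }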
3281
3282 Vars.push_back(RefExpr);
3283 DSAStack->addDSA(VD, DE, OMPC_threadprivate);
3284 VD->addAttr(OMPThreadPrivateDeclAttr::CreateImplicit(
3285 Context, SourceRange(Loc, Loc)));
3287 ML->DeclarationMarkedOpenMPThreadPrivate(VD);
3288 }
3289 OMPThreadPrivateDecl *D = nullptr;
3290 if (!Vars.empty()) {
3292 Vars);
3293 D->setAccess(AS_public);
3294 }
3295 return D;
3296}
3297
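// Editorial note (added, not part of the upstream source): getAllocatorKind
// below classifies an allocator expression by profiling it (structurally, via
// Expr::Profile) against the predefined allocator expressions recorded on the
// DSA stack; any expression that matches none of them is treated as a
// user-defined allocator.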
3298static OMPAllocateDeclAttr::AllocatorTypeTy
3299getAllocatorKind(Sema &S, DSAStackTy *Stack, Expr *Allocator) {
3300 if (!Allocator)
3301 return OMPAllocateDeclAttr::OMPNullMemAlloc;
3302 if (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3303 Allocator->isInstantiationDependent() ||
3304 Allocator->containsUnexpandedParameterPack())
3305 return OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3306 auto AllocatorKindRes = OMPAllocateDeclAttr::OMPUserDefinedMemAlloc;
3307 llvm::FoldingSetNodeID AEId;
3308 const Expr *AE = Allocator->IgnoreParenImpCasts();
3309 AE->IgnoreImpCasts()->Profile(AEId, S.getASTContext(), /*Canonical=*/true);
3310 for (int I = 0; I < OMPAllocateDeclAttr::OMPUserDefinedMemAlloc; ++I) {
3311 auto AllocatorKind = static_cast<OMPAllocateDeclAttr::AllocatorTypeTy>(I);
3312 const Expr *DefAllocator = Stack->getAllocator(AllocatorKind);
3313 llvm::FoldingSetNodeID DAEId;
3314 DefAllocator->IgnoreImpCasts()->Profile(DAEId, S.getASTContext(),
3315 /*Canonical=*/true);
3316 if (AEId == DAEId) {
3317 AllocatorKindRes = AllocatorKind;
3318 break;
3319 }
3320 }
3321 return AllocatorKindRes;
3322}
3323
3325 Sema &S, DSAStackTy *Stack, Expr *RefExpr, VarDecl *VD,
3326 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind, Expr *Allocator) {
3327 if (!VD->hasAttr<OMPAllocateDeclAttr>())
3328 return false;
3329 const auto *A = VD->getAttr<OMPAllocateDeclAttr>();
3330 Expr *PrevAllocator = A->getAllocator();
3331 OMPAllocateDeclAttr::AllocatorTypeTy PrevAllocatorKind =
3332 getAllocatorKind(S, Stack, PrevAllocator);
3333 bool AllocatorsMatch = AllocatorKind == PrevAllocatorKind;
3334 if (AllocatorsMatch &&
3335 AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc &&
3336 Allocator && PrevAllocator) {
3337 const Expr *AE = Allocator->IgnoreParenImpCasts();
3338 const Expr *PAE = PrevAllocator->IgnoreParenImpCasts();
3339 llvm::FoldingSetNodeID AEId, PAEId;
3340 AE->Profile(AEId, S.Context, /*Canonical=*/true);
3341 PAE->Profile(PAEId, S.Context, /*Canonical=*/true);
3342 AllocatorsMatch = AEId == PAEId;
3343 }
3344 if (!AllocatorsMatch) {
3345 SmallString<256> AllocatorBuffer;
3346 llvm::raw_svector_ostream AllocatorStream(AllocatorBuffer);
3347 if (Allocator)
3348 Allocator->printPretty(AllocatorStream, nullptr, S.getPrintingPolicy());
3349 SmallString<256> PrevAllocatorBuffer;
3350 llvm::raw_svector_ostream PrevAllocatorStream(PrevAllocatorBuffer);
3351 if (PrevAllocator)
3352 PrevAllocator->printPretty(PrevAllocatorStream, nullptr,
3353 S.getPrintingPolicy());
3354
3355 SourceLocation AllocatorLoc =
3356 Allocator ? Allocator->getExprLoc() : RefExpr->getExprLoc();
3357 SourceRange AllocatorRange =
3358 Allocator ? Allocator->getSourceRange() : RefExpr->getSourceRange();
3359 SourceLocation PrevAllocatorLoc =
3360 PrevAllocator ? PrevAllocator->getExprLoc() : A->getLocation();
3361 SourceRange PrevAllocatorRange =
3362 PrevAllocator ? PrevAllocator->getSourceRange() : A->getRange();
3363 S.Diag(AllocatorLoc, diag::warn_omp_used_different_allocator)
3364 << (Allocator ? 1 : 0) << AllocatorStream.str()
3365 << (PrevAllocator ? 1 : 0) << PrevAllocatorStream.str()
3366 << AllocatorRange;
3367 S.Diag(PrevAllocatorLoc, diag::note_omp_previous_allocator)
3368 << PrevAllocatorRange;
3369 return true;
3370 }
3371 return false;
3372}
3373
3374static void
3376 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind,
3377 Expr *Allocator, Expr *Alignment, SourceRange SR) {
3378 if (VD->hasAttr<OMPAllocateDeclAttr>())
3379 return;
3380 if (Alignment &&
3381 (Alignment->isTypeDependent() || Alignment->isValueDependent() ||
3382 Alignment->isInstantiationDependent() ||
3383 Alignment->containsUnexpandedParameterPack()))
3384 // Apply later when we have a usable value.
3385 return;
3386 if (Allocator &&
3387 (Allocator->isTypeDependent() || Allocator->isValueDependent() ||
3388 Allocator->isInstantiationDependent() ||
3389 Allocator->containsUnexpandedParameterPack()))
3390 return;
3391 auto *A = OMPAllocateDeclAttr::CreateImplicit(S.Context, AllocatorKind,
3392 Allocator, Alignment, SR);
3393 VD->addAttr(A);
3395 ML->DeclarationMarkedOpenMPAllocate(VD, A);
3396}
3397
3400 ArrayRef<OMPClause *> Clauses,
3401 DeclContext *Owner) {
3402 assert(Clauses.size() <= 2 && "Expected at most two clauses.");
3403 Expr *Alignment = nullptr;
3404 Expr *Allocator = nullptr;
3405 if (Clauses.empty()) {
3406 // OpenMP 5.0, 2.11.3 allocate Directive, Restrictions.
3407 // allocate directives that appear in a target region must specify an
3408 // allocator clause unless a requires directive with the dynamic_allocators
3409 // clause is present in the same compilation unit.
3410 if (LangOpts.OpenMPIsTargetDevice &&
3411 !DSAStack->hasRequiresDeclWithClause<OMPDynamicAllocatorsClause>())
3412 targetDiag(Loc, diag::err_expected_allocator_clause);
3413 } else {
3414 for (const OMPClause *C : Clauses)
3415 if (const auto *AC = dyn_cast<OMPAllocatorClause>(C))
3416 Allocator = AC->getAllocator();
3417 else if (const auto *AC = dyn_cast<OMPAlignClause>(C))
3418 Alignment = AC->getAlignment();
3419 else
3420 llvm_unreachable("Unexpected clause on allocate directive");
3421 }
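  // Editorial illustration (added note, not part of the upstream source):
  // when compiling for a target device without 'requires dynamic_allocators',
  // a bare '#pragma omp allocate(x)' is diagnosed here, while
  // '#pragma omp allocate(x) allocator(omp_default_mem_alloc)' is not.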
3422 OMPAllocateDeclAttr::AllocatorTypeTy AllocatorKind =
3423 getAllocatorKind(*this, DSAStack, Allocator);
3425 for (Expr *RefExpr : VarList) {
3426 auto *DE = cast<DeclRefExpr>(RefExpr);
3427 auto *VD = cast<VarDecl>(DE->getDecl());
3428
3429 // Check if this is a TLS variable or global register.
3430 if (VD->getTLSKind() != VarDecl::TLS_None ||
3431 VD->hasAttr<OMPThreadPrivateDeclAttr>() ||
3432 (VD->getStorageClass() == SC_Register && VD->hasAttr<AsmLabelAttr>() &&
3433 !VD->isLocalVarDecl()))
3434 continue;
3435
3436 // If the variable is used several times in allocate directives, the same
3437 // allocator must be used.
3438 if (checkPreviousOMPAllocateAttribute(*this, DSAStack, RefExpr, VD,
3439 AllocatorKind, Allocator))
3440 continue;
3441
3442 // OpenMP, 2.11.3 allocate Directive, Restrictions, C / C++
3443 // If a list item has a static storage type, the allocator expression in the
3444 // allocator clause must be a constant expression that evaluates to one of
3445 // the predefined memory allocator values.
3446 if (Allocator && VD->hasGlobalStorage()) {
3447 if (AllocatorKind == OMPAllocateDeclAttr::OMPUserDefinedMemAlloc) {
3448 Diag(Allocator->getExprLoc(),
3449 diag::err_omp_expected_predefined_allocator)
3450 << Allocator->getSourceRange();
3451 bool IsDecl = VD->isThisDeclarationADefinition(Context) ==
3452 VarDecl::DeclarationOnly;
3453 Diag(VD->getLocation(),
3454 IsDecl ? diag::note_previous_decl : diag::note_defined_here)
3455 << VD;
3456 continue;
3457 }
3458 }
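    // Editorial illustration (added note, not part of the upstream source;
    // 'MyAlloc' is a hypothetical user-defined allocator):
    //   extern omp_allocator_handle_t MyAlloc;
    //   static int Buf[64];
    //   #pragma omp allocate(Buf) allocator(MyAlloc) // diagnosed: a variable
    //       // with static storage needs a predefined allocator such as
    //       // omp_default_mem_alloc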
3459
3460 Vars.push_back(RefExpr);
3461 applyOMPAllocateAttribute(*this, VD, AllocatorKind, Allocator, Alignment,
3462 DE->getSourceRange());
3463 }
3464 if (Vars.empty())
3465 return nullptr;
3466 if (!Owner)
3467 Owner = getCurLexicalContext();
3468 auto *D = OMPAllocateDecl::Create(Context, Owner, Loc, Vars, Clauses);
3469 D->setAccess(AS_public);
3470 Owner->addDecl(D);
3472}
3473
3476 ArrayRef<OMPClause *> ClauseList) {
3477 OMPRequiresDecl *D = nullptr;
3478 if (!CurContext->isFileContext()) {
3479 Diag(Loc, diag::err_omp_invalid_scope) << "requires";
3480 } else {
3481 D = CheckOMPRequiresDecl(Loc, ClauseList);
3482 if (D) {
3483 CurContext->addDecl(D);
3484 DSAStack->addRequiresDecl(D);
3485 }
3486 }
3488}
3489
3491 OpenMPDirectiveKind DKind,
3492 ArrayRef<std::string> Assumptions,
3493 bool SkippedClauses) {
3494 if (!SkippedClauses && Assumptions.empty())
3495 Diag(Loc, diag::err_omp_no_clause_for_directive)
3496 << llvm::omp::getAllAssumeClauseOptions()
3497 << llvm::omp::getOpenMPDirectiveName(DKind);
3498
3499 auto *AA = AssumptionAttr::Create(Context, llvm::join(Assumptions, ","), Loc);
3500 if (DKind == llvm::omp::Directive::OMPD_begin_assumes) {
3501 OMPAssumeScoped.push_back(AA);
3502 return;
3503 }
3504
3505 // Global assumes without assumption clauses are ignored.
3506 if (Assumptions.empty())
3507 return;
3508
3509 assert(DKind == llvm::omp::Directive::OMPD_assumes &&
3510 "Unexpected omp assumption directive!");
3511 OMPAssumeGlobal.push_back(AA);
3512
3513 // The OMPAssumeGlobal scope above will take care of new declarations but
3514 // we also want to apply the assumption to existing ones, e.g., to
3515 // declarations in included headers. To this end, we traverse all existing
3516 // declaration contexts and annotate function declarations here.
3517 SmallVector<DeclContext *, 8> DeclContexts;
3518 auto *Ctx = CurContext;
3519 while (Ctx->getLexicalParent())
3520 Ctx = Ctx->getLexicalParent();
3521 DeclContexts.push_back(Ctx);
3522 while (!DeclContexts.empty()) {
3523 DeclContext *DC = DeclContexts.pop_back_val();
3524 for (auto *SubDC : DC->decls()) {
3525 if (SubDC->isInvalidDecl())
3526 continue;
3527 if (auto *CTD = dyn_cast<ClassTemplateDecl>(SubDC)) {
3528 DeclContexts.push_back(CTD->getTemplatedDecl());
3529 llvm::append_range(DeclContexts, CTD->specializations());
3530 continue;
3531 }
3532 if (auto *DC = dyn_cast<DeclContext>(SubDC))
3533 DeclContexts.push_back(DC);
3534 if (auto *F = dyn_cast<FunctionDecl>(SubDC)) {
3535 F->addAttr(AA);
3536 continue;
3537 }
3538 }
3539 }
3540}
3541
3543 assert(isInOpenMPAssumeScope() && "Not in OpenMP assumes scope!");
3544 OMPAssumeScoped.pop_back();
3545}
3546
3548 ArrayRef<OMPClause *> ClauseList) {
3549 /// For target specific clauses, the requires directive cannot be
3550 /// specified after the handling of any of the target regions in the
3551 /// current compilation unit.
3552 ArrayRef<SourceLocation> TargetLocations =
3553 DSAStack->getEncounteredTargetLocs();
3554 SourceLocation AtomicLoc = DSAStack->getAtomicDirectiveLoc();
3555 if (!TargetLocations.empty() || !AtomicLoc.isInvalid()) {
3556 for (const OMPClause *CNew : ClauseList) {
3557 // Check if any of the requires clauses affect target regions.
3558 if (isa<OMPUnifiedSharedMemoryClause>(CNew) ||
3559 isa<OMPUnifiedAddressClause>(CNew) ||
3560 isa<OMPReverseOffloadClause>(CNew) ||
3561 isa<OMPDynamicAllocatorsClause>(CNew)) {
3562 Diag(Loc, diag::err_omp_directive_before_requires)
3563 << "target" << getOpenMPClauseName(CNew->getClauseKind());
3564 for (SourceLocation TargetLoc : TargetLocations) {
3565 Diag(TargetLoc, diag::note_omp_requires_encountered_directive)
3566 << "target";
3567 }
3568 } else if (!AtomicLoc.isInvalid() &&
3569 isa<OMPAtomicDefaultMemOrderClause>(CNew)) {
3570 Diag(Loc, diag::err_omp_directive_before_requires)
3571 << "atomic" << getOpenMPClauseName(CNew->getClauseKind());
3572 Diag(AtomicLoc, diag::note_omp_requires_encountered_directive)
3573 << "atomic";
3574 }
3575 }
3576 }
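  // Editorial illustration (added note, not part of the upstream source):
  //   void f() {
  //   #pragma omp target
  //     ;
  //   }
  //   #pragma omp requires unified_shared_memory // diagnosed: a target
  //                                              // region was already seen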
3577
3578 if (!DSAStack->hasDuplicateRequiresClause(ClauseList))
3580 ClauseList);
3581 return nullptr;
3582}
3583
3584static void reportOriginalDsa(Sema &SemaRef, const DSAStackTy *Stack,
3585 const ValueDecl *D,
3586 const DSAStackTy::DSAVarData &DVar,
3587 bool IsLoopIterVar) {
3588 if (DVar.RefExpr) {
3589 SemaRef.Diag(DVar.RefExpr->getExprLoc(), diag::note_omp_explicit_dsa)
3590 << getOpenMPClauseName(DVar.CKind);
3591 return;
3592 }
3593 enum {
3594 PDSA_StaticMemberShared,
3595 PDSA_StaticLocalVarShared,
3596 PDSA_LoopIterVarPrivate,
3597 PDSA_LoopIterVarLinear,
3598 PDSA_LoopIterVarLastprivate,
3599 PDSA_ConstVarShared,
3600 PDSA_GlobalVarShared,
3601 PDSA_TaskVarFirstprivate,
3602 PDSA_LocalVarPrivate,
3603 PDSA_Implicit
3604 } Reason = PDSA_Implicit;
3605 bool ReportHint = false;
3606 auto ReportLoc = D->getLocation();
3607 auto *VD = dyn_cast<VarDecl>(D);
3608 if (IsLoopIterVar) {
3609 if (DVar.CKind == OMPC_private)
3610 Reason = PDSA_LoopIterVarPrivate;
3611 else if (DVar.CKind == OMPC_lastprivate)
3612 Reason = PDSA_LoopIterVarLastprivate;
3613 else
3614 Reason = PDSA_LoopIterVarLinear;
3615 } else if (isOpenMPTaskingDirective(DVar.DKind) &&
3616 DVar.CKind == OMPC_firstprivate) {
3617 Reason = PDSA_TaskVarFirstprivate;
3618 ReportLoc = DVar.ImplicitDSALoc;
3619 } else if (VD && VD->isStaticLocal())
3620 Reason = PDSA_StaticLocalVarShared;
3621 else if (VD && VD->isStaticDataMember())
3622 Reason = PDSA_StaticMemberShared;
3623 else if (VD && VD->isFileVarDecl())
3624 Reason = PDSA_GlobalVarShared;
3625 else if (D->getType().isConstant(SemaRef.getASTContext()))
3626 Reason = PDSA_ConstVarShared;
3627 else if (VD && VD->isLocalVarDecl() && DVar.CKind == OMPC_private) {
3628 ReportHint = true;
3629 Reason = PDSA_LocalVarPrivate;
3630 }
3631 if (Reason != PDSA_Implicit) {
3632 SemaRef.Diag(ReportLoc, diag::note_omp_predetermined_dsa)
3633 << Reason << ReportHint
3634 << getOpenMPDirectiveName(Stack->getCurrentDirective());
3635 } else if (DVar.ImplicitDSALoc.isValid()) {
3636 SemaRef.Diag(DVar.ImplicitDSALoc, diag::note_omp_implicit_dsa)
3637 << getOpenMPClauseName(DVar.CKind);
3638 }
3639}
3640
3643 bool IsAggregateOrDeclareTarget) {
3645 switch (M) {
3646 case OMPC_DEFAULTMAP_MODIFIER_alloc:
3647 Kind = OMPC_MAP_alloc;
3648 break;
3649 case OMPC_DEFAULTMAP_MODIFIER_to:
3650 Kind = OMPC_MAP_to;
3651 break;
3652 case OMPC_DEFAULTMAP_MODIFIER_from:
3653 Kind = OMPC_MAP_from;
3654 break;
3655 case OMPC_DEFAULTMAP_MODIFIER_tofrom:
3656 Kind = OMPC_MAP_tofrom;
3657 break;
3658 case OMPC_DEFAULTMAP_MODIFIER_present:
3659 // OpenMP 5.1 [2.21.7.3, defaultmap clause, Description]
3660 // If implicit-behavior is present, each variable referenced in the
3661 // construct in the category specified by variable-category is treated as if
3662 // it had been listed in a map clause with the map-type of alloc and
3663 // map-type-modifier of present.
3664 Kind = OMPC_MAP_alloc;
3665 break;
3666 case OMPC_DEFAULTMAP_MODIFIER_firstprivate:
3668 llvm_unreachable("Unexpected defaultmap implicit behavior");
3669 case OMPC_DEFAULTMAP_MODIFIER_none:
3670 case OMPC_DEFAULTMAP_MODIFIER_default:
3672 // IsAggregateOrDeclareTarget could be true if:
3673 // 1. the implicit behavior for aggregate is tofrom
3674 // 2. it's a declare target link
3675 if (IsAggregateOrDeclareTarget) {
3676 Kind = OMPC_MAP_tofrom;
3677 break;
3678 }
3679 llvm_unreachable("Unexpected defaultmap implicit behavior");
3680 }
3681 assert(Kind != OMPC_MAP_unknown && "Expect map kind to be known");
3682 return Kind;
3683}
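// Editorial note (added, not part of the upstream source): for example, under
// 'defaultmap(present: aggregate)' an implicitly referenced aggregate ends up
// mapped with the 'alloc' map type returned above, while the 'present'
// map-type-modifier is recorded separately by the DSA attribute checker below.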
3684
3685namespace {
3686class DSAAttrChecker final : public StmtVisitor<DSAAttrChecker, void> {
3687 DSAStackTy *Stack;
3688 Sema &SemaRef;
3689 bool ErrorFound = false;
3690 bool TryCaptureCXXThisMembers = false;
3691 CapturedStmt *CS = nullptr;
3692 const static unsigned DefaultmapKindNum = OMPC_DEFAULTMAP_pointer + 1;
3693 llvm::SmallVector<Expr *, 4> ImplicitFirstprivate;
3694 llvm::SmallVector<Expr *, 4> ImplicitPrivate;
3695 llvm::SmallVector<Expr *, 4> ImplicitMap[DefaultmapKindNum][OMPC_MAP_delete];
3697 ImplicitMapModifier[DefaultmapKindNum];
3698 Sema::VarsWithInheritedDSAType VarsWithInheritedDSA;
3699 llvm::SmallDenseSet<const ValueDecl *, 4> ImplicitDeclarations;
3700
3701 void VisitSubCaptures(OMPExecutableDirective *S) {
3702 // Check implicitly captured variables.
3703 if (!S->hasAssociatedStmt() || !S->getAssociatedStmt())
3704 return;
3705 if (S->getDirectiveKind() == OMPD_atomic ||
3706 S->getDirectiveKind() == OMPD_critical ||
3707 S->getDirectiveKind() == OMPD_section ||
3708 S->getDirectiveKind() == OMPD_master ||
3709 S->getDirectiveKind() == OMPD_masked ||
3710 S->getDirectiveKind() == OMPD_scope ||
3711 isOpenMPLoopTransformationDirective(S->getDirectiveKind())) {
3712 Visit(S->getAssociatedStmt());
3713 return;
3714 }
3715 visitSubCaptures(S->getInnermostCapturedStmt());
3716 // Try to capture inner this->member references to generate correct mappings
3717 // and diagnostics.
3718 if (TryCaptureCXXThisMembers ||
3719 (isOpenMPTargetExecutionDirective(Stack->getCurrentDirective()) &&
3720 llvm::any_of(S->getInnermostCapturedStmt()->captures(),
3721 [](const CapturedStmt::Capture &C) {
3722 return C.capturesThis();
3723 }))) {
3724 bool SavedTryCaptureCXXThisMembers = TryCaptureCXXThisMembers;
3725 TryCaptureCXXThisMembers = true;
3726 Visit(S->getInnermostCapturedStmt()->getCapturedStmt());
3727 TryCaptureCXXThisMembers = SavedTryCaptureCXXThisMembers;
3728 }
3729 // In tasks, firstprivates are not captured anymore, so they need to be
3730 // analyzed explicitly.
3731 if (isOpenMPTaskingDirective(S->getDirectiveKind()) &&
3732 !isOpenMPTaskLoopDirective(S->getDirectiveKind())) {
3733 for (OMPClause *C : S->clauses())
3734 if (auto *FC = dyn_cast<OMPFirstprivateClause>(C)) {
3735 for (Expr *Ref : FC->varlists())
3736 Visit(Ref);
3737 }
3738 }
3739 }
3740
3741public:
3742 void VisitDeclRefExpr(DeclRefExpr *E) {
3743 if (TryCaptureCXXThisMembers || E->isTypeDependent() ||
3746 return;
3747 if (auto *VD = dyn_cast<VarDecl>(E->getDecl())) {
3748 // Check the data-sharing rules for the expressions in the clauses.
3749 if (!CS || (isa<OMPCapturedExprDecl>(VD) && !CS->capturesVariable(VD) &&
3750 !Stack->getTopDSA(VD, /*FromParent=*/false).RefExpr &&
3751 !Stack->isImplicitDefaultFirstprivateFD(VD))) {
3752 if (auto *CED = dyn_cast<OMPCapturedExprDecl>(VD))
3753 if (!CED->hasAttr<OMPCaptureNoInitAttr>()) {
3754 Visit(CED->getInit());
3755 return;
3756 }
3757 } else if (VD->isImplicit() || isa<OMPCapturedExprDecl>(VD))
3758 // Do not analyze internal variables and do not include them in
3759 // implicit clauses.
3760 if (!Stack->isImplicitDefaultFirstprivateFD(VD))
3761 return;
3762 VD = VD->getCanonicalDecl();
3763 // Skip internally declared variables.
3764 if (VD->hasLocalStorage() && CS && !CS->capturesVariable(VD) &&
3765 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3766 !Stack->isImplicitTaskFirstprivate(VD))
3767 return;
3768 // Skip allocators in uses_allocators clauses.
3769 if (Stack->isUsesAllocatorsDecl(VD))
3770 return;
3771
3772 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(VD, /*FromParent=*/false);
3773 // Check if the variable has an explicit DSA set and stop the analysis if so.
3774 if (DVar.RefExpr || !ImplicitDeclarations.insert(VD).second)
3775 return;
3776
3777 // Skip internally declared static variables.
3778 std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
3779 OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
3780 if (VD->hasGlobalStorage() && CS && !CS->capturesVariable(VD) &&
3781 (Stack->hasRequiresDeclWithClause<OMPUnifiedSharedMemoryClause>() ||
3782 !Res || *Res != OMPDeclareTargetDeclAttr::MT_Link) &&
3783 !Stack->isImplicitDefaultFirstprivateFD(VD) &&
3784 !Stack->isImplicitTaskFirstprivate(VD))
3785 return;
3786
3787 SourceLocation ELoc = E->getExprLoc();
3788 OpenMPDirectiveKind DKind = Stack->getCurrentDirective();
3789 // The default(none) clause requires that each variable that is referenced
3790 // in the construct, and does not have a predetermined data-sharing
3791 // attribute, must have its data-sharing attribute explicitly determined
3792 // by being listed in a data-sharing attribute clause.
3793 if (DVar.CKind == OMPC_unknown &&
3794 (Stack->getDefaultDSA() == DSA_none ||
3795 Stack->getDefaultDSA() == DSA_private ||
3796 Stack->getDefaultDSA() == DSA_firstprivate) &&
3797 isImplicitOrExplicitTaskingRegion(DKind) &&
3798 VarsWithInheritedDSA.count(VD) == 0) {
3799 bool InheritedDSA = Stack->getDefaultDSA() == DSA_none;
3800 if (!InheritedDSA && (Stack->getDefaultDSA() == DSA_firstprivate ||
3801 Stack->getDefaultDSA() == DSA_private)) {
3802 DSAStackTy::DSAVarData DVar =
3803 Stack->getImplicitDSA(VD, /*FromParent=*/false);
3804 InheritedDSA = DVar.CKind == OMPC_unknown;
3805 }
3806 if (InheritedDSA)
3807 VarsWithInheritedDSA[VD] = E;
3808 if (Stack->getDefaultDSA() == DSA_none)
3809 return;
3810 }
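        // Editorial illustration (added note, not part of the upstream
        // source):
        //   int x = 0;
        //   #pragma omp parallel default(none)
        //   x++; // 'x' must be listed in an explicit data-sharing clause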
3811
3812 // OpenMP 5.0 [2.19.7.2, defaultmap clause, Description]
3813 // If implicit-behavior is none, each variable referenced in the
3814 // construct that does not have a predetermined data-sharing attribute
3815 // and does not appear in a to or link clause on a declare target
3816 // directive must be listed in a data-mapping attribute clause, a
3817 // data-sharing attribute clause (including a data-sharing attribute
3818 // clause on a combined construct where target is one of the
3819 // constituent constructs), or an is_device_ptr clause.
3820 OpenMPDefaultmapClauseKind ClauseKind =
3822 if (SemaRef.getLangOpts().OpenMP >= 50) {
3823 bool IsModifierNone = Stack->getDefaultmapModifier(ClauseKind) ==
3824 OMPC_DEFAULTMAP_MODIFIER_none;
3825 if (DVar.CKind == OMPC_unknown && IsModifierNone &&
3826 VarsWithInheritedDSA.count(VD) == 0 && !Res) {
3827 // Only check for a data-mapping attribute and is_device_ptr here,
3828 // since we have already made sure above that the declaration does
3829 // not have a data-sharing attribute.
3830 if (!Stack->checkMappableExprComponentListsForDecl(
3831 VD, /*CurrentRegionOnly=*/true,
3833 MapExprComponents,
3835 auto MI = MapExprComponents.rbegin();
3836 auto ME = MapExprComponents.rend();
3837 return MI != ME && MI->getAssociatedDeclaration() == VD;
3838 })) {
3839 VarsWithInheritedDSA[VD] = E;
3840 return;
3841 }
3842 }
3843 }
3844 if (SemaRef.getLangOpts().OpenMP > 50) {
3845 bool IsModifierPresent = Stack->getDefaultmapModifier(ClauseKind) ==
3846 OMPC_DEFAULTMAP_MODIFIER_present;
3847 if (IsModifierPresent) {
3848 if (!llvm::is_contained(ImplicitMapModifier[ClauseKind],
3849 OMPC_MAP_MODIFIER_present)) {
3850 ImplicitMapModifier[ClauseKind].push_back(
3851 OMPC_MAP_MODIFIER_present);
3852 }
3853 }
3854 }
3855
3857 !Stack->isLoopControlVariable(VD).first) {
3858 if (!Stack->checkMappableExprComponentListsForDecl(
3859 VD, /*CurrentRegionOnly=*/true,
3861 StackComponents,
3863 if (SemaRef.LangOpts.OpenMP >= 50)
3864 return !StackComponents.empty();
3865 // The variable is used if it has been marked as an array, an array
3866 // section, an array shaping expression, or the variable itself.
3867 return StackComponents.size() == 1 ||
3868 llvm::all_of(
3869 llvm::drop_begin(llvm::reverse(StackComponents)),
3870 [](const OMPClauseMappableExprCommon::
3871 MappableComponent &MC) {
3872 return MC.getAssociatedDeclaration() ==
3873 nullptr &&
3874 (isa<OMPArraySectionExpr>(
3875 MC.getAssociatedExpression()) ||
3876 isa<OMPArrayShapingExpr>(
3877 MC.getAssociatedExpression()) ||
3878 isa<ArraySubscriptExpr>(
3879 MC.getAssociatedExpression()));
3880 });
3881 })) {
3882 bool IsFirstprivate = false;
3883 // By default lambdas are captured as firstprivates.
3884 if (const auto *RD =
3886 IsFirstprivate = RD->isLambda();
3887 IsFirstprivate =
3888 IsFirstprivate || (Stack->mustBeFirstprivate(ClauseKind) && !Res);
3889 if (IsFirstprivate) {
3890 ImplicitFirstprivate.emplace_back(E);
3891 } else {
3893 Stack->getDefaultmapModifier(ClauseKind);
3895 M, ClauseKind == OMPC_DEFAULTMAP_aggregate || Res);
3896 ImplicitMap[ClauseKind][Kind].emplace_back(E);
3897 }
3898 return;
3899 }
3900 }
3901
3902 // OpenMP [2.9.3.6, Restrictions, p.2]
3903 // A list item that appears in a reduction clause of the innermost
3904 // enclosing worksharing or parallel construct may not be accessed in an
3905 // explicit task.
3906 DVar = Stack->hasInnermostDSA(
3907 VD,
3908 [](OpenMPClauseKind C, bool AppliedToPointee) {
3909 return C == OMPC_reduction && !AppliedToPointee;
3910 },
3911 [](OpenMPDirectiveKind K) {
3912 return isOpenMPParallelDirective(K) ||
3914 },
3915 /*FromParent=*/true);
3916 if (isOpenMPTaskingDirective(DKind) && DVar.CKind == OMPC_reduction) {
3917 ErrorFound = true;
3918 SemaRef.Diag(ELoc, diag::err_omp_reduction_in_task);
3919 reportOriginalDsa(SemaRef, Stack, VD, DVar);
3920 return;
3921 }
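      // Editorial illustration (added note, not part of the upstream source):
      //   #pragma omp parallel reduction(+ : r)
      //   {
      //   #pragma omp task
      //     r += 1; // diagnosed: reduction list item accessed in a task
      //   }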
3922
3923 // Define implicit data-sharing attributes for task.
3924 DVar = Stack->getImplicitDSA(VD, /*FromParent=*/false);
3925 if (((isOpenMPTaskingDirective(DKind) && DVar.CKind != OMPC_shared) ||
3926 (((Stack->getDefaultDSA() == DSA_firstprivate &&
3927 DVar.CKind == OMPC_firstprivate) ||
3928 (Stack->getDefaultDSA() == DSA_private &&
3929 DVar.CKind == OMPC_private)) &&
3930 !DVar.RefExpr)) &&
3931 !Stack->isLoopControlVariable(VD).first) {
3932 if (Stack->getDefaultDSA() == DSA_private)
3933 ImplicitPrivate.push_back(E);
3934 else
3935 ImplicitFirstprivate.push_back(E);
3936 return;
3937 }
3938
3939 // Store implicitly used globals marked with declare target link for the
3940 // parent target region.
3941 if (!isOpenMPTargetExecutionDirective(DKind) && Res &&
3942 *Res == OMPDeclareTargetDeclAttr::MT_Link) {
3943 Stack->addToParentTargetRegionLinkGlobals(E);
3944 return;
3945 }
3946 }
3947 }
3948 void VisitMemberExpr(MemberExpr *E) {
3949 if (E->isTypeDependent() || E->isValueDependent() ||
3951 return;
3952 auto *FD = dyn_cast<FieldDecl>(E->getMemberDecl());
3953 OpenMPDirectiveKind DKind = Stack->getCurrentDirective();
3954 if (auto *TE = dyn_cast<CXXThisExpr>(E->getBase()->IgnoreParenCasts())) {
3955 if (!FD)
3956 return;
3957 DSAStackTy::DSAVarData DVar = Stack->getTopDSA(FD, /*FromParent=*/false);
3958 // Check if the variable has an explicit DSA set and stop the analysis
3959 // if so.
3960 if (DVar.RefExpr || !ImplicitDeclarations.insert(FD).second)
3961 return;
3962
3964 !Stack->isLoopControlVariable(FD).first &&
3965 !Stack->checkMappableExprComponentListsForDecl(
3966 FD, /*CurrentRegionOnly=*/true,
3968 StackComponents,
3970 return isa<CXXThisExpr>(
3971 cast<MemberExpr>(
3972 StackComponents.back().getAssociatedExpression())
3973 ->getBase()
3974 ->IgnoreParens());
3975 })) {
3976 // OpenMP 4.5 [2.15.5.1, map Clause, Restrictions, C/C++, p.3]
3977 // A bit-field cannot appear in a map clause.
3978 //
3979 if (FD->isBitField())
3980 return;
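        // Editorial illustration (added note, not part of the upstream
        // source): a bit-field member such as 'int BF : 3' referenced via
        // 'this' inside a target region is simply skipped here instead of
        // being added to an implicit map.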
3981
3982 // Check to see if the member expression is referencing a class that
3983 // has already been explicitly mapped
3984 if (Stack->isClassPreviouslyMapped(TE->getType()))
3985 return;
3986
3988 Stack->getDefaultmapModifier(OMPC_DEFAULTMAP_aggregate);
3989 OpenMPDefaultmapClauseKind ClauseKind =
3992 Modifier, /*IsAggregateOrDeclareTarget*/ true);
3993 ImplicitMap[ClauseKind][Kind].emplace_back(E);
3994 return;
3995 }
3996
3997 SourceLocation ELoc = E->getExprLoc();
3998 // OpenMP [2.9.3.6, Restrictions, p.2]
3999 // A list item that appears in a reduction clause of the innermost
4000 // enclosing worksharing or parallel construct may not be accessed in
4001 // an explicit task.
4002 DVar = Stack->hasInnermostDSA(
4003 FD,
4004 [](OpenMPClauseKind C, bool AppliedToPointee) {
4005 return C == OMPC_reduction && !AppliedToPointee;
4006 },
4007 [](OpenMPDirectiveKind K) {
4008 return isOpenMPParallelDirective(K) ||
4010 },
4011 /*FromParent=*/true);
4012 if (isOpenMPTaskingDirective(DKind) && DVar.CKind == OMPC_reduction) {
4013 ErrorFound = true;
4014 SemaRef.Diag(ELoc, diag::err_omp_reduction_in_task);
4015 reportOriginalDsa(SemaRef, Stack, FD, DVar);
4016 return;
4017 }
4018
4019 // Define implicit data-sharing attributes for task.
4020 DVar = Stack->getImplicitDSA(FD, /*FromParent=*/false);
4021 if (isOpenMPTaskingDirective(DKind) && DVar.CKind != OMPC_shared &&
4022 !Stack->isLoopControlVariable(FD).first) {
4023 // Check if there is a captured expression for the current field in the
4024 // region. Do not mark it as firstprivate unless there is no captured
4025 // expression.
4026 // TODO: try to make it firstprivate.
4027 if (DVar.CKind != OMPC_unknown)
4028 ImplicitFirstprivate.push_back(E);
4029 }
4030 return;
4031 }
4034 if (!checkMapClauseExpressionBase(SemaRef, E, CurComponents, OMPC_map,
4035 Stack->getCurrentDirective(),
4036 /*NoDiagnose=*/true))
4037 return;
4038 const auto *VD = cast<ValueDecl>(
4039 CurComponents.back().getAssociatedDeclaration()->getCanonicalDecl());
4040 if (!Stack->checkMappableExprComponentListsForDecl(
4041 VD, /*CurrentRegionOnly=*/true,
4042 [&CurComponents](
4044 StackComponents,
4046 auto CCI = CurComponents.rbegin();
4047 auto CCE = CurComponents.rend();
4048 for (const auto &SC : llvm::reverse(StackComponents)) {
4049 // Do both expressions have the same kind?
4050 if (CCI->getAssociatedExpression()->getStmtClass() !=
4051 SC.getAssociatedExpression()->getStmtClass())
4052 if (!((isa<OMPArraySectionExpr>(
4053 SC.getAssociatedExpression()) ||
4054 isa<OMPArrayShapingExpr>(
4055 SC.getAssociatedExpression())) &&
4056 isa<ArraySubscriptExpr>(
4057 CCI->getAssociatedExpression())))
4058 return false;
4059
4060 const Decl *CCD = CCI->getAssociatedDeclaration();
4061 const Decl *SCD = SC.getAssociatedDeclaration();
4062 CCD = CCD ? CCD->getCanonicalDecl() : nullptr;
4063 SCD = SCD ? SCD->getCanonicalDecl() : nullptr;
4064 if (SCD != CCD)
4065 return false;
4066 std::advance(CCI, 1);
4067 if (CCI == CCE)
4068 break;
4069 }
4070 return true;
4071 })) {
4072 Visit(E->getBase());
4073 }
4074 } else if (!TryCaptureCXXThisMembers) {
4075 Visit(E->getBase());
4076 }
4077 }
4078 void VisitOMPExecutableDirective(OMPExecutableDirective *S) {
4079 for (OMPClause *C : S->clauses()) {
4080 // Skip analysis of arguments of private clauses for task|target
4081 // directives.
4082 if (isa_and_nonnull<OMPPrivateClause>(C))
4083 continue;
4084 // Skip analysis of arguments of implicitly defined firstprivate clause
4085 // for task|target directives.
4086 // Skip analysis of arguments of implicitly defined map clause for target
4087 // directives.
4088 if (C && !((isa<OMPFirstprivateClause>(C) || isa<OMPMapClause>(C)) &&
4089 C->isImplicit() &&
4090 !isOpenMPTaskingDirective(Stack->getCurrentDirective()))) {
4091 for (Stmt *CC : C->children()) {
4092 if (CC)
4093 Visit(CC);
4094 }
4095 }
4096 }
4097 // Check implicitly captured variables.
4098 VisitSubCaptures(S);
4099 }
4100
4101 void VisitOMPLoopTransformationDirective(OMPLoopTransformationDirective *S) {
4102 // Loop transformation directives do not introduce data sharing
4103 VisitStmt(S);
4104 }
4105
4106 void VisitCallExpr(CallExpr *S) {
4107 for (Stmt *C : S->arguments()) {
4108 if (C) {
4109 // Check implicitly captured variables in the task-based directives to
4110 // check if they must be firstprivatized.
4111 Visit(C);
4112 }
4113 }
4114 if (Expr *Callee = S->getCallee()) {
4115 auto *CI = Callee->IgnoreParenImpCasts();
4116 if (auto *CE = dyn_cast<MemberExpr>(CI))
4117 Visit(CE->getBase());
4118 else if (auto *CE = dyn_cast<DeclRefExpr>(CI))
4119 Visit(CE);
4120 }
4121 }
4122 void VisitStmt(Stmt *S) {
4123 for (Stmt *C : S->children()) {
4124 if (C) {
4125 // Check implicitly captured variables in the task-based directives to
4126 // check if they must be firstprivatized.
4127 Visit(C);
4128 }
4129 }
4130 }
4131
4132 void visitSubCaptures(CapturedStmt *S) {
4133 for (const CapturedStmt::Capture &Cap : S->captures()) {
4134 if (!Cap.capturesVariable() && !Cap.capturesVariableByCopy())
4135 continue;
4136 VarDecl *VD = Cap.getCapturedVar();
4137 // Do not try to map the variable if it or its sub-component was mapped
4138 // already.
4139 if (isOpenMPTargetExecutionDirective(Stack->getCurrentDirective()) &&
4140 Stack->checkMappableExprComponentListsForDecl(
4141 VD, /*CurrentRegionOnly=*/true,
4143 OpenMPClauseKind) { return true; }))
4144 continue;
4146 SemaRef, VD, VD->getType().getNonLValueExprType(SemaRef.Context),
4147 Cap.getLocation(), /*RefersToCapture=*/true);
4148 Visit(DRE);
4149 }
4150 }
4151 bool isErrorFound() const { return ErrorFound; }
4152 ArrayRef<Expr *> getImplicitFirstprivate() const {
4153 return ImplicitFirstprivate;
4154 }
4155 ArrayRef<Expr *> getImplicitPrivate() const { return ImplicitPrivate; }
4157 OpenMPMapClauseKind MK) const {
4158 return ImplicitMap[DK][MK];
4159 }
4161 getImplicitMapModifier(OpenMPDefaultmapClauseKind Kind) const {
4162 return ImplicitMapModifier[Kind];
4163 }
4164 const Sema::VarsWithInheritedDSAType &getVarsWithInheritedDSA() const {
4165 return VarsWithInheritedDSA;
4166 }
4167
4168 DSAAttrChecker(DSAStackTy *S, Sema &SemaRef, CapturedStmt *CS)
4169 : Stack(S), SemaRef(SemaRef), ErrorFound(false), CS(CS) {
4170 // Process declare target link variables for the target directives.
4171 if (isOpenMPTargetExecutionDirective(S->getCurrentDirective())) {
4172 for (DeclRefExpr *E : Stack->getLinkGlobals())
4173 Visit(E);
4174 }
4175 }
4176};
4177} // namespace
4178
4179static void handleDeclareVariantConstructTrait(DSAStackTy *Stack,
4180 OpenMPDirectiveKind DKind,
4181 bool ScopeEntry) {
4184 Traits.emplace_back(llvm::omp::TraitProperty::construct_target_target);
4185 if (isOpenMPTeamsDirective(DKind))
4186 Traits.emplace_back(llvm::omp::TraitProperty::construct_teams_teams);
4187 if (isOpenMPParallelDirective(DKind))
4188 Traits.emplace_back(llvm::omp::TraitProperty::construct_parallel_parallel);
4190 Traits.emplace_back(llvm::omp::TraitProperty::construct_for_for);
4191 if (isOpenMPSimdDirective(DKind))
4192 Traits.emplace_back(llvm::omp::TraitProperty::construct_simd_simd);
4193 Stack->handleConstructTrait(Traits, ScopeEntry);
4194}
4195
4197 switch (DKind) {
4198 case OMPD_parallel:
4199 case OMPD_parallel_for:
4200 case OMPD_parallel_for_simd:
4201 case OMPD_parallel_sections:
4202 case OMPD_parallel_master:
4203 case OMPD_parallel_masked:
4204 case OMPD_parallel_loop:
4205 case OMPD_teams:
4206 case OMPD_teams_distribute:
4207 case OMPD_teams_distribute_simd: {
4208 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4209 QualType KmpInt32PtrTy =
4211 Sema::CapturedParamNameType Params[] = {
4212 std::make_pair(".global_tid.", KmpInt32PtrTy),
4213 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4214 std::make_pair(StringRef(), QualType()) // __context with shared vars
4215 };
4216 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4217 Params);
4218 break;
4219 }
4220 case OMPD_target_teams:
4221 case OMPD_target_parallel:
4222 case OMPD_target_parallel_for:
4223 case OMPD_target_parallel_for_simd:
4224 case OMPD_target_parallel_loop:
4225 case OMPD_target_teams_distribute:
4226 case OMPD_target_teams_distribute_simd: {
4227 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4229 QualType KmpInt32PtrTy =
4231 QualType Args[] = {VoidPtrTy};
4233 EPI.Variadic = true;
4234 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4235 Sema::CapturedParamNameType Params[] = {
4236 std::make_pair(".global_tid.", KmpInt32Ty),
4237 std::make_pair(".part_id.", KmpInt32PtrTy),
4238 std::make_pair(".privates.", VoidPtrTy),
4239 std::make_pair(
4240 ".copy_fn.",
4242 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4243 std::make_pair(StringRef(), QualType()) // __context with shared vars
4244 };
4245 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4246 Params, /*OpenMPCaptureLevel=*/0);
4247 // Mark this captured region as inlined, because we don't use the
4248 // outlined function directly.
4250 AlwaysInlineAttr::CreateImplicit(
4251 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4252 Sema::CapturedParamNameType ParamsTarget[] = {
4253 std::make_pair(StringRef(), QualType()) // __context with shared vars
4254 };
4255 // Start a captured region for 'target' with no implicit parameters.
4256 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4257 ParamsTarget, /*OpenMPCaptureLevel=*/1);
4258 Sema::CapturedParamNameType ParamsTeamsOrParallel[] = {
4259 std::make_pair(".global_tid.", KmpInt32PtrTy),
4260 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4261 std::make_pair(StringRef(), QualType()) // __context with shared vars
4262 };
4263 // Start a captured region for 'teams' or 'parallel'. Both regions have
4264 // the same implicit parameters.
4265 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4266 ParamsTeamsOrParallel, /*OpenMPCaptureLevel=*/2);
4267 break;
4268 }
4269 case OMPD_target:
4270 case OMPD_target_simd: {
4271 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4273 QualType KmpInt32PtrTy =
4275 QualType Args[] = {VoidPtrTy};
4277 EPI.Variadic = true;
4278 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4279 Sema::CapturedParamNameType Params[] = {
4280 std::make_pair(".global_tid.", KmpInt32Ty),
4281 std::make_pair(".part_id.", KmpInt32PtrTy),
4282 std::make_pair(".privates.", VoidPtrTy),
4283 std::make_pair(
4284 ".copy_fn.",
4286 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4287 std::make_pair(StringRef(), QualType()) // __context with shared vars
4288 };
4289 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4290 Params, /*OpenMPCaptureLevel=*/0);
4291 // Mark this captured region as inlined, because we don't use the
4292 // outlined function directly.
4294 AlwaysInlineAttr::CreateImplicit(
4295 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4296 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4297 std::make_pair(StringRef(), QualType()),
4298 /*OpenMPCaptureLevel=*/1);
4299 break;
4300 }
4301 case OMPD_atomic:
4302 case OMPD_critical:
4303 case OMPD_section:
4304 case OMPD_master:
4305 case OMPD_masked:
4306 case OMPD_tile:
4307 case OMPD_unroll:
4308 break;
4309 case OMPD_loop:
4310 // TODO: 'loop' may require additional parameters depending on the binding.
4311 // Treat similar to OMPD_simd/OMPD_for for now.
4312 case OMPD_simd:
4313 case OMPD_for:
4314 case OMPD_for_simd:
4315 case OMPD_sections:
4316 case OMPD_single:
4317 case OMPD_taskgroup:
4318 case OMPD_distribute:
4319 case OMPD_distribute_simd:
4320 case OMPD_ordered:
4321 case OMPD_scope:
4322 case OMPD_target_data:
4323 case OMPD_dispatch: {
4324 Sema::CapturedParamNameType Params[] = {
4325 std::make_pair(StringRef(), QualType()) // __context with shared vars
4326 };
4327 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4328 Params);
4329 break;
4330 }
4331 case OMPD_task: {
4332 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4334 QualType KmpInt32PtrTy =
4336 QualType Args[] = {VoidPtrTy};
4338 EPI.Variadic = true;
4339 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4340 Sema::CapturedParamNameType Params[] = {
4341 std::make_pair(".global_tid.", KmpInt32Ty),
4342 std::make_pair(".part_id.", KmpInt32PtrTy),
4343 std::make_pair(".privates.", VoidPtrTy),
4344 std::make_pair(
4345 ".copy_fn.",
4347 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4348 std::make_pair(StringRef(), QualType()) // __context with shared vars
4349 };
4350 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4351 Params);
4352 // Mark this captured region as inlined, because we don't use the
4353 // outlined function directly.
4355 AlwaysInlineAttr::CreateImplicit(
4356 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4357 break;
4358 }
4359 case OMPD_taskloop:
4360 case OMPD_taskloop_simd:
4361 case OMPD_master_taskloop:
4362 case OMPD_masked_taskloop:
4363 case OMPD_masked_taskloop_simd:
4364 case OMPD_master_taskloop_simd: {
4365 QualType KmpInt32Ty =
4366 Context.getIntTypeForBitwidth(/*DestWidth=*/32, /*Signed=*/1)
4367 .withConst();
4368 QualType KmpUInt64Ty =
4369 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/0)
4370 .withConst();
4371 QualType KmpInt64Ty =
4372 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/1)
4373 .withConst();
4375 QualType KmpInt32PtrTy =
4377 QualType Args[] = {VoidPtrTy};
4379 EPI.Variadic = true;
4380 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4381 Sema::CapturedParamNameType Params[] = {
4382 std::make_pair(".global_tid.", KmpInt32Ty),
4383 std::make_pair(".part_id.", KmpInt32PtrTy),
4384 std::make_pair(".privates.", VoidPtrTy),
4385 std::make_pair(
4386 ".copy_fn.",
4388 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4389 std::make_pair(".lb.", KmpUInt64Ty),
4390 std::make_pair(".ub.", KmpUInt64Ty),
4391 std::make_pair(".st.", KmpInt64Ty),
4392 std::make_pair(".liter.", KmpInt32Ty),
4393 std::make_pair(".reductions.", VoidPtrTy),
4394 std::make_pair(StringRef(), QualType()) // __context with shared vars
4395 };
4396 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4397 Params);
4398 // Mark this captured region as inlined, because we don't use the
4399 // outlined function directly.
4401 AlwaysInlineAttr::CreateImplicit(
4402 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4403 break;
4404 }
4405 case OMPD_parallel_masked_taskloop:
4406 case OMPD_parallel_masked_taskloop_simd:
4407 case OMPD_parallel_master_taskloop:
4408 case OMPD_parallel_master_taskloop_simd: {
4409 QualType KmpInt32Ty =
4410 Context.getIntTypeForBitwidth(/*DestWidth=*/32, /*Signed=*/1)
4411 .withConst();
4412 QualType KmpUInt64Ty =
4413 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/0)
4414 .withConst();
4415 QualType KmpInt64Ty =
4416 Context.getIntTypeForBitwidth(/*DestWidth=*/64, /*Signed=*/1)
4417 .withConst();
4419 QualType KmpInt32PtrTy =
4421 Sema::CapturedParamNameType ParamsParallel[] = {
4422 std::make_pair(".global_tid.", KmpInt32PtrTy),
4423 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4424 std::make_pair(StringRef(), QualType()) // __context with shared vars
4425 };
4426 // Start a captured region for 'parallel'.
4427 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4428 ParamsParallel, /*OpenMPCaptureLevel=*/0);
4429 QualType Args[] = {VoidPtrTy};
4431 EPI.Variadic = true;
4432 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4433 Sema::CapturedParamNameType Params[] = {
4434 std::make_pair(".global_tid.", KmpInt32Ty),
4435 std::make_pair(".part_id.", KmpInt32PtrTy),
4436 std::make_pair(".privates.", VoidPtrTy),
4437 std::make_pair(
4438 ".copy_fn.",
4440 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4441 std::make_pair(".lb.", KmpUInt64Ty),
4442 std::make_pair(".ub.", KmpUInt64Ty),
4443 std::make_pair(".st.", KmpInt64Ty),
4444 std::make_pair(".liter.", KmpInt32Ty),
4445 std::make_pair(".reductions.", VoidPtrTy),
4446 std::make_pair(StringRef(), QualType()) // __context with shared vars
4447 };
4448 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4449 Params, /*OpenMPCaptureLevel=*/1);
4450 // Mark this captured region as inlined, because we don't use the
4451 // outlined function directly.
4453 AlwaysInlineAttr::CreateImplicit(
4454 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4455 break;
4456 }
4457 case OMPD_distribute_parallel_for_simd:
4458 case OMPD_distribute_parallel_for: {
4459 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4460 QualType KmpInt32PtrTy =
4462 Sema::CapturedParamNameType Params[] = {
4463 std::make_pair(".global_tid.", KmpInt32PtrTy),
4464 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4465 std::make_pair(".previous.lb.", Context.getSizeType().withConst()),
4466 std::make_pair(".previous.ub.", Context.getSizeType().withConst()),
4467 std::make_pair(StringRef(), QualType()) // __context with shared vars
4468 };
4469 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4470 Params);
4471 break;
4472 }
4473 case OMPD_target_teams_loop:
4474 case OMPD_target_teams_distribute_parallel_for:
4475 case OMPD_target_teams_distribute_parallel_for_simd: {
4476 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4477 QualType KmpInt32PtrTy =
4480
4481 QualType Args[] = {VoidPtrTy};
4483 EPI.Variadic = true;
4484 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4485 Sema::CapturedParamNameType Params[] = {
4486 std::make_pair(".global_tid.", KmpInt32Ty),
4487 std::make_pair(".part_id.", KmpInt32PtrTy),
4488 std::make_pair(".privates.", VoidPtrTy),
4489 std::make_pair(
4490 ".copy_fn.",
4492 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4493 std::make_pair(StringRef(), QualType()) // __context with shared vars
4494 };
4495 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4496 Params, /*OpenMPCaptureLevel=*/0);
4497 // Mark this captured region as inlined, because we don't use the
4498 // outlined function directly.
4500 AlwaysInlineAttr::CreateImplicit(
4501 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4502 Sema::CapturedParamNameType ParamsTarget[] = {
4503 std::make_pair(StringRef(), QualType()) // __context with shared vars
4504 };
4505 // Start a captured region for 'target' with no implicit parameters.
4506 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4507 ParamsTarget, /*OpenMPCaptureLevel=*/1);
4508
4509 Sema::CapturedParamNameType ParamsTeams[] = {
4510 std::make_pair(".global_tid.", KmpInt32PtrTy),
4511 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4512 std::make_pair(StringRef(), QualType()) // __context with shared vars
4513 };
4514 // Start a captured region for 'teams'.
4515 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4516 ParamsTeams, /*OpenMPCaptureLevel=*/2);
4517
4518 Sema::CapturedParamNameType ParamsParallel[] = {
4519 std::make_pair(".global_tid.", KmpInt32PtrTy),
4520 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4521 std::make_pair(".previous.lb.", Context.getSizeType().withConst()),
4522 std::make_pair(".previous.ub.", Context.getSizeType().withConst()),
4523 std::make_pair(StringRef(), QualType()) // __context with shared vars
4524 };
4525 // Start a captured region for 'parallel', passing the previous lower and
4526 // upper bounds as implicit parameters.
4527 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4528 ParamsParallel, /*OpenMPCaptureLevel=*/3);
4529 break;
4530 }
4531
4532 case OMPD_teams_loop:
4533 case OMPD_teams_distribute_parallel_for:
4534 case OMPD_teams_distribute_parallel_for_simd: {
4535 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4536 QualType KmpInt32PtrTy =
4538
4539 Sema::CapturedParamNameType ParamsTeams[] = {
4540 std::make_pair(".global_tid.", KmpInt32PtrTy),
4541 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4542 std::make_pair(StringRef(), QualType()) // __context with shared vars
4543 };
4544 // Start a captured region for 'teams'.
4545 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4546 ParamsTeams, /*OpenMPCaptureLevel=*/0);
4547
4548 Sema::CapturedParamNameType ParamsParallel[] = {
4549 std::make_pair(".global_tid.", KmpInt32PtrTy),
4550 std::make_pair(".bound_tid.", KmpInt32PtrTy),
4551 std::make_pair(".previous.lb.", Context.getSizeType().withConst()),
4552 std::make_pair(".previous.ub.", Context.getSizeType().withConst()),
4553 std::make_pair(StringRef(), QualType()) // __context with shared vars
4554 };
4555 // Start a captured region for 'parallel', passing the previous lower and
4556 // upper bounds as implicit parameters.
4557 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4558 ParamsParallel, /*OpenMPCaptureLevel=*/1);
4559 break;
4560 }
4561 case OMPD_target_update:
4562 case OMPD_target_enter_data:
4563 case OMPD_target_exit_data: {
4564 QualType KmpInt32Ty = Context.getIntTypeForBitwidth(32, 1).withConst();
4566 QualType KmpInt32PtrTy =
4568 QualType Args[] = {VoidPtrTy};
4570 EPI.Variadic = true;
4571 QualType CopyFnType = Context.getFunctionType(Context.VoidTy, Args, EPI);
4572 Sema::CapturedParamNameType Params[] = {
4573 std::make_pair(".global_tid.", KmpInt32Ty),
4574 std::make_pair(".part_id.", KmpInt32PtrTy),
4575 std::make_pair(".privates.", VoidPtrTy),
4576 std::make_pair(
4577 ".copy_fn.",
4579 std::make_pair(".task_t.", Context.VoidPtrTy.withConst()),
4580 std::make_pair(StringRef(), QualType()) // __context with shared vars
4581 };
4582 ActOnCapturedRegionStart(DSAStack->getConstructLoc(), CurScope, CR_OpenMP,
4583 Params);
4584 // Mark this captured region as inlined, because we don't use the
4585 // outlined function directly.
4587 AlwaysInlineAttr::CreateImplicit(
4588 Context, {}, AlwaysInlineAttr::Keyword_forceinline));
4589 break;
4590 }
4591 case OMPD_threadprivate:
4592 case OMPD_allocate:
4593 case OMPD_taskyield:
4594 case OMPD_error:
4595 case OMPD_barrier:
4596 case OMPD_taskwait:
4597 case OMPD_cancellation_point:
4598 case OMPD_cancel:
4599 case OMPD_flush:
4600 case OMPD_depobj:
4601 case OMPD_scan:
4602 case OMPD_declare_reduction:
4603 case OMPD_declare_mapper:
4604 case OMPD_declare_simd:
4605 case OMPD_declare_target:
4606 case OMPD_end_declare_target:
4607 case OMPD_requires:
4608 case OMPD_declare_variant:
4609 case OMPD_begin_declare_variant:
4610 case OMPD_end_declare_variant:
4611 case OMPD_metadirective:
4612 llvm_unreachable("OpenMP Directive is not allowed");
4613 case OMPD_unknown:
4614 default:
4615 llvm_unreachable("Unknown OpenMP directive");
4616 }
4617 DSAStack->setContext(CurContext);
4618 handleDeclareVariantConstructTrait(DSAStack, DKind, /* ScopeEntry */ true);
4619}
4620
4621int Sema::getNumberOfConstructScopes(unsigned Level) const {
4622 return getOpenMPCaptureLevels(DSAStack->getDirective(Level));
4623}
4624
4627 getOpenMPCaptureRegions(CaptureRegions, DKind);
4628 return CaptureRegions.size();
4629}
4630
4632 Expr *CaptureExpr, bool WithInit,
4633 DeclContext *CurContext,
4634 bool AsExpression) {
4635 assert(CaptureExpr);
4636 ASTContext &C = S.getASTContext();
4637 Expr *Init = AsExpression ? CaptureExpr : CaptureExpr->IgnoreImpCasts();
4638 QualType Ty = Init->getType();
4639 if (CaptureExpr->getObjectKind() == OK_Ordinary && CaptureExpr->isGLValue()) {
4640 if (S.getLangOpts().CPlusPlus) {
4641 Ty = C.getLValueReferenceType(Ty);
4642 } else {
4643 Ty = C.getPointerType(Ty);
4644 ExprResult Res =
4645 S.CreateBuiltinUnaryOp(CaptureExpr->getExprLoc(), UO_AddrOf, Init);
4646 if (!Res.isUsable())
4647 return nullptr;
4648 Init = Res.get();
4649 }
4650 WithInit = true;
4651 }
4652 auto *CED = OMPCapturedExprDecl::Create(C, CurContext, Id, Ty,
4653 CaptureExpr->getBeginLoc());
4654 if (!WithInit)
4655 CED->addAttr(OMPCaptureNoInitAttr::CreateImplicit(C));
4656 CurContext->addHiddenDecl(CED);
4658 S.AddInitializerToDecl(CED, Init, /*DirectInit=*/false);
4659 return CED;
4660}
4661
4662static DeclRefExpr *buildCapture(Sema &S, ValueDecl *D, Expr *CaptureExpr,
4663 bool WithInit) {
4665 if (VarDecl *VD = S.isOpenMPCapturedDecl(D))
4666 CD = cast<OMPCapturedExprDecl>(VD);
4667 else
4668 CD = buildCaptureDecl(S, D->getIdentifier(), CaptureExpr, WithInit,
4669 S.CurContext,
4670 /*AsExpression=*/false);
4671 return buildDeclRefExpr(S, CD, CD->getType().getNonReferenceType(),
4672 CaptureExpr->getExprLoc());
4673}
4674
4675static ExprResult buildCapture(Sema &S, Expr *CaptureExpr, DeclRefExpr *&Ref,
4676 StringRef Name) {
4677 CaptureExpr = S.DefaultLvalueConversion(CaptureExpr).get();
4678 if (!Ref) {
4679 OMPCapturedExprDecl *CD = buildCaptureDecl(
4680 S, &S.getASTContext().Idents.get(Name), CaptureExpr,
4681 /*WithInit=*/true, S.CurContext, /*AsExpression=*/true);
4682 Ref = buildDeclRefExpr(S, CD, CD->getType().getNonReferenceType(),
4683 CaptureExpr->getExprLoc());
4684 }
4685 ExprResult Res = Ref;
4686 if (!S.getLangOpts().CPlusPlus &&
4687 CaptureExpr->getObjectKind() == OK_Ordinary && CaptureExpr->isGLValue() &&
4688 Ref->getType()->isPointerType()) {
4689 Res = S.CreateBuiltinUnaryOp(CaptureExpr->getExprLoc(), UO_Deref, Ref);
4690 if (!Res.isUsable())
4691 return ExprError();
4692 }
4693 return S.DefaultLvalueConversion(Res.get());
4694}
4695
4696namespace {
4697// OpenMP directives parsed in this section are represented as a
4698// CapturedStatement with an associated statement. If a syntax error
4699// is detected during the parsing of the associated statement, the
4700// compiler must abort processing and close the CapturedStatement.
4701//
4702 // Combined directives such as 'target parallel' have more than one
4703 // nested CapturedStatement. This RAII ensures that we unwind out
4704 // of all the nested CapturedStatements when an error is found.
4705class CaptureRegionUnwinderRAII {
4706private:
4707 Sema &S;
4708 bool &ErrorFound;
4709 OpenMPDirectiveKind DKind = OMPD_unknown;
4710
4711public:
4712 CaptureRegionUnwinderRAII(Sema &S, bool &ErrorFound,
4713 OpenMPDirectiveKind DKind)
4714 : S(S), ErrorFound(ErrorFound), DKind(DKind) {}
4715 ~CaptureRegionUnwinderRAII() {
4716 if (ErrorFound) {
4717 int ThisCaptureLevel = S.getOpenMPCaptureLevels(DKind);
4718 while (--ThisCaptureLevel >= 0)
4719 S.ActOnCapturedRegionError();
4720 }
4721 }
4722};
4723} // namespace
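// Illustrative sketch: for a combined directive such as
//   #pragma omp target parallel for
// several capture regions may already be open when an error is hit in the
// associated statement; the destructor above calls ActOnCapturedRegionError()
// once per capture level so that Sema unwinds back to the enclosing context.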
4724
4725 void Sema::tryCaptureOpenMPLambdas(ValueDecl *V) {
4726 // Capture variables captured by reference in lambdas for target-based
4727 // directives.
4728 if (!CurContext->isDependentContext() &&
4729 (isOpenMPTargetExecutionDirective(DSAStack->getCurrentDirective()) ||
4730 isOpenMPTargetDataManagementDirective(
4731 DSAStack->getCurrentDirective()))) {
4732 QualType Type = V->getType();
4733 if (const auto *RD = Type.getCanonicalType()
4734 .getNonReferenceType()
4735 ->getAsCXXRecordDecl()) {
4736 bool SavedForceCaptureByReferenceInTargetExecutable =
4737 DSAStack->isForceCaptureByReferenceInTargetExecutable();
4738 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4739 /*V=*/true);
4740 if (RD->isLambda()) {
4741 llvm::DenseMap<const ValueDecl *, FieldDecl *> Captures;
4742 FieldDecl *ThisCapture;
4743 RD->getCaptureFields(Captures, ThisCapture);
4744 for (const LambdaCapture &LC : RD->captures()) {
4745 if (LC.getCaptureKind() == LCK_ByRef) {
4746 VarDecl *VD = cast<VarDecl>(LC.getCapturedVar());
4747 DeclContext *VDC = VD->getDeclContext();
4748 if (!VDC->Encloses(CurContext))
4749 continue;
4750 MarkVariableReferenced(LC.getLocation(), VD);
4751 } else if (LC.getCaptureKind() == LCK_This) {
4752 QualType ThisTy = getCurrentThisType();
4753 if (!ThisTy.isNull() &&
4754 Context.typesAreCompatible(ThisTy, ThisCapture->getType()))
4755 CheckCXXThisCapture(LC.getLocation());
4756 }
4757 }
4758 }
4759 DSAStack->setForceCaptureByReferenceInTargetExecutable(
4760 SavedForceCaptureByReferenceInTargetExecutable);
4761 }
4762 }
4763}
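// Illustrative example: given
//   int X = 0;
//   auto L = [&X]() { return X; };
//   #pragma omp target
//   L();
// the by-reference capture 'X' should be marked as referenced above so that
// it is captured (and mapped) for the target region.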
4764
4765 static bool checkOrderedOrderSpecified(Sema &S,
4766 const ArrayRef<OMPClause *> Clauses) {
4767 const OMPOrderedClause *Ordered = nullptr;
4768 const OMPOrderClause *Order = nullptr;
4769
4770 for (const OMPClause *Clause : Clauses) {
4771 if (Clause->getClauseKind() == OMPC_ordered)
4772 Ordered = cast<OMPOrderedClause>(Clause);
4773 else if (Clause->getClauseKind() == OMPC_order) {
4774 Order = cast<OMPOrderClause>(Clause);
4775 if (Order->getKind() != OMPC_ORDER_concurrent)
4776 Order = nullptr;
4777 }
4778 if (Ordered && Order)
4779 break;
4780 }
4781
4782 if (Ordered && Order) {
4783 S.Diag(Order->getKindKwLoc(),
4784 diag::err_omp_simple_clause_incompatible_with_ordered)
4785 << getOpenMPClauseName(OMPC_order)
4786 << getOpenMPSimpleClauseTypeName(OMPC_order, OMPC_ORDER_concurrent)
4787 << SourceRange(Order->getBeginLoc(), Order->getEndLoc());
4788 S.Diag(Ordered->getBeginLoc(), diag::note_omp_ordered_param)
4789 << 0 << SourceRange(Ordered->getBeginLoc(), Ordered->getEndLoc());
4790 return true;
4791 }
4792 return false;
4793}
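// Illustrative example: the check above rejects, e.g.,
//   #pragma omp for ordered order(concurrent)
// because an 'order(concurrent)' clause may not appear together with an
// 'ordered' clause on the same directive.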
4794
4795 StmtResult Sema::ActOnOpenMPRegionEnd(StmtResult S,
4796 ArrayRef<OMPClause *> Clauses) {
4797 handleDeclareVariantConstructTrait(DSAStack, DSAStack->getCurrentDirective(),
4798 /* ScopeEntry */ false);
4799 if (DSAStack->getCurrentDirective() == OMPD_atomic ||
4800 DSAStack->getCurrentDirective() == OMPD_critical ||
4801 DSAStack->getCurrentDirective() == OMPD_section ||
4802 DSAStack->getCurrentDirective() == OMPD_master ||
4803 DSAStack->getCurrentDirective() == OMPD_masked)
4804 return S;
4805
4806 bool ErrorFound = false;
4807 CaptureRegionUnwinderRAII CaptureRegionUnwinder(
4808 *this, ErrorFound, DSAStack->getCurrentDirective());
4809 if (!S.isUsable()) {
4810 ErrorFound = true;
4811 return StmtError();
4812 }
4813
4814 SmallVector<OpenMPDirectiveKind, 4> CaptureRegions;
4815 getOpenMPCaptureRegions(CaptureRegions, DSAStack->getCurrentDirective());
4816 OMPOrderedClause *OC = nullptr;
4817 OMPScheduleClause *SC = nullptr;
4818 SmallVector<const OMPLinearClause *, 4> LCs;
4819 SmallVector<const OMPClauseWithPreInit *, 4> PICs;
4820 // This is required for proper codegen.
4821 for (OMPClause *Clause : Clauses) {
4822 if (!LangOpts.OpenMPSimd &&
4823 (isOpenMPTaskingDirective(DSAStack->getCurrentDirective()) ||
4824 DSAStack->getCurrentDirective() == OMPD_target) &&
4825 Clause->getClauseKind() == OMPC_in_reduction) {
4826 // Capture taskgroup task_reduction descriptors inside the tasking regions
4827 // with the corresponding in_reduction items.
4828 auto *IRC = cast<OMPInReductionClause>(Clause);
4829 for (Expr *E : IRC->taskgroup_descriptors())
4830 if (E)
4831 MarkDeclarationsReferencedInExpr(E);
4832 }
4833 if (isOpenMPPrivate(Clause->getClauseKind()) ||
4834 Clause->getClauseKind() == OMPC_copyprivate ||
4835 (getLangOpts().OpenMPUseTLS &&
4836 getASTContext().getTargetInfo().isTLSSupported() &&
4837 Clause->getClauseKind() == OMPC_copyin)) {
4838 DSAStack->setForceVarCapturing(Clause->getClauseKind() == OMPC_copyin);
4839 // Mark all variables in private list clauses as used in inner region.
4840 for (Stmt *VarRef : Clause->children()) {
4841 if (auto *E = cast_or_null<Expr>(VarRef)) {
4842 MarkDeclarationsReferencedInExpr(E);
4843 }
4844 }
4845 DSAStack->setForceVarCapturing(/*V=*/false);
4846 } else if (isOpenMPLoopTransformationDirective(
4847 DSAStack->getCurrentDirective())) {
4848 assert(CaptureRegions.empty() &&
4849 "No captured regions in loop transformation directives.");
4850 } else if (CaptureRegions.size() > 1 ||
4851 CaptureRegions.back() != OMPD_unknown) {
4852 if (auto *C = OMPClauseWithPreInit::get(Clause))
4853 PICs.push_back(C);
4854 if (auto *C = OMPClauseWithPostUpdate::get(Clause)) {
4855 if (Expr *E = C->getPostUpdateExpr())
4856 MarkDeclarationsReferencedInExpr(E);
4857 }
4858 }
4859 if (Clause->getClauseKind() == OMPC_schedule)
4860 SC = cast<OMPScheduleClause>(Clause);
4861 else if (Clause->getClauseKind() == OMPC_ordered)
4862 OC = cast<OMPOrderedClause>(Clause);
4863 else if (Clause->getClauseKind() == OMPC_linear)
4864 LCs.push_back(cast<OMPLinearClause>(Clause));
4865 }
4866 // Capture allocator expressions if used.
4867 for (Expr *E : DSAStack->getInnerAllocators())
4868 MarkDeclarationsReferencedInExpr(E);
4869 // OpenMP, 2.7.1 Loop Construct, Restrictions
4870 // The nonmonotonic modifier cannot be specified if an ordered clause is
4871 // specified.
4872 if (SC &&
4873 (SC->getFirstScheduleModifier() == OMPC_SCHEDULE_MODIFIER_nonmonotonic ||
4874 SC->getSecondScheduleModifier() ==
4875 OMPC_SCHEDULE_MODIFIER_nonmonotonic) &&
4876 OC) {
4877 Diag(SC->getFirstScheduleModifier() == OMPC_SCHEDULE_MODIFIER_nonmonotonic
4878 ? SC->getFirstScheduleModifierLoc()
4879 : SC->getSecondScheduleModifierLoc(),
4880 diag::err_omp_simple_clause_incompatible_with_ordered)
4881 << getOpenMPClauseName(OMPC_schedule)
4882 << getOpenMPSimpleClauseTypeName(OMPC_schedule,
4883 OMPC_SCHEDULE_MODIFIER_nonmonotonic)
4884 << SourceRange(OC->getBeginLoc(), OC->getEndLoc());
4885 ErrorFound = true;
4886 }
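// Illustrative example:
//   #pragma omp for schedule(nonmonotonic: dynamic) ordered
// should be diagnosed here, since the nonmonotonic schedule modifier cannot
// be combined with an 'ordered' clause on the same loop construct.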
4887 // OpenMP 5.0, 2.9.2 Worksharing-Loop Construct, Restrictions.
4888 // If an order(concurrent) clause is present, an ordered clause may not appear
4889 // on the same directive.
4890 if (checkOrderedOrderSpecified(*this, Clauses))
4891 ErrorFound = true;
4892 if (!LCs.empty() && OC && OC->getNumForLoops()) {
4893 for (const OMPLinearClause *C : LCs) {
4894 Diag(C->getBeginLoc(), diag::err_omp_linear_ordered)
4895 << SourceRange(OC->getBeginLoc(), OC->getEndLoc());
4896 }
4897 ErrorFound = true;
4898 }
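// Illustrative example:
//   #pragma omp for linear(j) ordered(2)
// should be diagnosed here: a 'linear' clause may not be used on a loop
// construct whose 'ordered' clause has a parameter.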
4899 if (isOpenMPWorksharingDirective(DSAStack->getCurrentDirective()) &&
4900 isOpenMPSimdDirective(DSAStack->getCurrentDirective()) && OC &&
4901 OC->getNumForLoops()) {
4902 Diag(OC->getBeginLoc(), diag::err_omp_ordered_simd)
4903 << getOpenMPDirectiveName(DSAStack->getCurrentDirective());
4904 ErrorFound = true;
4905 }
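// Illustrative example:
//   #pragma omp for simd ordered(1)
// should be diagnosed here: an 'ordered' clause with a parameter is not
// allowed on simd variants of worksharing-loop directives.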
4906 if (ErrorFound) {
4907 return StmtError();
4908 }
4909 StmtResult SR = S;
4910 unsigned CompletedRegions = 0;
4911 for (OpenMPDirectiveKind ThisCaptureRegion : llvm::reverse(CaptureRegions)) {
4912 // Mark all variables in private list clauses as used in inner region.
4913 // Required for proper codegen of combined directives.
4914 // TODO: add processing for other clauses.
4915 if (ThisCaptureRegion != OMPD_unknown) {
4916 for (const clang::OMPClauseWithPreInit *C : PICs) {
4917 OpenMPDirectiveKind CaptureRegion = C->getCaptureRegion();
4918 // Find the particular capture region for the clause if the
4919 // directive is a combined one with multiple capture regions.
4920 // If the directive is not a combined one, the capture region
4921 // associated with the clause is OMPD_unknown and is generated
4922 // only once.
4923 if (CaptureRegion == ThisCaptureRegion ||
4924 CaptureRegion == OMPD_unknown) {
4925 if (auto *DS = cast_or_null<DeclStmt>(C->getPreInitStmt())) {
4926 for (Decl *D : DS->decls())
4927 MarkVariableReferenced(D->getLocation(), cast<VarDecl>(D));
4928 }
4929 }
4930 }
4931 }
4932 if (ThisCaptureRegion == OMPD_target) {
4933 // Capture allocator traits in the target region. They are used implicitly
4934 // and, thus, are not captured by default.
4935 for (OMPClause *C : Clauses) {
4936 if (const auto *UAC = dyn_cast<OMPUsesAllocatorsClause>(C)) {
4937 for (unsigned I = 0, End = UAC->getNumberOfAllocators(); I < End;
4938 ++I) {
4939 OMPUsesAllocatorsClause::Data D = UAC->getAllocatorData(I);
4940 if (Expr *E = D.AllocatorTraits)
4941 MarkDeclarationsReferencedInExpr(E);
4942 }
4943 continue;
4944 }
4945 }
4946 }
4947 if (ThisCaptureRegion == OMPD_parallel) {
4948 // Capture temp arrays for inscan reductions and locals in aligned
4949 // clauses.
4950 for (OMPClause *C : Clauses) {
4951 if (auto *RC = dyn_cast<OMPReductionClause>(C)) {
4952 if (RC->getModifier() != OMPC_REDUCTION_inscan)
4953 continue;
4954 for (Expr *E : RC->copy_array_temps())
4955 MarkDeclarationsReferencedInExpr(E);
4956 }
4957 if (auto *AC = dyn_cast<OMPAlignedClause>(C)) {
4958 for (Expr *E : AC->varlists())
4959 MarkDeclarationsReferencedInExpr(E);
4960 }
4961 }
4962 }
4963 if (++CompletedRegions == CaptureRegions.size())
4964 DSAStack->setBodyComplete();
4965 SR = ActOnCapturedRegionEnd(SR.get());
4966 }
4967 return SR;
4968}
4969
4970static bool checkCancelRegion(Sema &SemaRef, OpenMPDirectiveKind CurrentRegion,
4971 OpenMPDirectiveKind CancelRegion,
4972 SourceLocation StartLoc) {
4973 // CancelRegion is only needed for cancel and cancellation_point.
4974 if (CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_cancellation_point)
4975 return false;
4976
4977 if (CancelRegion == OMPD_parallel || CancelRegion == OMPD_for ||
4978 CancelRegion == OMPD_sections || CancelRegion == OMPD_taskgroup)
4979 return false;
4980
4981 SemaRef.Diag(StartLoc, diag::err_omp_wrong_cancel_region)
4982 << getOpenMPDirectiveName(CancelRegion);
4983 return true;
4984}
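// Illustrative example: a construct-type-clause other than parallel, for,
// sections, or taskgroup, e.g.
//   #pragma omp cancel taskloop
// should reach the diagnostic above.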
4985
4986static bool checkNestingOfRegions(Sema &SemaRef, const DSAStackTy *Stack,
4987 OpenMPDirectiveKind CurrentRegion,
4988 const DeclarationNameInfo &CurrentName,
4989 OpenMPDirectiveKind CancelRegion,
4990 OpenMPBindClauseKind BindKind,
4991 SourceLocation StartLoc) {
4992 if (Stack->getCurScope()) {
4993 OpenMPDirectiveKind ParentRegion = Stack->getParentDirective();
4994 OpenMPDirectiveKind OffendingRegion = ParentRegion;
4995 bool NestingProhibited = false;
4996 bool CloseNesting = true;
4997 bool OrphanSeen = false;
4998 enum {
4999 NoRecommend,
5000 ShouldBeInParallelRegion,
5001 ShouldBeInOrderedRegion,
5002 ShouldBeInTargetRegion,
5003 ShouldBeInTeamsRegion,
5004 ShouldBeInLoopSimdRegion,
5005 } Recommend = NoRecommend;
5006 if (SemaRef.LangOpts.OpenMP >= 51 && Stack->isParentOrderConcurrent() &&
5007 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_loop &&
5008 CurrentRegion != OMPD_parallel &&
5009 !isOpenMPCombinedParallelADirective(CurrentRegion)) {
5010 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region_order)
5011 << getOpenMPDirectiveName(CurrentRegion);
5012 return true;
5013 }
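// Illustrative example (OpenMP >= 5.1): inside
//   #pragma omp for order(concurrent)
// a nested '#pragma omp critical' should be rejected above; only simd, loop,
// and parallel (including combined parallel) constructs may appear in a
// region that executes concurrently.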
5014 if (isOpenMPSimdDirective(ParentRegion) &&
5015 ((SemaRef.LangOpts.OpenMP <= 45 && CurrentRegion != OMPD_ordered) ||
5016 (SemaRef.LangOpts.OpenMP >= 50 && CurrentRegion != OMPD_ordered &&
5017 CurrentRegion != OMPD_simd && CurrentRegion != OMPD_atomic &&
5018 CurrentRegion != OMPD_scan))) {
5019 // OpenMP [2.16, Nesting of Regions]
5020 // OpenMP constructs may not be nested inside a simd region.
5021 // OpenMP [2.8.1,simd Construct, Restrictions]
5022 // An ordered construct with the simd clause is the only OpenMP
5023 // construct that can appear in the simd region.
5024 // Allowing a SIMD construct nested in another SIMD construct is an
5025 // extension. The OpenMP 4.5 spec does not allow it. Issue a warning
5026 // message.
5027 // OpenMP 5.0 [2.9.3.1, simd Construct, Restrictions]
5028 // The only OpenMP constructs that can be encountered during execution of
5029 // a simd region are the atomic construct, the loop construct, the simd
5030 // construct and the ordered construct with the simd clause.
5031 SemaRef.Diag(StartLoc, (CurrentRegion != OMPD_simd)
5032 ? diag::err_omp_prohibited_region_simd
5033 : diag::warn_omp_nesting_simd)
5034 << (SemaRef.LangOpts.OpenMP >= 50 ? 1 : 0);
5035 return CurrentRegion != OMPD_simd;
5036 }
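// Illustrative example: inside
//   #pragma omp simd
//   for (int I = 0; I < N; ++I) { ... }
// a nested '#pragma omp for' is an error (prohibited region), while a nested
// '#pragma omp simd' only produces a nesting warning and is accepted as an
// extension.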
5037 if (ParentRegion == OMPD_atomic) {
5038 // OpenMP [2.16, Nesting of Regions]
5039 // OpenMP constructs may not be nested inside an atomic region.
5040 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region_atomic);
5041 return true;
5042 }
5043 if (CurrentRegion == OMPD_section) {
5044 // OpenMP [2.7.2, sections Construct, Restrictions]
5045 // Orphaned section directives are prohibited. That is, the section
5046 // directives must appear within the sections construct and must not be
5047 // encountered elsewhere in the sections region.
5048 if (ParentRegion != OMPD_sections &&
5049 ParentRegion != OMPD_parallel_sections) {
5050 SemaRef.Diag(StartLoc, diag::err_omp_orphaned_section_directive)
5051 << (ParentRegion != OMPD_unknown)
5052 << getOpenMPDirectiveName(ParentRegion);
5053 return true;
5054 }
5055 return false;
5056 }
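// Illustrative example: a '#pragma omp section' that appears directly inside
// '#pragma omp parallel' (rather than inside '#pragma omp sections' or
// '#pragma omp parallel sections') is reported as an orphaned section
// directive above.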
5057 // Allow some constructs (except teams and cancellation constructs) to be
5058 // orphaned (they could be used in functions that are called from OpenMP
5059 // regions with the required preconditions).
5060 if (ParentRegion == OMPD_unknown &&
5061 !isOpenMPNestingTeamsDirective(CurrentRegion) &&
5062 CurrentRegion != OMPD_cancellation_point &&
5063 CurrentRegion != OMPD_cancel && CurrentRegion != OMPD_scan)
5064 return false;
5065 if (CurrentRegion == OMPD_cancellation_point ||
5066 CurrentRegion == OMPD_cancel) {
5067 // OpenMP [2.16, Nesting of Regions]
5068 // A cancellation point construct for which construct-type-clause is
5069 // taskgroup must be nested inside a task construct. A cancellation
5070 // point construct for which construct-type-clause is not taskgroup must
5071 // be closely nested inside an OpenMP construct that matches the type
5072 // specified in construct-type-clause.
5073 // A cancel construct for which construct-type-clause is taskgroup must be
5074 // nested inside a task construct. A cancel construct for which
5075 // construct-type-clause is not taskgroup must be closely nested inside an
5076 // OpenMP construct that matches the type specified in
5077 // construct-type-clause.
5078 NestingProhibited =
5079 !((CancelRegion == OMPD_parallel &&
5080 (ParentRegion == OMPD_parallel ||
5081 ParentRegion == OMPD_target_parallel)) ||
5082 (CancelRegion == OMPD_for &&
5083 (ParentRegion == OMPD_for || ParentRegion == OMPD_parallel_for ||
5084 ParentRegion == OMPD_target_parallel_for ||
5085 ParentRegion == OMPD_distribute_parallel_for ||
5086 ParentRegion == OMPD_teams_distribute_parallel_for ||
5087 ParentRegion == OMPD_target_teams_distribute_parallel_for)) ||
5088 (CancelRegion == OMPD_taskgroup &&
5089 (ParentRegion == OMPD_task ||
5090 (SemaRef.getLangOpts().OpenMP >= 50 &&
5091 (ParentRegion == OMPD_taskloop ||
5092 ParentRegion == OMPD_master_taskloop ||
5093 ParentRegion == OMPD_masked_taskloop ||
5094 ParentRegion == OMPD_parallel_masked_taskloop ||
5095 ParentRegion == OMPD_parallel_master_taskloop)))) ||
5096 (CancelRegion == OMPD_sections &&
5097 (ParentRegion == OMPD_section || ParentRegion == OMPD_sections ||
5098 ParentRegion == OMPD_parallel_sections)));
5099 OrphanSeen = ParentRegion == OMPD_unknown;
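// Illustrative example:
//   #pragma omp parallel
//   {
//     #pragma omp cancel for   // prohibited: 'cancel for' must be closely
//   }                          // nested in a worksharing-loop region
// whereas '#pragma omp cancel for' placed inside a '#pragma omp for' region
// is accepted.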
5100 } else if (CurrentRegion == OMPD_master || CurrentRegion == OMPD_masked) {
5101 // OpenMP 5.1 [2.22, Nesting of Regions]
5102 // A masked region may not be closely nested inside a worksharing, loop,
5103 // atomic, task, or taskloop region.
5104 NestingProhibited = isOpenMPWorksharingDirective(ParentRegion) ||
5105 isOpenMPGenericLoopDirective(ParentRegion) ||
5106 isOpenMPTaskingDirective(ParentRegion);
5107 } else if (CurrentRegion == OMPD_critical && CurrentName.getName()) {
5108 // OpenMP [2.16, Nesting of Regions]
5109 // A critical region may not be nested (closely or otherwise) inside a
5110 // critical region with the same name. Note that this restriction is not
5111 // sufficient to prevent deadlock.
5112 SourceLocation PreviousCriticalLoc;
5113 bool DeadLock = Stack->hasDirective(
5114 [CurrentName, &PreviousCriticalLoc](OpenMPDirectiveKind K,
5115 const DeclarationNameInfo &DNI,
5116 SourceLocation Loc) {
5117 if (K == OMPD_critical && DNI.getName() == CurrentName.getName()) {
5118 PreviousCriticalLoc = Loc;
5119 return true;
5120 }
5121 return false;
5122 },
5123 false /* skip top directive */);
5124 if (DeadLock) {
5125 SemaRef.Diag(StartLoc,
5126 diag::err_omp_prohibited_region_critical_same_name)
5127 << CurrentName.getName();
5128 if (PreviousCriticalLoc.isValid())
5129 SemaRef.Diag(PreviousCriticalLoc,
5130 diag::note_omp_previous_critical_region);
5131 return true;
5132 }
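// Illustrative example:
//   #pragma omp critical(A)
//   {
//     #pragma omp critical(A)   // diagnosed above: guaranteed deadlock
//     { ... }
//   }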
5133 } else if (CurrentRegion == OMPD_barrier || CurrentRegion == OMPD_scope) {
5134 // OpenMP 5.1 [2.22, Nesting of Regions]
5135 // A scope region may not be closely nested inside a worksharing, loop,
5136 // task, taskloop, critical, ordered, atomic, or masked region.
5137 // OpenMP 5.1 [2.22, Nesting of Regions]
5138 // A barrier region may not be closely nested inside a worksharing, loop,
5139 // task, taskloop, critical, ordered, atomic, or masked region.
5140 NestingProhibited =
5141 isOpenMPWorksharingDirective(ParentRegion) ||
5142 isOpenMPGenericLoopDirective(ParentRegion) ||
5143 isOpenMPTaskingDirective(ParentRegion) ||
5144 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
5145 ParentRegion == OMPD_parallel_master ||
5146 ParentRegion == OMPD_parallel_masked ||
5147 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
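// Illustrative example: a '#pragma omp barrier' placed directly inside
// '#pragma omp single' or '#pragma omp critical' should be rejected by the
// check above.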
5148 } else if (isOpenMPWorksharingDirective(CurrentRegion) &&
5149 !isOpenMPParallelDirective(CurrentRegion) &&
5150 !isOpenMPTeamsDirective(CurrentRegion)) {
5151 // OpenMP 5.1 [2.22, Nesting of Regions]
5152 // A loop region that binds to a parallel region or a worksharing region
5153 // may not be closely nested inside a worksharing, loop, task, taskloop,
5154 // critical, ordered, atomic, or masked region.
5155 NestingProhibited =
5156 isOpenMPWorksharingDirective(ParentRegion) ||
5157 isOpenMPGenericLoopDirective(ParentRegion) ||
5158 isOpenMPTaskingDirective(ParentRegion) ||
5159 ParentRegion == OMPD_master || ParentRegion == OMPD_masked ||
5160 ParentRegion == OMPD_parallel_master ||
5161 ParentRegion == OMPD_parallel_masked ||
5162 ParentRegion == OMPD_critical || ParentRegion == OMPD_ordered;
5163 Recommend = ShouldBeInParallelRegion;
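// Illustrative example: a '#pragma omp for' closely nested inside another
// '#pragma omp for' region is rejected here; the recommendation is to wrap
// the inner loop in its own parallel region.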
5164 } else if (CurrentRegion == OMPD_ordered) {
5165 // OpenMP [2.16, Nesting of Regions]
5166 // An ordered region may not be closely nested inside a critical,
5167 // atomic, or explicit task region.
5168 // An ordered region must be closely nested inside a loop region (or
5169 // parallel loop region) with an ordered clause.
5170 // OpenMP [2.8.1,simd Construct, Restrictions]
5171 // An ordered construct with the simd clause is the only OpenMP construct
5172 // that can appear in the simd region.
5173 NestingProhibited = ParentRegion == OMPD_critical ||
5174 isOpenMPTaskingDirective(ParentRegion) ||
5175 !(isOpenMPSimdDirective(ParentRegion) ||
5176 Stack->isParentOrderedRegion());
5177 Recommend = ShouldBeInOrderedRegion;
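// Illustrative example: '#pragma omp ordered' is accepted inside
// '#pragma omp for ordered' (or inside a simd region, for the simd form),
// but rejected inside a plain '#pragma omp for', a critical region, or an
// explicit task.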
5178 } else if (isOpenMPNestingTeamsDirective(CurrentRegion)) {
5179 // OpenMP [2.16, Nesting of Regions]
5180 // If specified, a teams construct must be contained within a target
5181 // construct.
5182 NestingProhibited =
5183 (SemaRef.LangOpts.OpenMP <= 45 && ParentRegion != OMPD_target) ||
5184 (SemaRef.LangOpts.OpenMP >= 50 && ParentRegion != OMPD_unknown &&
5185 ParentRegion != OMPD_target);
5186 OrphanSeen = ParentRegion == OMPD_unknown;
5187 Recommend = ShouldBeInTargetRegion;
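// Illustrative example: with -fopenmp-version=45,
//   #pragma omp parallel
//   {
//     #pragma omp teams   // rejected: teams must be nested in a target region
//     ...
//   }
// while from OpenMP 5.0 an orphaned 'teams' construct (unknown parent region)
// is also allowed.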
5188 } else if (CurrentRegion == OMPD_scan) {
5189 // OpenMP 5.0 [2.9.6, scan Directive, Restrictions]
5190 // A scan region must be closely nested inside a simd, for, for simd,
5191 // parallel for, or parallel for simd region.
5192 NestingProhibited =
5193 SemaRef.LangOpts.OpenMP < 50 ||
5194 (ParentRegion != OMPD_simd && ParentRegion != OMPD_for &&
5195 ParentRegion != OMPD_for_simd && ParentRegion != OMPD_parallel_for &&
5196 ParentRegion != OMPD_parallel_for_simd);
5197 OrphanSeen = ParentRegion == OMPD_unknown;
5198 Recommend = ShouldBeInLoopSimdRegion;
5199 }
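// Illustrative example (OpenMP >= 5.0):
//   #pragma omp for reduction(inscan, +: Sum)
//   for (int I = 0; I < N; ++I) {
//     Sum += A[I];
//     #pragma omp scan inclusive(Sum)
//     B[I] = Sum;
//   }
// is accepted, whereas a 'scan' directive placed directly inside a parallel
// or task region is rejected above.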
5200 if (!NestingProhibited &&
5201 !isOpenMPTargetExecutionDirective(CurrentRegion) &&
5202 !isOpenMPTargetDataManagementDirective(CurrentRegion) &&
5203 (ParentRegion == OMPD_teams || ParentRegion == OMPD_target_teams)) {
5204 // OpenMP [5.1, 2.22, Nesting of Regions]
5205 // distribute, distribute simd, distribute parallel worksharing-loop,
5206 // distribute parallel worksharing-loop SIMD, loop, parallel regions,
5207 // including any parallel regions arising from combined constructs,
5208 // omp_get_num_teams() regions, and omp_get_team_num() regions are the
5209 // only OpenMP regions that may be strictly nested inside the teams
5210 // region.
5211 //
5212 // As an extension, we permit atomic within teams as well.
5213 NestingProhibited = !isOpenMPParallelDirective(CurrentRegion) &&
5214 !isOpenMPDistributeDirective(CurrentRegion) &&
5215 CurrentRegion != OMPD_loop &&
5216 !(SemaRef.getLangOpts().OpenMPExtensions &&
5217 CurrentRegion == OMPD_atomic);
5218 Recommend = ShouldBeInParallelRegion;
5219 }
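// Illustrative example: a bare '#pragma omp for' (or 'single', 'sections',
// etc.) placed directly inside '#pragma omp teams' is diagnosed here; only
// distribute, parallel, and loop constructs (plus 'atomic' as an extension)
// may be strictly nested in a teams region.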
5220 if (!NestingProhibited && CurrentRegion == OMPD_loop) {
5221 // OpenMP [5.1, 2.11.7, loop Construct, Restrictions]
5222 // If the bind clause is present on the loop construct and binding is
5223 // teams then the corresponding loop region must be strictly nested inside
5224 // a teams region.
5225 NestingProhibited = BindKind == OMPC_BIND_teams &&
5226 ParentRegion != OMPD_teams &&
5227 ParentRegion != OMPD_target_teams;
5228 Recommend = ShouldBeInTeamsRegion;
5229 }
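// Illustrative example: '#pragma omp loop bind(teams)' is accepted when its
// parent region is 'teams' or 'target teams', and diagnosed above when it is
// strictly nested inside, e.g., a 'parallel' region.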
5230 if (!NestingProhibited &&
5231 isOpenMPNestingDistributeDirective(CurrentRegion)) {
5232 // OpenMP 4.5 [2.17 Nesting of Regions]
5233 // The region associated with the distribute construct must be strictly
5234 // nested inside a teams region
5235 NestingProhibited =
5236 (ParentRegion != OMPD_teams && ParentRegion != OMPD_target_teams);
5237 Recommend = ShouldBeInTeamsRegion;
5238 }
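// Illustrative example: '#pragma omp distribute' closely nested inside
// '#pragma omp parallel' is diagnosed above; it must be strictly nested
// inside a 'teams' or 'target teams' region.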
5239 if (!NestingProhibited &&
5240 (isOpenMPTargetExecutionDirective(CurrentRegion) ||
5241 isOpenMPTargetDataManagementDirective(CurrentRegion))) {
5242 // OpenMP 4.5 [2.17 Nesting of Regions]
5243 // If a target, target update, target data, target enter data, or
5244 // target exit data construct is encountered during execution of a
5245 // target region, the behavior is unspecified.
5246 NestingProhibited = Stack->hasDirective(
5247 [&OffendingRegion](OpenMPDirectiveKind K, const DeclarationNameInfo &,
5248 SourceLocation) {
5249 if (isOpenMPTargetExecutionDirective(K)) {
5250 OffendingRegion = K;
5251 return true;
5252 }
5253 return false;
5254 },
5255 false /* don't skip top directive */);
5256 CloseNesting = false;
5257 }
5258 if (NestingProhibited) {
5259 if (OrphanSeen) {
5260 SemaRef.Diag(StartLoc, diag::err_omp_orphaned_device_directive)
5261 << getOpenMPDirectiveName(CurrentRegion) << Recommend;
5262 } else {
5263 SemaRef.Diag(StartLoc, diag::err_omp_prohibited_region)
5264 << CloseNesting << getOpenMPDirectiveName(OffendingRegion)
5265 << Recommend << getOpenMPDirectiveName(CurrentRegion);
5266 }
5267 return true;
5268 }
5269 }
5270 return false;
5271}
5272