34class ConditionalEvaluationFinder
36 bool foundConditional =
false;
39 bool found()
const {
return foundConditional; }
41 bool VisitAbstractConditionalOperator(AbstractConditionalOperator *) {
42 foundConditional =
true;
47 bool TraverseLambdaExpr(
LambdaExpr *) {
return true; }
48 bool TraverseBlockExpr(BlockExpr *) {
return true; }
49 bool TraverseStmtExpr(StmtExpr *) {
return true; }
65 mlir::Location loc = builder.getUnknownLoc();
78 mlir::OpBuilder::InsertionGuard guard(builder);
80 builder.createFlagStore(loc,
false, active.
getPointer());
84 builder.createFlagStore(loc,
true, active.
getPointer());
91 : cgf(cgf), cleanups(cgf), scope(
nullptr),
94 assert(subExpr &&
"ExprWithCleanups always has a sub-expression");
95 ConditionalEvaluationFinder finder;
96 finder.TraverseStmt(
const_cast<Expr *
>(subExpr));
98 mlir::Location loc = cgf.builder.getUnknownLoc();
99 cir::CleanupKind cleanupKind = cgf.getLangOpts().Exceptions
100 ? cir::CleanupKind::All
101 : cir::CleanupKind::Normal;
102 scope = cir::CleanupScopeOp::create(
103 cgf.builder, loc, cleanupKind,
105 [&](mlir::OpBuilder &
b, mlir::Location loc) {},
107 [&](mlir::OpBuilder &
b, mlir::Location loc) {});
108 cgf.builder.setInsertionPointToEnd(&scope.getBodyRegion().front());
114 assert(!exited &&
"FullExprCleanupScope::exit called twice");
117 size_t oldSize = deferredCleanupStackSize;
118 bool hasDeferredCleanups =
119 cgf.deferredConditionalCleanupStack.size() > oldSize;
122 cgf.deferredConditionalCleanupStack.truncate(oldSize);
123 cleanups.forceCleanup(valuesToReload);
129 for (mlir::Value *valPtr : valuesToReload) {
130 mlir::Value val = *valPtr;
135 Address temp = cgf.createDefaultAlignTempAlloca(val.getType(), val.getLoc(),
137 tempAllocas.push_back(temp);
138 cgf.builder.createStore(val.getLoc(), val, temp);
143 cleanups.forceCleanup();
147 mlir::OpBuilder::InsertionGuard guard(cgf.builder);
148 mlir::Block &lastBodyBlock = scope.getBodyRegion().back();
149 cgf.builder.setInsertionPointToEnd(&lastBodyBlock);
150 if (lastBodyBlock.empty() ||
151 !lastBodyBlock.back().hasTrait<mlir::OpTrait::IsTerminator>())
152 cgf.builder.createYield(scope.getLoc());
157 mlir::OpBuilder::InsertionGuard guard(cgf.builder);
158 mlir::Block &cleanupBlock = scope.getCleanupRegion().front();
159 cgf.builder.setInsertionPointToEnd(&cleanupBlock);
161 if (hasDeferredCleanups) {
163 cgf.deferredConditionalCleanupStack.begin() + oldSize,
164 cgf.deferredConditionalCleanupStack.end()))) {
165 if (entry.activeFlag.isValid()) {
167 cgf.builder.createLoad(scope.getLoc(), entry.activeFlag);
169 cgf.builder, scope.getLoc(), flag,
false,
170 [&](mlir::OpBuilder &
b, mlir::Location loc) {
171 cgf.emitDestroy(entry.addr, entry.type, entry.destroyer);
172 cgf.builder.createYield(loc);
175 cgf.emitDestroy(entry.addr, entry.type, entry.destroyer);
179 cgf.builder.createYield(scope.getLoc());
182 cgf.deferredConditionalCleanupStack.truncate(oldSize);
183 cgf.builder.setInsertionPointAfter(scope);
186 for (
auto [addr, valPtr] : llvm::zip(tempAllocas, valuesToReload)) {
189 *valPtr = cgf.builder.createLoad(valPtr->getLoc(), addr);
// Out-of-line definition of the virtual anchor method: pins Cleanup's vtable
// to this translation unit (standard LLVM idiom to avoid weak vtables).
void EHScopeStack::Cleanup::anchor() {}
201 stable_iterator si = getInnermostNormalCleanup();
202 stable_iterator se = stable_end();
207 si =
cleanup.getEnclosingNormalCleanup();
213char *EHScopeStack::allocate(
size_t size) {
214 size = llvm::alignTo(size, ScopeStackAlignment);
215 if (!startOfBuffer) {
216 unsigned capacity = llvm::PowerOf2Ceil(std::max<size_t>(size, 1024ul));
217 startOfBuffer = std::make_unique<char[]>(capacity);
218 startOfData = endOfBuffer = startOfBuffer.get() + capacity;
219 }
else if (
static_cast<size_t>(startOfData - startOfBuffer.get()) < size) {
220 unsigned currentCapacity = endOfBuffer - startOfBuffer.get();
221 unsigned usedCapacity =
222 currentCapacity - (startOfData - startOfBuffer.get());
223 unsigned requiredCapacity = usedCapacity + size;
226 unsigned newCapacity = llvm::PowerOf2Ceil(requiredCapacity);
228 std::unique_ptr<char[]> newStartOfBuffer =
229 std::make_unique<char[]>(newCapacity);
230 char *newEndOfBuffer = newStartOfBuffer.get() + newCapacity;
231 char *newStartOfData = newEndOfBuffer - usedCapacity;
232 memcpy(newStartOfData, startOfData, usedCapacity);
233 startOfBuffer.swap(newStartOfBuffer);
234 endOfBuffer = newEndOfBuffer;
235 startOfData = newStartOfData;
238 assert(startOfBuffer.get() + size <= startOfData);
/// Release `size` bytes from the top of the cleanup-scope buffer. The buffer
/// is filled downward from endOfBuffer (see allocate: startOfData sits at
/// endOfBuffer - usedCapacity), so freeing advances startOfData upward by the
/// same ScopeStackAlignment-rounded amount that allocate reserved.
void EHScopeStack::deallocate(size_t size) {
  startOfData += llvm::alignTo(size, ScopeStackAlignment);
}
252 bool skipCleanupScope =
false;
254 cir::CleanupKind cleanupKind = cir::CleanupKind::All;
255 if (isEHCleanup && cgf->getLangOpts().Exceptions) {
257 isNormalCleanup ? cir::CleanupKind::All : cir::CleanupKind::EH;
263 cleanupKind = cir::CleanupKind::Normal;
265 skipCleanupScope =
true;
268 cir::CleanupScopeOp cleanupScope =
nullptr;
269 if (!skipCleanupScope) {
270 CIRGenBuilderTy &builder = cgf->getBuilder();
271 mlir::Location loc = builder.getUnknownLoc();
272 cleanupScope = cir::CleanupScopeOp::create(
273 builder, loc, cleanupKind,
275 [&](mlir::OpBuilder &
b, mlir::Location loc) {
279 [&](mlir::OpBuilder &
b, mlir::Location loc) {
283 builder.setInsertionPointToEnd(&cleanupScope.getBodyRegion().back());
290 if (innermostEHScope != stable_end() &&
294 EHCleanupScope *scope =
new (buffer)
295 EHCleanupScope(isNormalCleanup, isEHCleanup, size, cleanupScope,
296 innermostNormalCleanup, innermostEHScope);
299 innermostNormalCleanup = stable_begin();
302 innermostEHScope = stable_begin();
304 if (isLifetimeMarker)
305 cgf->cgm.errorNYI(
"push lifetime marker cleanup");
308 if (cgf->getLangOpts().EHAsynch && isEHCleanup && !isLifetimeMarker &&
309 cgf->getTarget().getCXXABI().isMicrosoft())
310 cgf->cgm.errorNYI(
"push seh cleanup");
316 assert(!empty() &&
"popping exception stack when not empty");
320 innermostNormalCleanup =
cleanup.getEnclosingNormalCleanup();
321 innermostEHScope =
cleanup.getEnclosingEHScope();
322 deallocate(
cleanup.getAllocatedSize());
324 cir::CleanupScopeOp cleanupScope =
cleanup.getCleanupScopeOp();
326 auto *block = &cleanupScope.getBodyRegion().back();
327 if (!block->mightHaveTerminator()) {
328 mlir::OpBuilder::InsertionGuard guard(cgf->getBuilder());
329 cgf->getBuilder().setInsertionPointToEnd(block);
330 cir::YieldOp::create(cgf->getBuilder(),
331 cgf->getBuilder().getUnknownLoc());
333 cgf->getBuilder().setInsertionPointAfter(cleanupScope);
341 for (stable_iterator si = getInnermostEHScope(); si != stable_end();) {
342 if (
auto *
cleanup = dyn_cast<EHCleanupScope>(&*find(si))) {
343 if (
cleanup->isLifetimeMarker()) {
358 mlir::Operation *dominatingIP) {
362 "cleanup block is neither normal nor EH?");
376 if (!var.isValid()) {
377 mlir::Location loc = builder.getUnknownLoc();
380 loc,
"cleanup.isactive");
383 assert(dominatingIP &&
"no existing variable and no dominating IP!");
386 mlir::Value val = builder.
getBool(
true, loc);
389 mlir::OpBuilder::InsertionGuard guard(builder);
390 builder.setInsertionPoint(dominatingIP);
398 mlir::Location loc = builder.getUnknownLoc();
404 mlir::Operation *dominatingIP) {
405 assert(
c !=
ehStack.stable_end() &&
"deactivating bottom of stack?");
407 assert(scope.
isActive() &&
"double deactivation");
424 EHScopeStack::Cleanup *
cleanup,
428 mlir::Block &block = cleanupScope.getCleanupRegion().back();
430 mlir::OpBuilder::InsertionGuard guard(builder);
431 builder.setInsertionPointToStart(&block);
437 mlir::Location loc = cleanupScope.getLoc();
439 cir::IfOp::create(builder, loc, isActive,
442 [&](mlir::OpBuilder &, mlir::Location) {
445 "cleanup ended with no insertion point?");
450 assert(cgf.
haveInsertPoint() &&
"cleanup ended with no insertion point?");
453 mlir::Block &cleanupRegionLastBlock = cleanupScope.getCleanupRegion().back();
454 if (cleanupRegionLastBlock.empty() ||
455 !cleanupRegionLastBlock.back().hasTrait<mlir::OpTrait::IsTerminator>()) {
456 mlir::OpBuilder::InsertionGuard guardCase(builder);
457 builder.setInsertionPointToEnd(&cleanupRegionLastBlock);
468 .walk([&](mlir::Operation *op) {
470 return mlir::WalkResult::interrupt();
471 return mlir::WalkResult::advance();
481 assert(!
ehStack.empty() &&
"cleanup stack is empty!");
486 assert(cleanupScope &&
"CleanupScopeOp is nullptr");
505 if (forDeactivation && requiresNormalCleanup) {
512 mlir::Location loc = builder.getUnknownLoc();
519 mlir::OpBuilder::InsertionGuard guard(builder);
520 builder.setInsertionPoint(cleanupScope);
521 builder.createFlagStore(loc,
true, activeFlag.
getPointer());
525 assert(builder.getInsertionBlock() ==
526 &cleanupScope.getBodyRegion().back() &&
527 "expected insertion point in cleanup body");
528 builder.createFlagStore(loc,
false, activeFlag.
getPointer());
535 if (requiresEHCleanup)
536 cleanupScope.setCleanupKind(cir::CleanupKind::EH);
537 requiresNormalCleanup =
false;
549 if (!requiresNormalCleanup && !requiresEHCleanup) {
554 mlir::Block &cleanupBlock = cleanupScope.getCleanupRegion().back();
555 if (!cleanupBlock.mightHaveTerminator()) {
556 mlir::OpBuilder::InsertionGuard guard(builder);
557 builder.setInsertionPointToEnd(&cleanupBlock);
558 cir::YieldOp::create(builder, builder.getUnknownLoc());
571 cleanupBufferStack[8 *
sizeof(
void *)];
572 std::unique_ptr<char[]> cleanupBufferHeap;
578 if (cleanupSize <=
sizeof(cleanupBufferStack)) {
579 memcpy(cleanupBufferStack, cleanupSource, cleanupSize);
582 cleanupBufferHeap.reset(
new char[cleanupSize]);
583 memcpy(cleanupBufferHeap.get(), cleanupSource, cleanupSize);
595 Address cleanupActiveFlag = normalActiveFlag.
isValid() ? normalActiveFlag
596 : ehActiveFlag.
isValid() ? ehActiveFlag
614 bool requiresCleanup =
false;
615 for (
auto it =
ehStack.begin(), ie =
ehStack.find(oldCleanupStackDepth);
618 requiresCleanup =
true;
627 if (requiresCleanup) {
628 for (mlir::Value *valPtr : valuesToReload) {
629 mlir::Value val = *valPtr;
637 tempAllocas.push_back(temp);
638 builder.createStore(val.getLoc(), val, temp);
644 while (
ehStack.stable_begin() != oldCleanupStackDepth)
648 if (requiresCleanup) {
649 for (
auto [addr, valPtr] : llvm::zip(tempAllocas, valuesToReload)) {
650 mlir::Location loc = valPtr->getLoc();
651 *valPtr = builder.createLoad(loc, addr);
static void setupCleanupBlockDeactivation(CIRGenFunction &cgf, EHScopeStack::stable_iterator c, mlir::Operation *dominatingIP)
The given cleanup block is being deactivated.
static bool bodyHasBranchThroughExits(mlir::Region &bodyRegion)
Check whether a cleanup scope body contains any non-yield exits that branch through the cleanup.
static void emitCleanup(CIRGenFunction &cgf, cir::CleanupScopeOp cleanupScope, EHScopeStack::Cleanup *cleanup, EHScopeStack::Cleanup::Flags flags, Address activeFlag)
static Decl::Kind getKind(const Decl *D)
An ordered collection of selectors, each with an associated kind. A selector has a kind.
__DEVICE__ void * memcpy(void *__a, const void *__b, size_t __c)
__device__ __2f16 float c
cir::ConstantOp getBool(bool state, mlir::Location loc)
cir::StoreOp createFlagStore(mlir::Location loc, bool val, mlir::Value dst)
cir::YieldOp createYield(mlir::Location loc, mlir::ValueRange value={})
Create a yield operation.
cir::LoadOp createFlagLoad(mlir::Location loc, mlir::Value addr)
Emit a load from an boolean flag variable.
cir::BoolType getBoolTy()
mlir::Value getPointer() const
FullExprCleanupScope(CIRGenFunction &cgf, const Expr *subExpr)
void exit(ArrayRef< mlir::Value * > valuesToReload={})
llvm::SmallVector< PendingCleanupEntry > lifetimeExtendedCleanupStack
mlir::Block * getCurFunctionEntryBlock()
bool isInConditionalBranch() const
void setBeforeOutermostConditional(mlir::Value value, Address addr)
ConditionalEvaluation * outermostConditional
EHScopeStack ehStack
Tracks function scope overall cleanup handling.
llvm::SmallVector< PendingCleanupEntry > deferredConditionalCleanupStack
void pushDestroy(QualType::DestructionKind dtorKind, Address addr, QualType type)
Push the standard destructor for the given type as at least a normal cleanup.
static Destroyer destroyCXXObject
Address createCleanupActiveFlag()
Create an active flag variable for use with conditional cleanups.
void deactivateCleanupBlock(EHScopeStack::stable_iterator cleanup, mlir::Operation *dominatingIP)
Deactivates the given cleanup block.
bool haveInsertPoint() const
True if an insertion point is defined.
void emitCXXTemporary(const CXXTemporary *temporary, QualType tempType, Address ptr)
Emits all the code to cause the given temporary to be cleaned up.
void popCleanupBlocks(EHScopeStack::stable_iterator oldCleanupStackDepth, ArrayRef< mlir::Value * > valuesToReload={})
Takes the old cleanup stack size and emits the cleanup blocks that have been added.
CIRGenBuilderTy & getBuilder()
void pushPendingCleanupToEHStack(const PendingCleanupEntry &entry)
Promote a single pending cleanup entry onto the EH scope stack.
void popCleanupBlock(bool forDeactivation=false)
Pop a cleanup block from the stack.
EHScopeStack::stable_iterator currentCleanupStackDepth
CIRGenFunction(CIRGenModule &cgm, CIRGenBuilderTy &builder, bool suppressNewContext=false)
Address createTempAllocaWithoutCast(mlir::Type ty, CharUnits align, mlir::Location loc, const Twine &name="tmp", mlir::Value arraySize=nullptr, mlir::OpBuilder::InsertPoint ip={})
This creates a alloca and inserts it into the entry block of the current region.
Address createDefaultAlignTempAlloca(mlir::Type ty, mlir::Location loc, const Twine &name)
CreateDefaultAlignTempAlloca - This creates an alloca with the default alignment of the corresponding type.
A cleanup scope which generates the cleanup blocks lazily.
void setTestFlagInNormalCleanup()
Address getActiveFlag() const
size_t getCleanupSize() const
cir::CleanupScopeOp getCleanupScopeOp()
bool shouldTestFlagInEHCleanup() const
static size_t getSizeForCleanupSize(size_t size)
Gets the size required for a lazy cleanup scope with the given cleanup-data requirements.
bool isNormalCleanup() const
void setActiveFlag(Address var)
void * getCleanupBuffer()
bool shouldTestFlagInNormalCleanup() const
void setTestFlagInEHCleanup()
void setActive(bool isActive)
void setIsEHCleanupKind()
void setIsNormalCleanupKind()
Information for lazily generating a cleanup.
A saved depth on the scope stack.
void popCleanup()
Pops a cleanup scope off the stack. This is private to CIRGenCleanup.cpp.
iterator find(stable_iterator savePoint) const
Turn a stable reference to a scope depth into a unstable pointer to the EH stack.
bool requiresCatchOrCleanup() const
stable_iterator getInnermostActiveNormalCleanup() const
Represents a C++ temporary.
static CharUnits One()
One - Construct a CharUnits quantity of one.
This represents one expression.
A (possibly-)qualified type.
A class that does preorder or postorder depth-first traversal on the entire Clang AST and visits each node.
@ EHCleanup
Denotes a cleanup that should run when a scope is exited using exceptional control flow (a throw statement).
@ NormalCleanup
Denotes a cleanup that should run when a scope is exited using normal control flow (falling off the end of the scope).
The JSON file list parser is used to communicate input to InstallAPI.
bool isa(CodeGen::Address addr)
nullptr
This class represents a compute construct, representing a 'Kind' of 'parallel', 'serial', or similar.
U cast(CodeGen::Address addr)
static bool emitLifetimeMarkers()
A cleanup entry that will be promoted onto the EH scope stack at a later point.