Skip to content

Commit a7dc9ef

Browse files
committed
[CIR] Handle return with cleanups
This adds support for branching through a cleanup block when a return statement is encountered while we're in a scope with cleanups.
1 parent 85265a9 commit a7dc9ef

File tree

10 files changed

+521
-81
lines changed

10 files changed

+521
-81
lines changed

clang/include/clang/CIR/MissingFeatures.h

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -210,6 +210,9 @@ struct MissingFeatures {
210210
static bool checkBitfieldClipping() { return false; }
211211
static bool cirgenABIInfo() { return false; }
212212
static bool cleanupAfterErrorDiags() { return false; }
213+
static bool cleanupAppendInsts() { return false; }
214+
static bool cleanupBranchThrough() { return false; }
215+
static bool cleanupIndexAndBIAdjustment() { return false; }
213216
static bool cleanupsToDeactivate() { return false; }
214217
static bool constEmitterAggILE() { return false; }
215218
static bool constEmitterArrayILE() { return false; }
@@ -230,6 +233,7 @@ struct MissingFeatures {
230233
static bool deleteArray() { return false; }
231234
static bool devirtualizeMemberFunction() { return false; }
232235
static bool ehCleanupFlags() { return false; }
236+
static bool ehCleanupHasPrebranchedFallthrough() { return false; }
233237
static bool ehCleanupScope() { return false; }
234238
static bool ehCleanupScopeRequiresEHCleanup() { return false; }
235239
static bool ehCleanupBranchFixups() { return false; }
@@ -285,6 +289,7 @@ struct MissingFeatures {
285289
static bool setNonGC() { return false; }
286290
static bool setObjCGCLValueClass() { return false; }
287291
static bool setTargetAttributes() { return false; }
292+
static bool simplifyCleanupEntry() { return false; }
288293
static bool sourceLanguageCases() { return false; }
289294
static bool stackBase() { return false; }
290295
static bool stackSaveOp() { return false; }

clang/lib/CIR/CodeGen/CIRGenCleanup.cpp

Lines changed: 213 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,46 @@ using namespace clang::CIRGen;
2828
// CIRGenFunction cleanup related
2929
//===----------------------------------------------------------------------===//
3030

31+
/// Build a unconditional branch to the lexical scope cleanup block
32+
/// or with the labeled blocked if already solved.
33+
///
34+
/// Track on scope basis, goto's we need to fix later.
35+
cir::BrOp CIRGenFunction::emitBranchThroughCleanup(mlir::Location loc,
36+
JumpDest dest) {
37+
// Insert a branch: to the cleanup block (unsolved) or to the already
38+
// materialized label. Keep track of unsolved goto's.
39+
assert(dest.getBlock() && "assumes incoming valid dest");
40+
auto brOp = cir::BrOp::create(builder, loc, dest.getBlock());
41+
42+
// Calculate the innermost active normal cleanup.
43+
EHScopeStack::stable_iterator topCleanup =
44+
ehStack.getInnermostActiveNormalCleanup();
45+
46+
// If we're not in an active normal cleanup scope, or if the
47+
// destination scope is within the innermost active normal cleanup
48+
// scope, we don't need to worry about fixups.
49+
if (topCleanup == ehStack.stable_end() ||
50+
topCleanup.encloses(dest.getScopeDepth())) { // works for invalid
51+
// FIXME(cir): should we clear insertion point here?
52+
return brOp;
53+
}
54+
55+
// If we can't resolve the destination cleanup scope, just add this
56+
// to the current cleanup scope as a branch fixup.
57+
if (!dest.getScopeDepth().isValid()) {
58+
BranchFixup &fixup = ehStack.addBranchFixup();
59+
fixup.destination = dest.getBlock();
60+
fixup.destinationIndex = dest.getDestIndex();
61+
fixup.initialBranch = brOp;
62+
fixup.optimisticBranchBlock = nullptr;
63+
// FIXME(cir): should we clear insertion point here?
64+
return brOp;
65+
}
66+
67+
cgm.errorNYI(loc, "emitBranchThroughCleanup: valid destination scope depth");
68+
return brOp;
69+
}
70+
3171
/// Emits all the code to cause the given temporary to be cleaned up.
3272
void CIRGenFunction::emitCXXTemporary(const CXXTemporary *temporary,
3373
QualType tempType, Address ptr) {
@@ -40,6 +80,19 @@ void CIRGenFunction::emitCXXTemporary(const CXXTemporary *temporary,
4080

4181
void EHScopeStack::Cleanup::anchor() {}
4282

83+
/// Walk the chain of normal cleanups from the innermost one outward
/// and return the first that is active, or stable_end() if none is.
EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  const stable_iterator end = stable_end();
  for (stable_iterator it = getInnermostNormalCleanup(); it != end;) {
    EHCleanupScope &scope = llvm::cast<EHCleanupScope>(*find(it));
    if (scope.isActive())
      return it;
    // Keep walking outward along the enclosing-normal-cleanup chain.
    it = scope.getEnclosingNormalCleanup();
  }
  return end;
}
4396
/// Push an entry of the given size onto this protected-scope stack.
4497
char *EHScopeStack::allocate(size_t size) {
4598
size = llvm::alignTo(size, ScopeStackAlignment);
@@ -75,14 +128,30 @@ void EHScopeStack::deallocate(size_t size) {
75128
startOfData += llvm::alignTo(size, ScopeStackAlignment);
76129
}
77130

131+
/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  // NYI: trimming resolved (null) fixups is not implemented yet.
  cgf->cgm.errorNYI("popNullFixups");
}
78142
void *EHScopeStack::pushCleanup(CleanupKind kind, size_t size) {
79143
char *buffer = allocate(EHCleanupScope::getSizeForCleanupSize(size));
144+
bool isNormalCleanup = kind & NormalCleanup;
80145
bool isEHCleanup = kind & EHCleanup;
81146
bool isLifetimeMarker = kind & LifetimeMarker;
82147

83148
assert(!cir::MissingFeatures::innermostEHScope());
84149

85-
EHCleanupScope *scope = new (buffer) EHCleanupScope(size);
150+
EHCleanupScope *scope = new (buffer)
151+
EHCleanupScope(size, branchFixups.size(), innermostNormalCleanup);
152+
153+
if (isNormalCleanup)
154+
innermostNormalCleanup = stable_begin();
86155

87156
if (isLifetimeMarker)
88157
cgf->cgm.errorNYI("push lifetime marker cleanup");
@@ -100,12 +169,23 @@ void EHScopeStack::popCleanup() {
100169

101170
assert(isa<EHCleanupScope>(*begin()));
102171
EHCleanupScope &cleanup = cast<EHCleanupScope>(*begin());
172+
innermostNormalCleanup = cleanup.getEnclosingNormalCleanup();
103173
deallocate(cleanup.getAllocatedSize());
104174

105175
// Destroy the cleanup.
106176
cleanup.destroy();
107177

108-
assert(!cir::MissingFeatures::ehCleanupBranchFixups());
178+
// Check whether we can shrink the branch-fixups stack.
179+
if (!branchFixups.empty()) {
180+
// If we no longer have any normal cleanups, all the fixups are
181+
// complete.
182+
if (!hasNormalCleanups()) {
183+
branchFixups.clear();
184+
} else {
185+
// Otherwise we can still trim out unnecessary nulls.
186+
popNullFixups();
187+
}
188+
}
109189
}
110190

111191
static void emitCleanup(CIRGenFunction &cgf, EHScopeStack::Cleanup *cleanup) {
@@ -116,24 +196,40 @@ static void emitCleanup(CIRGenFunction &cgf, EHScopeStack::Cleanup *cleanup) {
116196
assert(cgf.haveInsertPoint() && "cleanup ended with no insertion point?");
117197
}
118198

199+
/// Return the scope's normal cleanup entry block, lazily creating it
/// on first use.
static mlir::Block *createNormalEntry(CIRGenFunction &cgf,
                                      EHCleanupScope &scope) {
  assert(scope.isNormalCleanup());
  if (mlir::Block *existing = scope.getNormalBlock())
    return existing;

  // Materialize the block now; the guard restores the builder's
  // insertion point once we are done.
  mlir::OpBuilder::InsertionGuard guard(cgf.getBuilder());
  mlir::Block *entry =
      cgf.curLexScope->getOrCreateCleanupBlock(cgf.getBuilder());
  scope.setNormalBlock(entry);
  return entry;
}
119211
/// Pops a cleanup block. If the block includes a normal cleanup, the
120212
/// current insertion point is threaded through the cleanup, as are
121213
/// any branch fixups on the cleanup.
122214
void CIRGenFunction::popCleanupBlock() {
123215
assert(!ehStack.empty() && "cleanup stack is empty!");
124216
assert(isa<EHCleanupScope>(*ehStack.begin()) && "top not a cleanup!");
125217
EHCleanupScope &scope = cast<EHCleanupScope>(*ehStack.begin());
218+
assert(scope.getFixupDepth() <= ehStack.getNumBranchFixups());
126219

127220
// Remember activation information.
128221
bool isActive = scope.isActive();
129222

130-
assert(!cir::MissingFeatures::ehCleanupBranchFixups());
223+
// - whether there are branch fix-ups through this cleanup
224+
unsigned fixupDepth = scope.getFixupDepth();
225+
bool hasFixups = ehStack.getNumBranchFixups() != fixupDepth;
131226

132227
// - whether there's a fallthrough
133228
mlir::Block *fallthroughSource = builder.getInsertionBlock();
134229
bool hasFallthrough = fallthroughSource != nullptr && isActive;
135230

136-
bool requiresNormalCleanup = scope.isNormalCleanup() && hasFallthrough;
231+
bool requiresNormalCleanup =
232+
scope.isNormalCleanup() && (hasFixups || hasFallthrough);
137233

138234
// If we don't need the cleanup at all, we're done.
139235
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
@@ -168,9 +264,119 @@ void CIRGenFunction::popCleanupBlock() {
168264

169265
assert(!cir::MissingFeatures::ehCleanupFlags());
170266

171-
ehStack.popCleanup();
172-
scope.markEmitted();
173-
emitCleanup(*this, cleanup);
267+
// If we have a fallthrough and no other need for the cleanup,
268+
// emit it directly.
269+
if (hasFallthrough && !hasFixups) {
270+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
271+
ehStack.popCleanup();
272+
scope.markEmitted();
273+
emitCleanup(*this, cleanup);
274+
} else {
275+
// Otherwise, the best approach is to thread everything through
276+
// the cleanup block and then try to clean up after ourselves.
277+
278+
// Force the entry block to exist.
279+
mlir::Block *normalEntry = createNormalEntry(*this, scope);
280+
281+
// I. Set up the fallthrough edge in.
282+
mlir::OpBuilder::InsertPoint savedInactiveFallthroughIP;
283+
284+
// If there's a fallthrough, we need to store the cleanup
285+
// destination index. For fall-throughs this is always zero.
286+
if (hasFallthrough) {
287+
assert(!cir::MissingFeatures::ehCleanupHasPrebranchedFallthrough());
288+
289+
} else if (fallthroughSource) {
290+
// Otherwise, save and clear the IP if we don't have fallthrough
291+
// because the cleanup is inactive.
292+
assert(!isActive && "source without fallthrough for active cleanup");
293+
savedInactiveFallthroughIP = builder.saveInsertionPoint();
294+
}
295+
296+
// II. Emit the entry block. This implicitly branches to it if
297+
// we have fallthrough. All the fixups and existing branches
298+
// should already be branched to it.
299+
builder.setInsertionPointToEnd(normalEntry);
300+
301+
// intercept normal cleanup to mark SEH scope end
302+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
303+
304+
// III. Figure out where we're going and build the cleanup
305+
// epilogue.
306+
bool hasEnclosingCleanups =
307+
(scope.getEnclosingNormalCleanup() != ehStack.stable_end());
308+
309+
// Compute the branch-through dest if we need it:
310+
// - if there are branch-throughs threaded through the scope
311+
// - if fall-through is a branch-through
312+
// - if there are fixups that will be optimistically forwarded
313+
// to the enclosing cleanup
314+
assert(!cir::MissingFeatures::cleanupBranchThrough());
315+
if (hasFixups && hasEnclosingCleanups)
316+
cgm.errorNYI("cleanup branch-through dest");
317+
318+
mlir::Block *fallthroughDest = nullptr;
319+
320+
// If there's exactly one branch-after and no other threads,
321+
// we can route it without a switch.
322+
// Skip for SEH, since ExitSwitch is used to generate code to indicate
323+
// abnormal termination. (SEH: Except _leave and fall-through at
324+
// the end, all other exits in a _try (return/goto/continue/break)
325+
// are considered as abnormal terminations, using NormalCleanupDestSlot
326+
// to indicate abnormal termination)
327+
assert(!cir::MissingFeatures::cleanupBranchThrough());
328+
assert(!cir::MissingFeatures::ehCleanupScopeRequiresEHCleanup());
329+
330+
// IV. Pop the cleanup and emit it.
331+
scope.markEmitted();
332+
ehStack.popCleanup();
333+
assert(ehStack.hasNormalCleanups() == hasEnclosingCleanups);
334+
335+
emitCleanup(*this, cleanup);
336+
337+
// Append the prepared cleanup prologue from above.
338+
assert(!cir::MissingFeatures::cleanupAppendInsts());
339+
340+
// Optimistically hope that any fixups will continue falling through.
341+
if (fixupDepth != ehStack.getNumBranchFixups())
342+
cgm.errorNYI("cleanup fixup depth mismatch");
343+
344+
// V. Set up the fallthrough edge out.
345+
346+
// Case 1: a fallthrough source exists but doesn't branch to the
347+
// cleanup because the cleanup is inactive.
348+
if (!hasFallthrough && fallthroughSource) {
349+
// Prebranched fallthrough was forwarded earlier.
350+
// Non-prebranched fallthrough doesn't need to be forwarded.
351+
// Either way, all we need to do is restore the IP we cleared before.
352+
assert(!isActive);
353+
cgm.errorNYI("cleanup inactive fallthrough");
354+
355+
// Case 2: a fallthrough source exists and should branch to the
356+
// cleanup, but we're not supposed to branch through to the next
357+
// cleanup.
358+
} else if (hasFallthrough && fallthroughDest) {
359+
cgm.errorNYI("cleanup fallthrough destination");
360+
361+
// Case 3: a fallthrough source exists and should branch to the
362+
// cleanup and then through to the next.
363+
} else if (hasFallthrough) {
364+
// Everything is already set up for this.
365+
366+
// Case 4: no fallthrough source exists.
367+
} else {
368+
// FIXME(cir): should we clear insertion point here?
369+
}
370+
371+
// VI. Assorted cleaning.
372+
373+
// Check whether we can merge NormalEntry into a single predecessor.
374+
// This might invalidate (non-IR) pointers to NormalEntry.
375+
//
376+
// If it did invalidate those pointers, and normalEntry was the same
377+
// as NormalExit, go back and patch up the fixups.
378+
assert(!cir::MissingFeatures::simplifyCleanupEntry());
379+
}
174380
}
175381

176382
/// Pops cleanup blocks until the given savepoint is reached.

clang/lib/CIR/CodeGen/CIRGenCleanup.h

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,18 @@ class EHScope {
7272
/// A cleanup scope which generates the cleanup blocks lazily.
7373
class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
7474
: public EHScope {
75+
/// The nearest normal cleanup scope enclosing this one.
76+
EHScopeStack::stable_iterator enclosingNormal;
77+
78+
/// The dual entry/exit block along the normal edge. This is lazily
79+
/// created if needed before the cleanup is popped.
80+
mlir::Block *normalBlock = nullptr;
81+
82+
/// The number of fixups required by enclosing scopes (not including
83+
/// this one). If this is the top cleanup scope, all the fixups
84+
/// from this index onwards belong to this scope.
85+
unsigned fixupDepth = 0;
86+
7587
public:
7688
/// Gets the size required for a lazy cleanup scope with the given
7789
/// cleanup-data requirements.
@@ -83,7 +95,10 @@ class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
8395
return sizeof(EHCleanupScope) + cleanupBits.cleanupSize;
8496
}
8597

86-
EHCleanupScope(unsigned cleanupSize) : EHScope(EHScope::Cleanup) {
98+
EHCleanupScope(unsigned cleanupSize, unsigned fixupDepth,
99+
EHScopeStack::stable_iterator enclosingNormal)
100+
: EHScope(EHScope::Cleanup), enclosingNormal(enclosingNormal),
101+
fixupDepth(fixupDepth) {
87102
// TODO(cir): When exception handling is upstreamed, isNormalCleanup and
88103
// isEHCleanup will be arguments to the constructor.
89104
cleanupBits.isNormalCleanup = true;
@@ -101,11 +116,19 @@ class alignas(EHScopeStack::ScopeStackAlignment) EHCleanupScope
101116
// Objects of EHCleanupScope are not destructed. Use destroy().
102117
~EHCleanupScope() = delete;
103118

119+
mlir::Block *getNormalBlock() const { return normalBlock; }
120+
void setNormalBlock(mlir::Block *bb) { normalBlock = bb; }
121+
104122
bool isNormalCleanup() const { return cleanupBits.isNormalCleanup; }
105123

106124
bool isActive() const { return cleanupBits.isActive; }
107125
void setActive(bool isActive) { cleanupBits.isActive = isActive; }
108126

127+
unsigned getFixupDepth() const { return fixupDepth; }
128+
EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
129+
return enclosingNormal;
130+
}
131+
109132
size_t getCleanupSize() const { return cleanupBits.cleanupSize; }
110133
void *getCleanupBuffer() { return this + 1; }
111134

0 commit comments

Comments
 (0)