llvm.org GIT mirror llvm / d1d6f53
[WinEH] Disable most forms of demotion

Now that the register allocator knows about the barriers on funclet entry and exit, testing has shown that this is unnecessary.

We still demote PHIs on unsplittable blocks due to the differences between the IR CFG and the Machine CFG.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@253619 91177308-0d34-0410-b5e6-96231b3b80d8

Reid Kleckner, 5 years ago
5 changed file(s) with 81 addition(s) and 256 deletion(s).
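For context on what "demotion" means here, the sketch below is a hypothetical IR example (not taken from this commit's tests; the names @f, @h, and @phi_on_pad are made up, and it uses the catchpad/catchendpad syntax of this era). It shows the one case WinEHPrepare still demotes after this change: a PHI at the start of an EH pad block. Because funclet entries in the Machine CFG cannot be split or given PHI operands, %p is rewritten through a stack slot (stores in %left and %right, a load in the pad) rather than being left for the register allocator.

; Hypothetical example, not from this commit: a PHI on an unsplittable EH pad
; block, which demotePHIsOnFunclets still spills to a stack slot.
declare void @f()
declare void @h(i32)
declare i32 @__CxxFrameHandler3(...)

define void @phi_on_pad(i1 %b) personality i32 (...)* @__CxxFrameHandler3 {
entry:
  br i1 %b, label %left, label %right
left:
  invoke void @f()
          to label %exit unwind label %dispatch
right:
  invoke void @f()
          to label %exit unwind label %dispatch
dispatch:
  ; %p lives on an EH pad block; it becomes stores in %left/%right and a load here.
  %p = phi i32 [ 1, %left ], [ 2, %right ]
  %cp = catchpad [] to label %catch unwind label %catchend
catch:
  call void @h(i32 %p)
  catchret %cp to label %exit
catchend:
  catchendpad unwind to caller
exit:
  ret void
}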
6868 AllocaInst *insertPHILoads(PHINode *PN, Function &F);
6969 void replaceUseWithLoad(Value *V, Use &U, AllocaInst *&SpillSlot,
7070 DenseMap<BasicBlock *, Value *> &Loads, Function &F);
71 void demoteNonlocalUses(Value *V, SetVector<BasicBlock *> &ColorsForBB,
72 Function &F);
7371 bool prepareExplicitEH(Function &F,
7472 SmallVectorImpl<BasicBlock *> &EntryBlocks);
7573 void replaceTerminatePadWithCleanup(Function &F);
8987 std::map<BasicBlock *, BasicBlock *> &Orig2Clone);
9088
9189 void demotePHIsOnFunclets(Function &F);
92 void demoteUsesBetweenFunclets(Function &F);
93 void demoteArgumentUses(Function &F);
9490 void cloneCommonBlocks(Function &F,
9591 SmallVectorImpl<BasicBlock *> &EntryBlocks);
9692 void removeImplausibleTerminators(Function &F);
15871583 }
15881584 }
15891585
1590 void WinEHPrepare::demoteUsesBetweenFunclets(Function &F) {
1591 // Turn all inter-funclet uses of a Value into loads and stores.
1592 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE;) {
1593 BasicBlock *BB = &*FI++;
1594 SetVector<BasicBlock *> &ColorsForBB = BlockColors[BB];
1595 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end(); BI != BE;) {
1596 Instruction *I = &*BI++;
1597 // Funclets are permitted to use static allocas.
1598 if (auto *AI = dyn_cast<AllocaInst>(I))
1599 if (AI->isStaticAlloca())
1600 continue;
1601
1602 demoteNonlocalUses(I, ColorsForBB, F);
1603 }
1604 }
1605 }
1606
1607 void WinEHPrepare::demoteArgumentUses(Function &F) {
1608 // Also demote function parameters used in funclets.
1609 SetVector<BasicBlock *> &ColorsForEntry = BlockColors[&F.getEntryBlock()];
1610 for (Argument &Arg : F.args())
1611 demoteNonlocalUses(&Arg, ColorsForEntry, F);
1612 }
1613
16141586 void WinEHPrepare::cloneCommonBlocks(
16151587 Function &F, SmallVectorImpl<BasicBlock *> &EntryBlocks) {
16161588 // We need to clone all blocks which belong to multiple funclets. Values are
19131885 report_fatal_error("Uncolored BB!");
19141886 if (NumColors > 1)
19151887 report_fatal_error("Multicolor BB!");
1916 if (!DisableDemotion) {
1917 bool EHPadHasPHI = BB.isEHPad() && isa<PHINode>(BB.begin());
1918 assert(!EHPadHasPHI && "EH Pad still has a PHI!");
1919 if (EHPadHasPHI)
1920 report_fatal_error("EH Pad still has a PHI!");
1921 }
1888 bool EHPadHasPHI = BB.isEHPad() && isa<PHINode>(BB.begin());
1889 assert(!EHPadHasPHI && "EH Pad still has a PHI!");
1890 if (EHPadHasPHI)
1891 report_fatal_error("EH Pad still has a PHI!");
19221892 }
19231893 }
19241894
19291899 // Determine which blocks are reachable from which funclet entries.
19301900 colorFunclets(F, EntryBlocks);
19311901
1932 if (!DisableDemotion) {
1902 if (!DisableDemotion)
19331903 demotePHIsOnFunclets(F);
1934
1935 demoteUsesBetweenFunclets(F);
1936
1937 demoteArgumentUses(F);
1938 }
19391904
19401905 cloneCommonBlocks(F, EntryBlocks);
19411906
20472012
20482013 // Otherwise, insert the store at the end of the basic block.
20492014 new StoreInst(PredVal, SpillSlot, PredBlock->getTerminator());
2050 }
2051
2052 // The SetVector == operator uses the std::vector == operator, so it doesn't
2053 // actually tell us whether or not the two sets contain the same colors. This
2054 // function does that.
2055 // FIXME: Would it be better to add a isSetEquivalent() method to SetVector?
2056 static bool isBlockColorSetEquivalent(SetVector<BasicBlock *> &SetA,
2057 SetVector<BasicBlock *> &SetB) {
2058 if (SetA.size() != SetB.size())
2059 return false;
2060 for (auto *Color : SetA)
2061 if (!SetB.count(Color))
2062 return false;
2063 return true;
2064 }
2065
2066 // TODO: Share loads for same-funclet uses (requires dominators if funclets
2067 // aren't properly nested).
2068 void WinEHPrepare::demoteNonlocalUses(Value *V,
2069 SetVector<BasicBlock *> &ColorsForBB,
2070 Function &F) {
2071 // Tokens can only be used non-locally due to control flow involving
2072 // unreachable edges. Don't try to demote the token usage, we'll simply
2073 // delete the cloned user later.
2074 if (isa<CatchPadInst>(V) || isa<CleanupPadInst>(V))
2075 return;
2076
2077 DenseMap<BasicBlock *, Value *> Loads;
2078 AllocaInst *SpillSlot = nullptr;
2079 for (Value::use_iterator UI = V->use_begin(), UE = V->use_end(); UI != UE;) {
2080 Use &U = *UI++;
2081 auto *UsingInst = cast(U.getUser());
2082 BasicBlock *UsingBB = UsingInst->getParent();
2083
2084 // Is the Use inside a block which is colored the same as the Def?
2085 // If so, we don't need to escape the Def because we will clone
2086 // ourselves our own private copy.
2087 SetVector<BasicBlock *> &ColorsForUsingBB = BlockColors[UsingBB];
2088 if (isBlockColorSetEquivalent(ColorsForUsingBB, ColorsForBB))
2089 continue;
2090
2091 replaceUseWithLoad(V, U, SpillSlot, Loads, F);
2092 }
2093 if (SpillSlot) {
2094 // Insert stores of the computed value into the stack slot.
2095 // We have to be careful if I is an invoke instruction,
2096 // because we can't insert the store AFTER the terminator instruction.
2097 BasicBlock::iterator InsertPt;
2098 if (isa<Argument>(V)) {
2099 InsertPt = F.getEntryBlock().getTerminator()->getIterator();
2100 } else if (isa<InvokeInst>(V)) {
2101 auto *II = cast<InvokeInst>(V);
2102 // We cannot demote invoke instructions to the stack if their normal
2103 // edge is critical. Therefore, split the critical edge and create a
2104 // basic block into which the store can be inserted.
2105 if (!II->getNormalDest()->getSinglePredecessor()) {
2106 unsigned SuccNum =
2107 GetSuccessorNumber(II->getParent(), II->getNormalDest());
2108 assert(isCriticalEdge(II, SuccNum) && "Expected a critical edge!");
2109 BasicBlock *NewBlock = SplitCriticalEdge(II, SuccNum);
2110 assert(NewBlock && "Unable to split critical edge.");
2111 // Update the color mapping for the newly split edge.
2112 SetVector<BasicBlock *> &ColorsForUsingBB = BlockColors[II->getParent()];
2113 BlockColors[NewBlock] = ColorsForUsingBB;
2114 for (BasicBlock *FuncletPad : ColorsForUsingBB)
2115 FuncletBlocks[FuncletPad].insert(NewBlock);
2116 }
2117 InsertPt = II->getNormalDest()->getFirstInsertionPt();
2118 } else {
2119 InsertPt = cast<Instruction>(V)->getIterator();
2120 ++InsertPt;
2121 // Don't insert before PHI nodes or EH pad instrs.
2122 for (; isa<PHINode>(InsertPt) || InsertPt->isEHPad(); ++InsertPt)
2123 ;
2124 }
2125 new StoreInst(V, SpillSlot, &*InsertPt);
2126 }
21272015 }
21282016
21292017 void WinEHPrepare::replaceUseWithLoad(Value *V, Use &U, AllocaInst *&SpillSlot,
2828 ; for the use in entry's copy.
2929 ; CHECK-LABEL: define void @test1(
3030 ; CHECK: entry:
31 ; CHECK: store i32 %x, i32* [[Slot:%[^ ]+]]
31 ; CHECK: %x = call i32 @g()
3232 ; CHECK: invoke void @f()
3333 ; CHECK: to label %[[EntryCopy:[^ ]+]] unwind label %catch
3434 ; CHECK: catch:
3535 ; CHECK: catchpad []
3636 ; CHECK-NEXT: to label %[[CatchCopy:[^ ]+]] unwind
3737 ; CHECK: [[CatchCopy]]:
38 ; CHECK: [[LoadX2:%[^ ]+]] = load i32, i32* [[Slot]]
39 ; CHECK: call void @h(i32 [[LoadX2]]
38 ; CHECK: call void @h(i32 %x)
4039 ; CHECK: [[EntryCopy]]:
41 ; CHECK: [[LoadX1:%[^ ]+]] = load i32, i32* [[Slot]]
42 ; CHECK: call void @h(i32 [[LoadX1]]
40 ; CHECK: call void @h(i32 %x)
4341
4442
4543 define void @test2() personality i32 (...)* @__CxxFrameHandler3 {
280278 ; then calls @h, and that the call to @h doesn't return.
281279 ; CHECK-LABEL: define void @test6(
282280 ; CHECK: left:
281 ; CHECK: %x.for.left = call i32 @g()
282 ; CHECK: invoke void @f()
283283 ; CHECK: to label %[[SHARED_CONT_LEFT:.+]] unwind label %[[INNER_LEFT:.+]]
284284 ; CHECK: right:
285 ; CHECK: catchpad
285286 ; CHECK: to label %right.catch unwind label %right.end
286287 ; CHECK: right.catch:
287288 ; CHECK: %x = call i32 @g()
288 ; CHECK: store i32 %x, i32* %x.wineh.spillslot
289289 ; CHECK: to label %shared.cont unwind label %[[INNER_RIGHT:.+]]
290290 ; CHECK: right.end:
291291 ; CHECK: catchendpad unwind to caller
295295 ; CHECK: unreachable
296296 ; CHECK: [[INNER_RIGHT]]:
297297 ; CHECK: [[I_R:\%.+]] = cleanuppad []
298 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
299 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
298 ; CHECK: call void @h(i32 %x)
300299 ; CHECK: cleanupret [[I_R]] unwind label %right.end
301300 ; CHECK: [[INNER_LEFT]]:
302301 ; CHECK: [[I_L:\%.+]] = cleanuppad []
303 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
304 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
302 ; CHECK: call void @h(i32 %x.for.left)
305303 ; CHECK: unreachable
306304
307305
112112 call void @h(i32 %x)
113113 call void @h(i32 %y)
114114 catchret %cpouter to label %exit
115
116 exit:
117 ret void
118 }
119
120 ; CHECK-LABEL: @test3(
121 define void @test3(i1 %B) personality i32 (...)* @__CxxFrameHandler3 {
122 entry:
123 ; need to spill parameter %B and def %x since they're used in a funclet
124 ; CHECK: entry:
125 ; CHECK-DAG: store i1 %B, i1* [[SlotB:%[^ ]+]]
126 ; CHECK-DAG: store i32 %x, i32* [[SlotX:%[^ ]+]]
127 ; CHECK: invoke void @f
128 %x = call i32 @g()
129 invoke void @f()
130 to label %exit unwind label %catchpad
131
132 catchpad:
133 %cp = catchpad [] to label %catch unwind label %catchend
134
135 catch:
136 ; Need to reload %B here
137 ; CHECK: catch:
138 ; CHECK: [[ReloadB:%[^ ]+]] = load i1, i1* [[SlotB]]
139 ; CHECK: br i1 [[ReloadB]]
140 br i1 %B, label %left, label %right
141 left:
142 ; Use of %x is in a phi, so need reload here in pred
143 ; CHECK: left:
144 ; CHECK: [[ReloadX:%[^ ]+]] = load i32, i32* [[SlotX]]
145 ; CHECK: br label %merge
146 br label %merge
147 right:
148 br label %merge
149 merge:
150 ; CHECK: merge:
151 ; CHECK: %phi = phi i32 [ [[ReloadX]], %left ]
152 %phi = phi i32 [ %x, %left ], [ 42, %right ]
153 call void @h(i32 %phi)
154 catchret %cp to label %exit
155
156 catchend:
157 catchendpad unwind to caller
158115
159116 exit:
160117 ret void
294251 ret void
295252 }
296253
254 ; We used to demote %x, but we don't need to anymore.
297255 ; CHECK-LABEL: @test6(
298256 define void @test6() personality i32 (...)* @__CxxFrameHandler3 {
299257 entry:
300 ; Since %x needs to be stored but the edge to loop is critical,
301 ; it needs to be split
302258 ; CHECK: entry:
303 ; CHECK: invoke i32 @g
304 ; CHECK-NEXT: to label %[[SplitBlock:[^ ]+]] unwind label %to_caller
259 ; CHECK: %x = invoke i32 @g()
260 ; CHECK-NEXT: to label %loop unwind label %to_caller
305261 %x = invoke i32 @g()
306262 to label %loop unwind label %to_caller
307 ; The store should be in the split block
308 ; CHECK: [[SplitBlock]]:
309 ; CHECK: store i32 %x, i32* [[SpillSlot:%[^ ]+]]
310 ; CHECK: br label %loop
311263 to_caller:
312264 %cp1 = cleanuppad []
313265 cleanupret %cp1 unwind to caller
316268 to label %loop unwind label %cleanup
317269 cleanup:
318270 ; CHECK: cleanup:
319 ; CHECK: [[Load:%[^ ]+]] = load i32, i32* [[SpillSlot]]
320 ; CHECK: call void @h(i32 [[Load]])
271 ; CHECK: call void @h(i32 %x)
321272 %cp2 = cleanuppad []
322273 call void @h(i32 %x)
323274 cleanupret %cp2 unwind to caller
361312 ; Edge from %right to %join needs to be split so that
362313 ; the load of %y can be inserted *after* the catchret
363314 ; CHECK: right:
364 ; CHECK: store i32 %y, i32* [[SlotY:%[^ ]+]]
365 ; CHECK: catchret %[[CatchPad]] to label %[[SplitRight:[^ ]+]]
315 ; CHECK: %y = call i32 @g()
316 ; CHECK: catchret %[[CatchPad]] to label %join
366317 %y = call i32 @g()
367318 catchret %cp to label %join
368 ; CHECK: [[SplitRight]]:
369 ; CHECK: [[LoadY:%[^ ]+]] = load i32, i32* [[SlotY]]
370 ; CHECK: br label %join
371319 catchend:
372320 catchendpad unwind to caller
373321 join:
374322 ; CHECK: join:
375 ; CHECK: %phi = phi i32 [ [[LoadX]], %[[SplitLeft]] ], [ [[LoadY]], %[[SplitRight]] ]
323 ; CHECK: %phi = phi i32 [ [[LoadX]], %[[SplitLeft]] ], [ %y, %right ]
376324 %phi = phi i32 [ %x, %left ], [ %y, %right ]
377325 call void @h(i32 %phi)
378326 br label %exit
5050 ; then calls @h, and that the call to @h doesn't return.
5151 ; CHECK-LABEL: define void @test1(
5252 ; CHECK: left:
53 ; CHECK: cleanuppad
54 ; CHECK: %x.for.left = call i32 @g()
55 ; CHECK: invoke void @f()
5356 ; CHECK: to label %[[SHARED_CONT_LEFT:.+]] unwind label %[[INNER_LEFT:.+]]
5457 ; CHECK: right:
5558 ; CHECK: to label %right.catch unwind label %right.end
5659 ; CHECK: right.catch:
5760 ; CHECK: %x = call i32 @g()
58 ; CHECK: store i32 %x, i32* %x.wineh.spillslot
5961 ; CHECK: to label %shared.cont unwind label %[[INNER_RIGHT:.+]]
6062 ; CHECK: right.end:
6163 ; CHECK: catchendpad unwind to caller
6567 ; CHECK: unreachable
6668 ; CHECK: [[INNER_RIGHT]]:
6769 ; CHECK: [[I_R:\%.+]] = cleanuppad []
68 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
69 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
70 ; CHECK: call void @h(i32 %x)
7071 ; CHECK: cleanupret [[I_R]] unwind label %right.end
7172 ; CHECK: [[INNER_LEFT]]:
7273 ; CHECK: [[I_L:\%.+]] = cleanuppad []
73 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
74 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
74 ; CHECK: call void @h(i32 %x.for.left)
7575 ; CHECK: unreachable
7676
7777
117117 ; %right.end (which belongs to the entry funclet).
118118 ; CHECK-LABEL: define void @test2(
119119 ; CHECK: left:
120 ; CHECK: cleanuppad
121 ; CHECK: %x.for.left = call i32 @g()
122 ; CHECK: invoke void @f()
120123 ; CHECK: to label %[[SHARED_CONT_LEFT:.+]] unwind label %[[INNER_LEFT:.+]]
121124 ; CHECK: right:
122125 ; CHECK: to label %right.catch unwind label %[[RIGHT_END:.+]]
135138 ; CHECK: catchpad []
136139 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
137140 ; CHECK: [[INNER_CATCH_RIGHT]]:
138 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
139 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
141 ; CHECK: call void @h(i32 %x)
140142 ; CHECK: unreachable
141143 ; CHECK: [[INNER_CATCH_LEFT]]:
142 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
143 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
144 ; CHECK: call void @h(i32 %x.for.left)
144145 ; CHECK: unreachable
145146 ; CHECK: [[INNER_END_LEFT]]:
146147 ; CHECK: catchendpad unwind to caller
189190 ; CHECK-LABEL: define void @test3(
190191 ; CHECK: left:
191192 ; CHECK: %l = cleanuppad []
193 ; CHECK: %x.for.left = call i32 @g()
194 ; CHECK: invoke void @f()
192195 ; CHECK: to label %[[SHARED_CONT_LEFT:.+]] unwind label %[[INNER_LEFT:.+]]
193196 ; CHECK: [[LEFT_END:left.end.*]]:
194197 ; CHECK: cleanupendpad %l unwind label %right
209212 ; CHECK: catchpad []
210213 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
211214 ; CHECK: [[INNER_CATCH_RIGHT]]:
212 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
213 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
215 ; CHECK: call void @h(i32 %x)
214216 ; CHECK: unreachable
215217 ; CHECK: [[INNER_CATCH_LEFT]]:
216 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
217 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
218 ; CHECK: call void @h(i32 %x.for.left)
218219 ; CHECK: unreachable
219220 ; CHECK: [[INNER_END_LEFT]]:
220221 ; CHECK: catchendpad unwind label %[[LEFT_END]]
269270 ; CHECK: catchpad []
270271 ; CHECK: to label %left.catch unwind label %[[LEFT_END:.+]]
271272 ; CHECK: left.catch:
273 ; CHECK: %x.for.left = call i32 @g()
274 ; CHECK: invoke void @f()
272275 ; CHECK: to label %[[SHARED_CONT_LEFT:.+]] unwind label %[[INNER_LEFT:.+]]
273276 ; CHECK: [[LEFT_END]]:
274277 ; CHECK: catchendpad unwind label %right
289292 ; CHECK: catchpad []
290293 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
291294 ; CHECK: [[INNER_CATCH_RIGHT]]:
292 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
293 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
295 ; CHECK: call void @h(i32 %x)
294296 ; CHECK: unreachable
295297 ; CHECK: [[INNER_CATCH_LEFT]]:
296 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
297 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
298 ; CHECK: call void @h(i32 %x.for.left)
298299 ; CHECK: unreachable
299300 ; CHECK: [[INNER_END_RIGHT]]:
300301 ; CHECK: catchendpad unwind to caller
361362 ; CHECK: catchpad []
362363 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
363364 ; CHECK: [[INNER_CATCH_RIGHT]]:
364 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
365 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
365 ; CHECK: call void @h(i32 %x)
366366 ; CHECK: unreachable
367367 ; CHECK: [[INNER_CATCH_LEFT]]:
368 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
369 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
368 ; CHECK: call void @h(i32 %x.for.left)
370369 ; CHECK: unreachable
371370 ; CHECK: [[INNER_END_RIGHT]]:
372371 ; CHECK: catchendpad unwind to caller
445444 ; CHECK: catchpad []
446445 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
447446 ; CHECK: [[INNER_CATCH_RIGHT]]:
448 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
449 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
447 ; CHECK: call void @h(i32 %x)
450448 ; CHECK: unreachable
451449 ; CHECK: [[INNER_CATCH_LEFT]]:
452 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
453 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
450 ; CHECK: call void @h(i32 %x.for.left)
454451 ; CHECK: unreachable
455452 ; CHECK: [[INNER_END_RIGHT]]:
456453 ; CHECK: catchendpad unwind to caller
521518 ; CHECK: catchpad []
522519 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
523520 ; CHECK: [[INNER_CATCH_RIGHT]]:
524 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
525 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
521 ; CHECK: call void @h(i32 %x)
526522 ; CHECK: unreachable
527523 ; CHECK: [[INNER_CATCH_LEFT]]:
528 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
529 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
524 ; CHECK: call void @h(i32 %x.for.left)
530525 ; CHECK: unreachable
531526 ; CHECK: [[INNER_END_RIGHT]]:
532527 ; CHECK: catchendpad unwind label %[[INNER_SIBLING_RIGHT:.+]]
802797 ; CHECK: catchpad []
803798 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
804799 ; CHECK: [[INNER_CATCH_RIGHT]]:
805 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
806 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
800 ; CHECK: call void @h(i32 %x)
807801 ; CHECK: unreachable
808802 ; CHECK: [[INNER_CATCH_LEFT]]:
809 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
810 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
803 ; CHECK: call void @h(i32 %x.for.left)
811804 ; CHECK: unreachable
812805 ; CHECK: [[INNER_END_RIGHT]]:
813806 ; CHECK: catchendpad unwind to caller
874867 ; CHECK: catchpad []
875868 ; CHECK: to label %[[INNER_CATCH_LEFT:.+]] unwind label %[[INNER_END_LEFT:.+]]
876869 ; CHECK: [[INNER_CATCH_RIGHT]]:
877 ; CHECK: [[X_RELOAD_R:\%.+]] = load i32, i32* %x.wineh.spillslot
878 ; CHECK: call void @h(i32 [[X_RELOAD_R]])
870 ; CHECK: call void @h(i32 %x)
879871 ; CHECK: unreachable
880872 ; CHECK: [[INNER_CATCH_LEFT]]:
881 ; CHECK: [[X_RELOAD_L:\%.+]] = load i32, i32* %x.wineh.spillslot
882 ; CHECK: call void @h(i32 [[X_RELOAD_L]])
873 ; CHECK: call void @h(i32 %x.for.left)
883874 ; CHECK: unreachable
884875 ; CHECK: [[INNER_END_RIGHT]]:
885876 ; CHECK: catchendpad unwind label %[[RIGHT_END]]
3434
3535 ; X64-LABEL: .seh_proc f
3636 ; X64: pushq %rbp
37 ; X64: pushq %rsi
38 ; X64: subq $56, %rsp
39 ; X64: leaq 48(%rsp), %rbp
40 ; X64: movq $-2, (%rbp)
37 ; X64: subq $64, %rsp
38 ; X64: leaq 64(%rsp), %rbp
39 ; X64: movq $-2, -8(%rbp)
40 ; X64: movl $-1, -20(%rbp) # 4-byte Folded Spill
4141 ; X64: callq g
42 ; X64: movl %esi, %eax
43 ; X64: addq $56, %rsp
44 ; X64: popq %rsi
42 ; X64: .LBB0_1
43 ; X64: movl -20(%rbp), %eax # 4-byte Reload
44 ; X64: addq $64, %rsp
4545 ; X64: popq %rbp
4646
47 ; X64: movl -4(%rbp), %esi
48 ; X64: jmp
49
50 ; X64-LABEL: "?catch$1@?0?f@4HA":
51 ; X64: .seh_proc "?catch$1@?0?f@4HA"
47 ; X64-LABEL: "?catch${{[0-9]}}@?0?f@4HA":
48 ; X64: .seh_proc "?catch${{[0-9]}}@?0?f@4HA"
5249 ; X64: movq %rdx, 16(%rsp)
5350 ; X64: pushq %rbp
54 ; X64: pushq %rsi
55 ; X64: subq $40, %rsp
56 ; X64: leaq 48(%rdx), %rbp
51 ; X64: subq $32, %rsp
52 ; X64: leaq 64(%rdx), %rbp
5753 ; arg2 is at RBP+40:
5854 ; start at arg2
5955 ; + 8 for arg1
6056 ; + 8 for retaddr
6157 ; + 8 for RBP
62 ; + 8 for RSI
63 ; + 56 for stackalloc
64 ; - 48 for setframe
58 ; + 64 for stackalloc
59 ; - 64 for setframe
6560 ; = 40
66 ; X64: movl 40(%rbp), %eax
67 ; X64: movl %eax, -4(%rbp)
68 ; X64: leaq .LBB0_2(%rip), %rax
69 ; X64: addq $40, %rsp
70 ; X64: popq %rsi
61 ; X64: movl 24(%rbp), %eax
62 ; X64: movl %eax, -20(%rbp) # 4-byte Spill
63 ; X64: leaq .LBB0_1(%rip), %rax
64 ; X64: addq $32, %rsp
7165 ; X64: popq %rbp
7266 ; X64: retq # CATCHRET
7367
7771 ; X86: pushl %ebx
7872 ; X86: pushl %edi
7973 ; X86: pushl %esi
80 ; X86: subl $28, %esp
81 ; X86: movl $-1, -40(%ebp)
74 ; X86: subl $24, %esp
75 ; X86: movl $-1, -36(%ebp)
8276 ; X86: calll _g
83 ; X86: movl -40(%ebp), %eax
84 ; X86: addl $28, %esp
77 ; X86: LBB0_[[retbb:[0-9]+]]:
78 ; X86: movl -36(%ebp), %eax
79 ; X86: addl $24, %esp
8580 ; X86: popl %esi
8681 ; X86: popl %edi
8782 ; X86: popl %ebx
8883 ; X86: popl %ebp
8984 ; X86: retl
9085
91 ; X86-LABEL: "?catch$1@?0?f@4HA":
92 ; X86: pushl %ebp
93 ; X86: addl $12, %ebp
86 ; X86: LBB0_[[restorebb:[0-9]+]]: # Block address taken
87 ; X86: addl $12, %ebp
9488 ; arg2 is at EBP offset 12:
9589 ; + 4 for arg1
9690 ; + 4 for retaddr
9791 ; + 4 for EBP
92 ; X86: movl 12(%ebp), %eax
93 ; X86: movl %eax, -36(%ebp)
94 ; X86: jmp LBB0_[[retbb]]
95
96 ; X86-LABEL: "?catch${{[0-9]}}@?0?f@4HA":
97 ; X86: pushl %ebp
98 ; X86: addl $12, %ebp
9899 ; Done due to mov %esp, %ebp
99 ; X86: movl 12(%ebp), %eax
100 ; X86: movl %eax, -32(%ebp)
101 ; X86: movl $LBB0_2, %eax
100 ; X86: leal 12(%ebp), %eax
101 ; X86: movl $LBB0_[[restorebb]], %eax
102102 ; X86: popl %ebp
103103 ; X86: retl # CATCHRET