llvm.org GIT mirror llvm / 8d335f0
[WinEH] Insert the catchpad return before CSR restoration.

x64 catchpads use rax to inform the unwinder where control should go next. However, we must initialize rax before the epilogue sequence so as not to perturb the unwinder.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@249910 91177308-0d34-0410-b5e6-96231b3b80d8 — David Majnemer, 4 years ago
2 changed file(s) with 24 addition(s) and 18 deletion(s). Raw diff Collapse all Expand all
1057 1057     bool NeedsWinCFI =
1058 1058         IsWin64Prologue && MF.getFunction()->needsUnwindTableEntry();
1059 1059     bool IsFunclet = isFuncletReturnInstr(MBBI);
     1060 +   MachineBasicBlock *RestoreMBB = nullptr;
1060 1061
1061 1062     // Get the number of bytes to allocate from the FrameInfo.
1062 1063     uint64_t StackSize = MFI->getStackSize();
1081 1082     }
1082 1083
1083 1084     // For 32-bit, create a new block for the restore code.
1084      -   MachineBasicBlock *RestoreMBB = TargetMBB;
     1085 +   RestoreMBB = TargetMBB;
1085 1086     if (STI.is32Bit()) {
1086 1087       RestoreMBB = MF.CreateMachineBasicBlock(MBB.getBasicBlock());
1087 1088       MF.insert(TargetMBB, RestoreMBB);
1089 1090       MBB.addSuccessor(RestoreMBB);
1090 1091       RestoreMBB->addSuccessor(TargetMBB);
1091 1092       MBBI->getOperand(0).setMBB(RestoreMBB);
1092      -   }
1093      -
1094      -   // Fill EAX/RAX with the address of the target block.
1095      -   unsigned ReturnReg = STI.is64Bit() ? X86::RAX : X86::EAX;
1096      -   if (STI.is64Bit()) {
1097      -     // LEA64r RestoreMBB(%rip), %rax
1098      -     BuildMI(MBB, MBBI, DL, TII.get(X86::LEA64r), ReturnReg)
1099      -         .addReg(X86::RIP)
1100      -         .addImm(0)
1101      -         .addReg(0)
1102      -         .addMBB(RestoreMBB)
1103      -         .addReg(0);
1104      -   } else {
1105      -     // MOV32ri $RestoreMBB, %eax
1106      -     BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri))
1107      -         .addReg(ReturnReg)
1108      -         .addMBB(RestoreMBB);
1109 1093     }
1110 1094
1111 1095     // Pop EBP.
1159 1143       --MBBI;
1160 1144     }
1161 1145     MachineBasicBlock::iterator FirstCSPop = MBBI;
     1146 +
     1147 +   if (RestoreMBB) {
     1148 +     // Fill EAX/RAX with the address of the target block.
     1149 +     unsigned ReturnReg = STI.is64Bit() ? X86::RAX : X86::EAX;
     1150 +     if (STI.is64Bit()) {
     1151 +       // LEA64r RestoreMBB(%rip), %rax
     1152 +       BuildMI(MBB, FirstCSPop, DL, TII.get(X86::LEA64r), ReturnReg)
     1153 +           .addReg(X86::RIP)
     1154 +           .addImm(0)
     1155 +           .addReg(0)
     1156 +           .addMBB(RestoreMBB)
     1157 +           .addReg(0);
     1158 +     } else {
     1159 +       // MOV32ri $RestoreMBB, %eax
     1160 +       BuildMI(MBB, FirstCSPop, DL, TII.get(X86::MOV32ri))
     1161 +           .addReg(ReturnReg)
     1162 +           .addMBB(RestoreMBB);
     1163 +     }
     1164 +   }
1162 1165
1163 1166     if (MBBI != MBB.end())
1164 1167       DL = MBBI->getDebugLoc();
122 122   ; X64: movq %rdx, 16(%rsp)
123 123   ; X64: pushq %rbp
124 124   ; X64: .seh_pushreg 5
    125 + ; X64: pushq %rsi
    126 + ; X64: .seh_pushreg 6
125 127   ; X64: subq $32, %rsp
126 128   ; X64: .seh_stackalloc 32
127 129   ; X64: leaq 32(%rdx), %rbp
130 132   ; X64: callq f
131 133   ; X64: leaq [[contbb]](%rip), %rax
132 134   ; X64: addq $32, %rsp
    135 + ; X64: popq %rsi
133 136   ; X64: popq %rbp
134 137   ; X64: retq
135 138