73#include <initializer_list>
78#define DEBUG_TYPE "coro-split"
110 Value *NewFramePtr =
nullptr;
120 : OrigF(OrigF), NewF(
nullptr), Suffix(Suffix), Shape(Shape), FKind(FKind),
128 : OrigF(OrigF), NewF(NewF), Suffix(Suffix), Shape(Shape),
129 FKind(Shape.
ABI == coro::ABI::Async ? Kind::Async : Kind::Continuation),
130 Builder(OrigF.
getContext()), ActiveSuspend(ActiveSuspend) {
132 Shape.
ABI == coro::ABI::RetconOnce || Shape.
ABI == coro::ABI::Async);
133 assert(NewF &&
"need existing function for continuation");
134 assert(ActiveSuspend &&
"need active suspend point for continuation");
138 assert(NewF !=
nullptr &&
"declaration not yet set");
145 bool isSwitchDestroyFunction() {
148 case Kind::Continuation:
149 case Kind::SwitchResume:
151 case Kind::SwitchUnwind:
152 case Kind::SwitchCleanup:
158 void replaceEntryBlock();
159 Value *deriveNewFramePointer();
160 void replaceRetconOrAsyncSuspendUses();
161 void replaceCoroSuspends();
162 void replaceCoroEnds();
165 void handleFinalSuspend();
188 if (
auto Invoke = dyn_cast<InvokeInst>(CB)) {
191 Invoke->getUnwindDest(), {Awaiter, FramePtr});
194 std::copy(Invoke->bundle_op_info_begin(), Invoke->bundle_op_info_end(),
195 WrapperInvoke->bundle_op_info_begin());
196 WrapperInvoke->setAttributes(NewAttributes);
197 WrapperInvoke->setDebugLoc(Invoke->getDebugLoc());
198 NewCall = WrapperInvoke;
199 }
else if (
auto Call = dyn_cast<CallInst>(CB)) {
203 WrapperCall->setDebugLoc(Call->getDebugLoc());
204 NewCall = WrapperCall;
222 assert(Shape.
ABI == coro::ABI::Retcon || Shape.
ABI == coro::ABI::RetconOnce);
235 auto *EndAsync = dyn_cast<CoroAsyncEndInst>(
End);
241 auto *MustTailCallFunc = EndAsync->getMustTailCallFunction();
242 if (!MustTailCallFunc) {
248 auto *CoroEndBlock =
End->getParent();
249 auto *MustTailCallFuncBlock = CoroEndBlock->getSinglePredecessor();
250 assert(MustTailCallFuncBlock &&
"Must have a single predecessor block");
251 auto It = MustTailCallFuncBlock->getTerminator()->getIterator();
252 auto *MustTailCall = cast<CallInst>(&*std::prev(It));
253 CoroEndBlock->splice(
End->getIterator(), MustTailCallFuncBlock,
254 MustTailCall->getIterator());
262 auto *BB =
End->getParent();
263 BB->splitBasicBlock(
End);
264 BB->getTerminator()->eraseFromParent();
267 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
284 case coro::ABI::Switch:
285 assert(!cast<CoroEndInst>(
End)->hasResults() &&
286 "switch coroutine should not return any values");
295 case coro::ABI::Async: {
297 if (!CoroEndBlockNeedsCleanup)
304 case coro::ABI::RetconOnce: {
306 auto *CoroEnd = cast<CoroEndInst>(
End);
309 if (!CoroEnd->hasResults()) {
315 auto *CoroResults = CoroEnd->getResults();
316 unsigned NumReturns = CoroResults->numReturns();
318 if (
auto *RetStructTy = dyn_cast<StructType>(
RetTy)) {
319 assert(RetStructTy->getNumElements() == NumReturns &&
320 "numbers of returns should match resume function singature");
323 for (
Value *RetValEl : CoroResults->return_values())
326 }
else if (NumReturns == 0) {
331 Builder.
CreateRet(*CoroResults->retval_begin());
335 CoroResults->eraseFromParent();
341 case coro::ABI::Retcon: {
342 assert(!cast<CoroEndInst>(
End)->hasResults() &&
343 "retcon coroutine should not return any values");
346 auto RetStructTy = dyn_cast<StructType>(
RetTy);
348 cast<PointerType>(RetStructTy ? RetStructTy->getElementType(0) :
RetTy);
361 auto *BB =
End->getParent();
362 BB->splitBasicBlock(
End);
363 BB->getTerminator()->eraseFromParent();
377 Shape.
ABI == coro::ABI::Switch &&
378 "markCoroutineAsDone is only supported for Switch-Resumed ABI for now.");
397 "The final suspend should only live in the last position of "
415 case coro::ABI::Switch: {
428 case coro::ABI::Async:
431 case coro::ABI::Retcon:
432 case coro::ABI::RetconOnce:
439 auto *FromPad = cast<CleanupPadInst>(Bundle->Inputs[0]);
441 End->getParent()->splitBasicBlock(
End);
442 CleanupRet->getParent()->getTerminator()->eraseFromParent();
456 End->eraseFromParent();
468void CoroCloner::handleFinalSuspend() {
469 assert(Shape.ABI == coro::ABI::Switch &&
470 Shape.SwitchLowering.HasFinalSuspend);
472 if (isSwitchDestroyFunction() && Shape.SwitchLowering.HasUnwindCoroEnd)
475 auto *
Switch = cast<SwitchInst>(VMap[Shape.SwitchLowering.ResumeSwitch]);
476 auto FinalCaseIt = std::prev(
Switch->case_end());
477 BasicBlock *ResumeBB = FinalCaseIt->getCaseSuccessor();
478 Switch->removeCase(FinalCaseIt);
479 if (isSwitchDestroyFunction()) {
484 if (NewF->isCoroOnlyDestroyWhenComplete()) {
487 Builder.CreateBr(ResumeBB);
489 auto *GepIndex = Builder.CreateStructGEP(
493 Builder.CreateLoad(Shape.getSwitchResumePointerType(), GepIndex);
494 auto *
Cond = Builder.CreateIsNull(Load);
495 Builder.CreateCondBr(
Cond, ResumeBB, NewSwitchBB);
503 auto *AsyncSuspend = cast<CoroSuspendAsyncInst>(Suspend);
504 auto *StructTy = cast<StructType>(AsyncSuspend->getType());
507 return FunctionType::get(VoidTy, StructTy->elements(),
false);
515 auto *FnTy = (Shape.
ABI != coro::ABI::Async)
523 M->getFunctionList().insert(InsertBefore, NewF);
532void CoroCloner::replaceRetconOrAsyncSuspendUses() {
533 assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce ||
534 Shape.ABI == coro::ABI::Async);
536 auto NewS = VMap[ActiveSuspend];
537 if (NewS->use_empty())
544 bool IsAsyncABI = Shape.ABI == coro::ABI::Async;
545 for (
auto I = IsAsyncABI ? NewF->arg_begin() : std::next(NewF->arg_begin()),
552 if (!isa<StructType>(NewS->getType())) {
554 NewS->replaceAllUsesWith(
Args.front());
560 auto *EVI = dyn_cast<ExtractValueInst>(
U.getUser());
561 if (!EVI || EVI->getNumIndices() != 1)
564 EVI->replaceAllUsesWith(Args[EVI->getIndices().front()]);
565 EVI->eraseFromParent();
569 if (NewS->use_empty())
574 for (
size_t I = 0, E =
Args.size();
I != E; ++
I)
575 Agg = Builder.CreateInsertValue(Agg, Args[
I],
I);
580void CoroCloner::replaceCoroSuspends() {
581 Value *SuspendResult;
590 case coro::ABI::Switch:
591 SuspendResult = Builder.getInt8(isSwitchDestroyFunction() ? 1 : 0);
595 case coro::ABI::Async:
601 case coro::ABI::RetconOnce:
602 case coro::ABI::Retcon:
608 if (CS == ActiveSuspend)
611 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[CS]);
612 MappedCS->replaceAllUsesWith(SuspendResult);
613 MappedCS->eraseFromParent();
617void CoroCloner::replaceCoroEnds() {
621 auto *NewCE = cast<AnyCoroEndInst>(VMap[CE]);
630 Value *CachedSlot =
nullptr;
631 auto getSwiftErrorSlot = [&](
Type *ValueTy) ->
Value * {
636 for (
auto &Arg :
F.args()) {
637 if (Arg.isSwiftError()) {
644 IRBuilder<> Builder(
F.getEntryBlock().getFirstNonPHIOrDbg());
653 auto MappedOp = VMap ? cast<CallInst>((*VMap)[
Op]) :
Op;
658 if (
Op->arg_empty()) {
659 auto ValueTy =
Op->getType();
660 auto Slot = getSwiftErrorSlot(ValueTy);
661 MappedResult = Builder.
CreateLoad(ValueTy, Slot);
664 auto Value = MappedOp->getArgOperand(0);
666 auto Slot = getSwiftErrorSlot(ValueTy);
671 MappedOp->replaceAllUsesWith(MappedResult);
672 MappedOp->eraseFromParent();
676 if (VMap ==
nullptr) {
682static std::pair<SmallVector<DbgVariableIntrinsic *, 8>,
690 if (
auto *DVI = dyn_cast<DbgVariableIntrinsic>(&
I))
693 return {Intrinsics, DbgVariableRecords};
696void CoroCloner::replaceSwiftErrorOps() {
700void CoroCloner::salvageDebugInfo() {
717 auto IsUnreachableBlock = [&](
BasicBlock *BB) {
721 auto RemoveOne = [&](
auto *DVI) {
722 if (IsUnreachableBlock(DVI->getParent()))
723 DVI->eraseFromParent();
724 else if (isa_and_nonnull<AllocaInst>(DVI->getVariableLocationOp(0))) {
727 for (
auto *
User : DVI->getVariableLocationOp(0)->
users())
728 if (
auto *
I = dyn_cast<Instruction>(
User))
729 if (!isa<AllocaInst>(
I) && !IsUnreachableBlock(
I->getParent()))
732 DVI->eraseFromParent();
736 for_each(DbgVariableRecords, RemoveOne);
739void CoroCloner::replaceEntryBlock() {
745 auto *Entry = cast<BasicBlock>(VMap[Shape.AllocaSpillBlock]);
746 auto *OldEntry = &NewF->getEntryBlock();
747 Entry->setName(
"entry" + Suffix);
748 Entry->moveBefore(OldEntry);
749 Entry->getTerminator()->eraseFromParent();
754 assert(Entry->hasOneUse());
755 auto BranchToEntry = cast<BranchInst>(Entry->user_back());
756 assert(BranchToEntry->isUnconditional());
757 Builder.SetInsertPoint(BranchToEntry);
758 Builder.CreateUnreachable();
759 BranchToEntry->eraseFromParent();
762 Builder.SetInsertPoint(Entry);
764 case coro::ABI::Switch: {
768 cast<BasicBlock>(VMap[Shape.SwitchLowering.ResumeEntryBlock]);
769 Builder.CreateBr(SwitchBB);
772 case coro::ABI::Async:
773 case coro::ABI::Retcon:
774 case coro::ABI::RetconOnce: {
778 assert((Shape.ABI == coro::ABI::Async &&
779 isa<CoroSuspendAsyncInst>(ActiveSuspend)) ||
780 ((Shape.ABI == coro::ABI::Retcon ||
781 Shape.ABI == coro::ABI::RetconOnce) &&
782 isa<CoroSuspendRetconInst>(ActiveSuspend)));
783 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[ActiveSuspend]);
784 auto Branch = cast<BranchInst>(MappedCS->getNextNode());
786 Builder.CreateBr(
Branch->getSuccessor(0));
796 auto *Alloca = dyn_cast<AllocaInst>(&
I);
797 if (!Alloca ||
I.use_empty())
799 if (DT.isReachableFromEntry(
I.getParent()) ||
800 !isa<ConstantInt>(Alloca->getArraySize()))
802 I.moveBefore(*Entry, Entry->getFirstInsertionPt());
807Value *CoroCloner::deriveNewFramePointer() {
812 case coro::ABI::Switch:
813 return &*NewF->arg_begin();
819 case coro::ABI::Async: {
820 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
821 auto ContextIdx = ActiveAsyncSuspend->getStorageArgumentIndex() & 0xff;
822 auto *CalleeContext = NewF->getArg(ContextIdx);
823 auto *ProjectionFunc =
824 ActiveAsyncSuspend->getAsyncContextProjectionFunction();
826 cast<CoroSuspendAsyncInst>(VMap[ActiveSuspend])->getDebugLoc();
828 auto *CallerContext = Builder.CreateCall(ProjectionFunc->getFunctionType(),
829 ProjectionFunc, CalleeContext);
830 CallerContext->setCallingConv(ProjectionFunc->getCallingConv());
831 CallerContext->setDebugLoc(DbgLoc);
833 auto &
Context = Builder.getContext();
834 auto *FramePtrAddr = Builder.CreateConstInBoundsGEP1_32(
836 Shape.AsyncLowering.FrameOffset,
"async.ctx.frameptr");
840 assert(InlineRes.isSuccess());
845 case coro::ABI::Retcon:
846 case coro::ABI::RetconOnce: {
847 Argument *NewStorage = &*NewF->arg_begin();
848 auto FramePtrTy = PointerType::getUnqual(Shape.FrameTy->getContext());
851 if (Shape.RetconLowering.IsFrameInlineInStorage)
855 return Builder.CreateLoad(FramePtrTy, NewStorage);
863 Align Alignment,
bool NoAlias) {
873 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
877 unsigned ParamIndex) {
880 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
884 unsigned ParamIndex) {
887 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
892void CoroCloner::create() {
896 OrigF.getParent()->end(), ActiveSuspend);
907 VMap[&
A] = DummyArgs.
back();
914 auto savedVisibility = NewF->getVisibility();
915 auto savedUnnamedAddr = NewF->getUnnamedAddr();
916 auto savedDLLStorageClass = NewF->getDLLStorageClass();
921 auto savedLinkage = NewF->getLinkage();
925 CloneFunctionChangeType::LocalChangesOnly, Returns);
927 auto &
Context = NewF->getContext();
936 assert(SP != OrigF.getSubprogram() && SP->isDistinct());
938 if (
auto DL = ActiveSuspend->getDebugLoc())
939 if (SP->getFile() ==
DL->getFile())
940 SP->setScopeLine(
DL->getLine());
950 SP->getUnit()->getSourceLanguage() == dwarf::DW_LANG_Swift) {
951 SP->replaceLinkageName(
MDString::get(Context, NewF->getName()));
952 if (
auto *Decl = SP->getDeclaration()) {
953 auto *NewDecl = DISubprogram::get(
954 Decl->getContext(), Decl->getScope(), Decl->getName(),
955 NewF->getName(), Decl->getFile(), Decl->getLine(), Decl->getType(),
956 Decl->getScopeLine(), Decl->getContainingType(),
957 Decl->getVirtualIndex(), Decl->getThisAdjustment(),
958 Decl->getFlags(), Decl->getSPFlags(), Decl->getUnit(),
959 Decl->getTemplateParams(),
nullptr, Decl->getRetainedNodes(),
960 Decl->getThrownTypes(), Decl->getAnnotations(),
961 Decl->getTargetFuncName());
962 SP->replaceDeclaration(NewDecl);
967 NewF->setLinkage(savedLinkage);
968 NewF->setVisibility(savedVisibility);
969 NewF->setUnnamedAddr(savedUnnamedAddr);
970 NewF->setDLLStorageClass(savedDLLStorageClass);
974 if (Shape.ABI == coro::ABI::Switch &&
975 NewF->hasMetadata(LLVMContext::MD_func_sanitize))
976 NewF->eraseMetadata(LLVMContext::MD_func_sanitize);
979 auto OrigAttrs = NewF->getAttributes();
983 case coro::ABI::Switch:
986 NewAttrs = NewAttrs.addFnAttributes(
987 Context,
AttrBuilder(Context, OrigAttrs.getFnAttrs()));
990 Shape.FrameAlign,
false);
992 case coro::ABI::Async: {
993 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
994 if (OrigF.hasParamAttribute(Shape.AsyncLowering.ContextArgNo,
995 Attribute::SwiftAsync)) {
997 ActiveAsyncSuspend->getStorageArgumentIndex();
998 auto ContextArgIndex = ArgAttributeIndices & 0xff;
1003 auto SwiftSelfIndex = ArgAttributeIndices >> 8;
1009 auto FnAttrs = OrigF.getAttributes().getFnAttrs();
1010 NewAttrs = NewAttrs.addFnAttributes(Context,
AttrBuilder(Context, FnAttrs));
1013 case coro::ABI::Retcon:
1014 case coro::ABI::RetconOnce:
1017 NewAttrs = Shape.RetconLowering.ResumePrototype->getAttributes();
1021 Shape.getRetconCoroId()->getStorageSize(),
1022 Shape.getRetconCoroId()->getStorageAlignment(),
1028 switch (Shape.ABI) {
1033 case coro::ABI::Switch:
1034 case coro::ABI::RetconOnce:
1043 case coro::ABI::Retcon:
1049 case coro::ABI::Async:
1053 NewF->setAttributes(NewAttrs);
1054 NewF->setCallingConv(Shape.getResumeFunctionCC());
1057 replaceEntryBlock();
1059 Builder.SetInsertPoint(&NewF->getEntryBlock().front());
1060 NewFramePtr = deriveNewFramePointer();
1063 Value *OldFramePtr = VMap[Shape.FramePtr];
1064 NewFramePtr->
takeName(OldFramePtr);
1068 auto *NewVFrame = Builder.CreateBitCast(
1069 NewFramePtr, PointerType::getUnqual(Builder.getContext()),
"vFrame");
1070 Value *OldVFrame = cast<Value>(VMap[Shape.CoroBegin]);
1071 if (OldVFrame != NewVFrame)
1078 DummyArg->deleteValue();
1081 switch (Shape.ABI) {
1082 case coro::ABI::Switch:
1086 if (Shape.SwitchLowering.HasFinalSuspend)
1087 handleFinalSuspend();
1089 case coro::ABI::Async:
1090 case coro::ABI::Retcon:
1091 case coro::ABI::RetconOnce:
1094 assert(ActiveSuspend !=
nullptr &&
1095 "no active suspend when lowering a continuation-style coroutine");
1096 replaceRetconOrAsyncSuspendUses();
1101 replaceCoroSuspends();
1114 if (Shape.ABI == coro::ABI::Switch)
1116 FKind == CoroCloner::Kind::SwitchCleanup);
1122 auto *FuncPtrStruct = cast<ConstantStruct>(
1124 auto *OrigRelativeFunOffset = FuncPtrStruct->getOperand(0);
1125 auto *OrigContextSize = FuncPtrStruct->getOperand(1);
1126 auto *NewContextSize = ConstantInt::get(OrigContextSize->getType(),
1129 FuncPtrStruct->getType(), OrigRelativeFunOffset, NewContextSize);
1135 if (Shape.
ABI == coro::ABI::Async)
1148 auto *SizeIntrin = Shape.
CoroSizes.back();
1149 Module *M = SizeIntrin->getModule();
1152 auto *SizeConstant = ConstantInt::get(SizeIntrin->getType(),
Size);
1179 auto V = PN.getIncomingValueForBlock(PrevBB);
1181 auto VI = ResolvedValues.
find(V);
1182 if (VI != ResolvedValues.
end())
1185 ResolvedValues[&PN] = V;
1194 if (isa<ReturnInst>(InitialInst))
1201 auto TryResolveConstant = [&ResolvedValues](
Value *V) {
1202 auto It = ResolvedValues.
find(V);
1203 if (It != ResolvedValues.
end())
1205 return dyn_cast<ConstantInt>(V);
1210 if (isa<ReturnInst>(
I)) {
1211 assert(!cast<ReturnInst>(
I)->getReturnValue());
1216 if (
auto *BR = dyn_cast<BranchInst>(
I)) {
1217 unsigned SuccIndex = 0;
1218 if (BR->isConditional()) {
1230 SuccIndex =
Cond->isOne() ? 0 : 1;
1233 BasicBlock *Succ = BR->getSuccessor(SuccIndex);
1239 if (
auto *Cmp = dyn_cast<CmpInst>(
I)) {
1243 ConstantInt *Cond0 = TryResolveConstant(Cmp->getOperand(0));
1244 ConstantInt *Cond1 = TryResolveConstant(Cmp->getOperand(1));
1245 if (Cond0 && Cond1) {
1248 Cmp->getPredicate(), Cond0, Cond1,
DL));
1250 ResolvedValues[Cmp] = Result;
1251 I =
I->getNextNode();
1257 if (
auto *SI = dyn_cast<SwitchInst>(
I)) {
1268 if (
I->isDebugOrPseudoInst() ||
I->isLifetimeStartOrEnd() ||
1272 I =
I->getNextNode();
1289 if (!CalleeTy->getReturnType()->isVoidTy() || (CalleeTy->getNumParams() != 1))
1292 Type *CalleeParmTy = CalleeTy->getParamType(0);
1302 Attribute::StructRet, Attribute::ByVal, Attribute::InAlloca,
1303 Attribute::Preallocated, Attribute::InReg, Attribute::Returned,
1304 Attribute::SwiftSelf, Attribute::SwiftError};
1306 for (
auto AK : ABIAttrs)
1307 if (Attrs.hasParamAttr(0, AK))
1317 auto *CoroId = CoroBegin->
getId();
1319 switch (Shape.
ABI) {
1320 case coro::ABI::Switch: {
1321 auto SwitchId = cast<CoroIdInst>(CoroId);
1327 AllocInst->replaceAllUsesWith(Builder.
getFalse());
1328 AllocInst->eraseFromParent();
1329 CoroBegin->replaceAllUsesWith(Frame);
1331 CoroBegin->replaceAllUsesWith(CoroBegin->getMem());
1336 case coro::ABI::Async:
1337 case coro::ABI::Retcon:
1338 case coro::ABI::RetconOnce:
1343 CoroBegin->eraseFromParent();
1352 if (isa<IntrinsicInst>(
I))
1355 if (isa<CallBase>(
I))
1371 while (!Worklist.
empty()) {
1381 Set.
erase(ResDesBB);
1383 for (
auto *BB : Set)
1392 auto *ResumeOrDestroyBB = ResumeOrDestroy->
getParent();
1394 if (SaveBB == ResumeOrDestroyBB)
1422 Prev = Pred->getTerminator();
1425 CallBase *CB = dyn_cast<CallBase>(Prev);
1432 auto *SubFn = dyn_cast<CoroSubFnInst>(Callee);
1437 if (SubFn->getFrame() != CoroBegin)
1451 Save->eraseFromParent();
1454 if (
auto *Invoke = dyn_cast<InvokeInst>(CB)) {
1463 if (CalledValue != SubFn && CalledValue->user_empty())
1464 if (
auto *
I = dyn_cast<Instruction>(CalledValue))
1465 I->eraseFromParent();
1468 if (SubFn->user_empty())
1469 SubFn->eraseFromParent();
1477 if (Shape.
ABI != coro::ABI::Switch)
1481 size_t I = 0,
N = S.size();
1485 size_t ChangedFinalIndex = std::numeric_limits<size_t>::max();
1487 auto SI = cast<CoroSuspendInst>(S[
I]);
1496 if (cast<CoroSuspendInst>(S[
I])->isFinal()) {
1498 ChangedFinalIndex =
I;
1510 if (ChangedFinalIndex <
N) {
1511 assert(cast<CoroSuspendInst>(S[ChangedFinalIndex])->isFinal());
1512 std::swap(S[ChangedFinalIndex], S.back());
1518struct SwitchCoroutineSplitter {
1524 createResumeEntryBlock(
F, Shape);
1526 createClone(
F,
".resume", Shape, CoroCloner::Kind::SwitchResume);
1527 auto *DestroyClone =
1528 createClone(
F,
".destroy", Shape, CoroCloner::Kind::SwitchUnwind);
1529 auto *CleanupClone =
1530 createClone(
F,
".cleanup", Shape, CoroCloner::Kind::SwitchCleanup);
1542 addMustTailToCoroResumes(*ResumeClone,
TTI);
1545 updateCoroFrame(Shape, ResumeClone, DestroyClone, CleanupClone);
1555 setCoroInfo(
F, Shape, Clones);
1564 CoroCloner Cloner(
F, Suffix, Shape, FKind);
1566 return Cloner.getFunction();
1588 auto *FrameTy = Shape.
FrameTy;
1589 auto *GepIndex = Builder.CreateStructGEP(
1596 size_t SuspendIndex = 0;
1598 auto *S = cast<CoroSuspendInst>(AnyS);
1604 auto *Save = S->getCoroSave();
1605 Builder.SetInsertPoint(Save);
1611 auto *GepIndex = Builder.CreateStructGEP(
1613 Builder.CreateStore(IndexVal, GepIndex);
1617 Save->eraseFromParent();
1642 auto *SuspendBB = S->getParent();
1646 S->getNextNode(), ResumeBB->
getName() +
Twine(
".landing"));
1647 Switch->addCase(IndexVal, ResumeBB);
1649 cast<BranchInst>(SuspendBB->getTerminator())->setSuccessor(0, LandingBB);
1651 PN->insertBefore(LandingBB->begin());
1652 S->replaceAllUsesWith(PN);
1653 PN->addIncoming(Builder.getInt8(-1), SuspendBB);
1654 PN->addIncoming(S, ResumeBB);
1659 Builder.SetInsertPoint(UnreachBB);
1660 Builder.CreateUnreachable();
1671 bool Changed =
false;
1676 if (
auto *Call = dyn_cast<CallInst>(&
I))
1698 auto *ResumeAddr = Builder.CreateStructGEP(
1701 Builder.CreateStore(ResumeFn, ResumeAddr);
1703 Value *DestroyOrCleanupFn = DestroyFn;
1709 DestroyOrCleanupFn = Builder.CreateSelect(CA, DestroyFn, CleanupFn);
1712 auto *DestroyAddr = Builder.CreateStructGEP(
1715 Builder.CreateStore(DestroyOrCleanupFn, DestroyAddr);
1739 auto *ArrTy = ArrayType::get(Part->
getType(),
Args.size());
1743 GlobalVariable::PrivateLinkage, ConstVal,
1744 F.getName() +
Twine(
".resumers"));
1756 Value *Continuation) {
1759 auto *Int8PtrTy = PointerType::getUnqual(
Context);
1764 ResumeIntrinsic->eraseFromParent();
1774 for (
auto *paramTy : FnTy->params()) {
1776 if (paramTy != FnArgs[ArgIdx]->
getType())
1795 auto *TailCall = Builder.
CreateCall(FnTy, MustTailCallFn, CallArgs);
1800 TailCall->setDebugLoc(Loc);
1812 F.removeFnAttr(Attribute::NoReturn);
1813 F.removeRetAttr(Attribute::NoAlias);
1814 F.removeRetAttr(Attribute::NonNull);
1817 auto *Int8PtrTy = PointerType::getUnqual(
Context);
1826 "async.ctx.frameptr");
1837 auto NextF = std::next(
F.getIterator());
1845 auto ResumeNameSuffix =
".resume.";
1846 auto ProjectionFunctionName =
1847 Suspend->getAsyncContextProjectionFunction()->getName();
1848 bool UseSwiftMangling =
false;
1849 if (ProjectionFunctionName.equals(
"__swift_async_resume_project_context")) {
1850 ResumeNameSuffix =
"TQ";
1851 UseSwiftMangling =
true;
1852 }
else if (ProjectionFunctionName.equals(
1853 "__swift_async_resume_get_context")) {
1854 ResumeNameSuffix =
"TY";
1855 UseSwiftMangling =
true;
1859 UseSwiftMangling ? ResumeNameSuffix +
Twine(
Idx) +
"_"
1866 auto *SuspendBB = Suspend->getParent();
1867 auto *NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1868 auto *Branch = cast<BranchInst>(SuspendBB->getTerminator());
1873 Branch->setSuccessor(0, ReturnBB);
1878 auto *Fn = Suspend->getMustTailCallFunction();
1895 auto *Clone = Clones[
Idx];
1897 CoroCloner(
F,
"resume." +
Twine(
Idx), Shape, Clone, Suspend).create();
1903 assert(Shape.
ABI == coro::ABI::Retcon || Shape.
ABI == coro::ABI::RetconOnce);
1908 F.removeFnAttr(Attribute::NoReturn);
1909 F.removeRetAttr(Attribute::NoAlias);
1910 F.removeRetAttr(Attribute::NonNull);
1916 RawFramePtr = Id->getStorage();
1932 Builder.
CreateStore(RawFramePtr, Id->getStorage());
1948 auto NextF = std::next(
F.getIterator());
1952 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1953 auto Suspend = cast<CoroSuspendRetconInst>(Shape.
CoroSuspends[i]);
1962 auto SuspendBB = Suspend->getParent();
1963 auto NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1964 auto Branch = cast<BranchInst>(SuspendBB->getTerminator());
1988 auto RetTy =
F.getReturnType();
1993 auto CastedContinuationTy =
1994 (ReturnPHIs.
size() == 1 ?
RetTy :
RetTy->getStructElementType(0));
1995 auto *CastedContinuation =
1999 if (ReturnPHIs.
size() == 1) {
2000 RetV = CastedContinuation;
2004 for (
size_t I = 1, E = ReturnPHIs.
size();
I != E; ++
I)
2012 Branch->setSuccessor(0, ReturnBB);
2013 ReturnPHIs[0]->addIncoming(Continuation, SuspendBB);
2014 size_t NextPHIIndex = 1;
2015 for (
auto &VUse : Suspend->value_operands())
2016 ReturnPHIs[NextPHIIndex++]->addIncoming(&*VUse, SuspendBB);
2021 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
2023 auto Clone = Clones[i];
2025 CoroCloner(
F,
"resume." +
Twine(i), Shape, Clone, Suspend).create();
2034 PrettyStackTraceFunction(
Function &F) :
F(
F) {}
2036 OS <<
"While splitting coroutine ";
2037 F.printAsOperand(
OS,
false,
F.getParent());
2046 std::function<
bool(
Instruction &)> MaterializableCallback) {
2047 PrettyStackTraceFunction prettyStackTrace(
F);
2060 buildCoroutineFrame(
F, Shape,
TTI, MaterializableCallback);
2068 switch (Shape.
ABI) {
2069 case coro::ABI::Switch:
2070 SwitchCoroutineSplitter::split(
F, Shape, Clones,
TTI);
2072 case coro::ABI::Async:
2075 case coro::ABI::Retcon:
2076 case coro::ABI::RetconOnce:
2091 for (
auto *DDI : DbgInsts)
2115 if (Shape.
ABI != coro::ABI::Switch)
2121 End->eraseFromParent();
2125 if (!Clones.
empty()) {
2126 switch (Shape.
ABI) {
2127 case coro::ABI::Switch:
2133 case coro::ABI::Async:
2134 case coro::ABI::Retcon:
2135 case coro::ABI::RetconOnce:
2138 if (!Clones.empty())
2167 auto *Cast = dyn_cast<BitCastInst>(U.getUser());
2168 if (!Cast || Cast->getType() != Fn->getType())
2172 Cast->replaceAllUsesWith(Fn);
2173 Cast->eraseFromParent();
2182 while (
auto *Cast = dyn_cast<BitCastInst>(CastFn)) {
2183 if (!Cast->use_empty())
2185 CastFn = Cast->getOperand(0);
2186 Cast->eraseFromParent();
2192 bool Changed =
false;
2195 auto *Prepare = cast<CallInst>(
P.getUser());
2206 auto *PrepareFn = M.getFunction(
Name);
2207 if (PrepareFn && !PrepareFn->use_empty())
2212 : MaterializableCallback(coro::defaultMaterializable),
2213 OptimizeFrame(OptimizeFrame) {}
2221 Module &M = *
C.begin()->getFunction().getParent();
2233 if (
N.getFunction().isPresplitCoroutine())
2236 if (Coroutines.
empty() && PrepareFns.
empty())
2239 if (Coroutines.
empty()) {
2240 for (
auto *PrepareFn : PrepareFns) {
2248 LLVM_DEBUG(
dbgs() <<
"CoroSplit: Processing coroutine '" <<
F.getName()
2250 F.setSplittedCoroutine();
2261 <<
"Split '" <<
ore::NV(
"function",
F.getName())
2262 <<
"' (frame_size=" <<
ore::NV(
"frame_size", Shape.FrameSize)
2263 <<
", align=" <<
ore::NV(
"align", Shape.FrameAlign.value()) <<
")";
2266 if (!Shape.CoroSuspends.empty()) {
2274 if (!PrepareFns.
empty()) {
2275 for (
auto *PrepareFn : PrepareFns) {
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
AMDGPU address-space-based Alias Analysis wrapper (amdgpu-aa).
AMDGPU Lower Kernel Arguments
Expand Atomic instructions
This file contains the simple types necessary to represent the attributes associated with functions a...
BlockVerifier::State From
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
This file provides interfaces used to manipulate a call graph, regardless if it is a "old style" Call...
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
This file contains the declarations for the subclasses of Constant, which represent the different fla...
static void removeCoroEnds(const coro::Shape &Shape) — Remove calls to llvm.coro.end in the original function.
static void addSwiftSelfAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static bool hasCallsBetween(Instruction *Save, Instruction *ResumeOrDestroy)
static std::pair< SmallVector< DbgVariableIntrinsic *, 8 >, SmallVector< DbgVariableRecord * > > collectDbgVariableIntrinsics(Function &F)
Returns all DbgVariableIntrinsic in F.
static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape, ValueToValueMapTy *VMap)
static void addAsyncContextAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void maybeFreeRetconStorage(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr, CallGraph *CG)
static bool hasCallsInBlocksBetween(BasicBlock *SaveBB, BasicBlock *ResDesBB)
static Function * createCloneDeclaration(Function &OrigF, coro::Shape &Shape, const Twine &Suffix, Module::iterator InsertBefore, AnyCoroSuspendInst *ActiveSuspend)
static FunctionType * getFunctionTypeFromAsyncSuspend(AnyCoroSuspendInst *Suspend)
static void addPrepareFunction(const Module &M, SmallVectorImpl< Function * > &Fns, StringRef Name)
static void updateCallGraphAfterCoroutineSplit(LazyCallGraph::Node &N, const coro::Shape &Shape, const SmallVectorImpl< Function * > &Clones, LazyCallGraph::SCC &C, LazyCallGraph &CG, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
static void simplifySuspendPoints(coro::Shape &Shape)
static void addFramePointerAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex, uint64_t Size, Align Alignment, bool NoAlias)
static bool replaceAllPrepares(Function *PrepareFn, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceFallthroughCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace a non-unwind call to llvm.coro.end.
static void replaceFrameSizeAndAlignment(coro::Shape &Shape)
static bool replaceCoroEndAsync(AnyCoroEndInst *End)
Replace an llvm.coro.end.async.
static void replacePrepare(CallInst *Prepare, LazyCallGraph &CG, LazyCallGraph::SCC &C) — Replace a call to llvm.coro.prepare.retcon.
static void replaceUnwindCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace an unwind call to llvm.coro.end.
static bool simplifySuspendPoint(CoroSuspendInst *Suspend, CoroBeginInst *CoroBegin)
static void lowerAwaitSuspend(IRBuilder<> &Builder, CoroAwaitSuspendInst *CB)
static bool hasCallsInBlockBetween(Instruction *From, Instruction *To)
static void markCoroutineAsDone(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr)
static void updateAsyncFuncPointerContextSize(coro::Shape &Shape)
static void replaceCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
static void lowerAwaitSuspends(Function &F, coro::Shape &Shape)
static void handleNoSuspendCoroutine(coro::Shape &Shape)
static coro::Shape splitCoroutine(Function &F, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI, bool OptimizeFrame, std::function< bool(Instruction &)> MaterializableCallback)
static void postSplitCleanup(Function &F)
static void splitAsyncCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI)
static bool simplifyTerminatorLeadingToRet(Instruction *InitialInst)
static void splitRetconCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones)
static void scanPHIsAndUpdateValueMap(Instruction *Prev, BasicBlock *NewBlock, DenseMap< Value *, Value * > &ResolvedValues)
static void coerceArguments(IRBuilder<> &Builder, FunctionType *FnTy, ArrayRef< Value * > FnArgs, SmallVectorImpl< Value * > &CallArgs) — Coerce the arguments in FnArgs according to FnTy, writing the results into CallArgs.
static void replaceAsyncResumeFunction(CoroSuspendAsyncInst *Suspend, Value *Continuation)
static bool shouldBeMustTail(const CallInst &CI, const Function &F)
static Error split(StringRef Str, char Separator, std::pair< StringRef, StringRef > &Split)
Checked version of split, to ensure mandatory subparts.
Returns the subtype a function will return at a given Idx; this should correspond to the result type of an ExtractValue instruction executed with just that one Idx.
This file defines the DenseMap class.
This file contains constants used for implementing Dwarf debug support.
static Function * getFunction(Constant *C)
Rewrite Partial Register Uses
This file provides various utilities for inspecting and working with the control flow graph in LLVM I...
Implements a lazy call graph analysis and related passes for the new pass manager.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
This file provides a priority worklist.
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
static SymbolRef::Type getType(const Symbol *Sym)
static const unsigned FramePtr
void setSwiftError(bool V)
Specify whether this alloca is used to represent a swifterror.
void setAlignment(Align Align)
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
CoroAllocInst * getCoroAlloc()
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
ArrayRef< T > drop_front(size_t N=1) const
Drop the first N elements of the array.
size_t size() const
size - Get the array size.
AttrBuilder & addAlignmentAttr(MaybeAlign Align)
This turns an alignment into the form used internally in Attribute.
AttrBuilder & addAttribute(Attribute::AttrKind Val)
Add an attribute to the builder.
AttrBuilder & addDereferenceableAttr(uint64_t Bytes)
This turns the number of dereferenceable bytes into the form used internally in Attribute.
AttributeList removeParamAttributes(LLVMContext &C, unsigned ArgNo, const AttributeMask &AttrsToRemove) const
Remove the specified attribute at the specified arg index from this attribute list.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
LLVM Basic Block Representation.
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="", bool Before=false)
Split the basic block into two basic blocks at the specified instruction.
const BasicBlock * getSinglePredecessor() const
Return the predecessor of this block if it has a single predecessor block.
const Instruction * getFirstNonPHIOrDbgOrLifetime(bool SkipPseudoOp=true) const
Returns a pointer to the first instruction in this block that is not a PHINode, a debug intrinsic,...
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
static BranchInst * Create(BasicBlock *IfTrue, BasicBlock::iterator InsertBefore)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
void setCallingConv(CallingConv::ID CC)
CallingConv::ID getCallingConv() const
Value * getCalledOperand() const
void setAttributes(AttributeList A)
Set the parameter attributes for this call.
Value * getArgOperand(unsigned i) const
FunctionType * getFunctionType() const
AttributeList getAttributes() const
Return the parameter attributes for this call.
The basic data container for the call graph of a Module of IR.
This class represents a function call, abstracting a target machine's calling convention.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
static ConstantInt * getFalse(LLVMContext &Context)
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
This represents the llvm.coro.align instruction.
This represents the llvm.coro.alloc instruction.
This represents the llvm.coro.await.suspend.{void,bool,handle} instructions.
Value * getAwaiter() const
Function * getWrapperFunction() const
This class represents the llvm.coro.begin instruction.
AnyCoroIdInst * getId() const
This represents the llvm.coro.id instruction.
void setInfo(Constant *C)
This represents the llvm.coro.size instruction.
This represents the llvm.coro.suspend.async instruction.
CoroAsyncResumeInst * getResumeFunction() const
This represents the llvm.coro.suspend instruction.
CoroSaveInst * getCoroSave() const
DISubprogram * getSubprogram() const
Get the subprogram for this scope.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
This is the common base class for debug info intrinsics for variables.
Record of a variable value-assignment, aka a non instruction representation of the dbg....
iterator find(const_arg_type_t< KeyT > Val)
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
This class represents a freeze function that returns random concrete value if an operand is either a ...
A proxy from a FunctionAnalysisManager to an SCC.
Type * getReturnType() const
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
FunctionType * getFunctionType() const
Returns the FunctionType for me.
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These method get and set the calling convention of this functio...
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Module * getParent()
Get the module that this global value is contained inside of...
PointerType * getType() const
Global values are always pointers.
@ ExternalLinkage
Externally visible function.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void setInitializer(Constant *InitVal)
setInitializer - Sets the initializer for this global variable, removing any existing initializer if ...
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
Value * CreateInsertValue(Value *Agg, Value *Val, ArrayRef< unsigned > Idxs, const Twine &Name="")
InvokeInst * CreateInvoke(FunctionType *Ty, Value *Callee, BasicBlock *NormalDest, BasicBlock *UnwindDest, ArrayRef< Value * > Args, ArrayRef< OperandBundleDef > OpBundles, const Twine &Name="")
Create an invoke instruction.
Value * CreateStructGEP(Type *Ty, Value *Ptr, unsigned Idx, const Twine &Name="")
Value * CreateConstInBoundsGEP1_32(Type *Ty, Value *Ptr, unsigned Idx0, const Twine &Name="")
CleanupReturnInst * CreateCleanupRet(CleanupPadInst *CleanupPad, BasicBlock *UnwindBB=nullptr)
ReturnInst * CreateRet(Value *V)
Create a 'ret <val>' instruction.
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
Value * CreateBitOrPointerCast(Value *V, Type *DestTy, const Twine &Name="")
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateBitCast(Value *V, Type *DestTy, const Twine &Name="")
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool...
ReturnInst * CreateRetVoid()
Create a 'ret void' instruction.
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
ConstantInt * getFalse()
Get the constant value for i1 false.
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
This class captures the data input to the InlineFunction call, and records the auxiliary results prod...
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr it the function does not...
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
This is an important class for using LLVM in a threaded context.
A node in the call graph.
An SCC of the call graph.
A lazily constructed view of the call graph of a module.
void addSplitFunction(Function &OriginalFunction, Function &NewFunction)
Add a new function split/outlined from an existing function.
void addSplitRefRecursiveFunctions(Function &OriginalFunction, ArrayRef< Function * > NewFunctions)
Add new ref-recursive functions split/outlined from an existing function.
Node & get(Function &F)
Get a graph node for a given function, scanning it to populate the graph data as necessary.
SCC * lookupSCC(Node &N) const
Lookup a function's SCC in the graph.
static MDString * get(LLVMContext &Context, StringRef Str)
A Module instance is used to store all the information related to an LLVM module.
FunctionListType::iterator iterator
The Function iterators.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr, BasicBlock::iterator InsertBefore)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PrettyStackTraceEntry - This class is used to represent a frame of the "pretty" stack trace that is d...
virtual void print(raw_ostream &OS) const =0
print - Emit information about this stack frame to OS.
Return a value (possibly void), from a function.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
Type * getTypeAtIndex(const Value *V) const
Given an index value into the type, return the type of the element.
Analysis pass providing the TargetTransformInfo.
Value handle that tracks a Value across RAUW.
ValueTy * getValPtr() const
Triple - Helper class for working with autoconf configuration names.
bool isArch64Bit() const
Test whether the architecture is 64-bit.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
bool isPointerTy() const
True if this is an instance of PointerType.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static Type * getVoidTy(LLVMContext &C)
static IntegerType * getInt8Ty(LLVMContext &C)
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
void setOperand(unsigned i, Value *Val)
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ C
The default llvm calling convention, compatible with C.
void salvageDebugInfo(SmallDenseMap< Argument *, AllocaInst *, 4 > &ArgToAllocaMap, DbgVariableIntrinsic &DVI, bool OptimizeFrame, bool IsEntryPoint)
Attempts to rewrite the location operand of debug intrinsics in terms of the coroutine frame pointer,...
@ Switch
The "resume-switch" lowering, where there are separate resume and destroy functions that are shared b...
CallInst * createMustTailCall(DebugLoc Loc, Function *MustTailCallFn, TargetTransformInfo &TTI, ArrayRef< Value * > Arguments, IRBuilder<> &)
void replaceCoroFree(CoroIdInst *CoroId, bool Elide)
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
UnaryFunction for_each(R &&Range, UnaryFunction F)
Provide wrappers to std::for_each which take ranges instead of having to pass begin/end explicitly.
bool verifyFunction(const Function &F, raw_ostream *OS=nullptr)
Check a function for errors, useful for use when debugging a pass.
void salvageDebugInfo(const MachineRegisterInfo &MRI, MachineInstr &MI)
Assuming the instruction MI is going to be deleted, attempt to salvage debug users of MI by writing t...
LazyCallGraph::SCC & updateCGAndAnalysisManagerForFunctionPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a function pass.
LazyCallGraph::SCC & updateCGAndAnalysisManagerForCGSCCPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a CGSCC pass.
Constant * ConstantFoldCompareInstOperands(unsigned Predicate, Constant *LHS, Constant *RHS, const DataLayout &DL, const TargetLibraryInfo *TLI=nullptr, const Instruction *I=nullptr)
Attempt to constant fold a compare instruction (icmp/fcmp) with the specified operands.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
@ Async
"Asynchronous" unwind tables (instr precise)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
unsigned changeToUnreachable(Instruction *I, bool PreserveLCSSA=false, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Insert an unreachable instruction before the specified instruction, making it and the rest of the cod...
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
DWARFExpression::Operation Op
InlineResult InlineFunction(CallBase &CB, InlineFunctionInfo &IFI, bool MergeAttributes=false, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true, Function *ForwardVarArgsTo=nullptr)
This function inlines the called function into the basic block of the caller.
void CloneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, CloneFunctionChangeType Changes, SmallVectorImpl< ReturnInst * > &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Clone OldFunc into NewFunc, transforming the old arguments into references to VMap values.
auto predecessors(const MachineBasicBlock *BB)
static auto filterDbgVars(iterator_range< simple_ilist< DbgRecord >::iterator > R)
Filter the DbgRecord range to DbgVariableRecord types only and downcast.
bool removeUnreachableBlocks(Function &F, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Remove all blocks that can not be reached from the function's entry.
bool isPotentiallyReachable(const Instruction *From, const Instruction *To, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet=nullptr, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)
Determine whether instruction 'To' is reachable from 'From', without passing through any blocks in Ex...
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Support structure for SCC passes to communicate updates the call graph back to the CGSCC pass manager...
SmallPriorityWorklist< LazyCallGraph::SCC *, 1 > & CWorklist
Worklist of the SCCs queued for processing.
const std::function< bool(Instruction &)> MaterializableCallback
PreservedAnalyses run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM, LazyCallGraph &CG, CGSCCUpdateResult &UR)
CoroSplitPass(bool OptimizeFrame=false)
GlobalVariable * AsyncFuncPointer
bool IsFrameInlineInStorage
SwitchInst * ResumeSwitch
BasicBlock * ResumeEntryBlock
SmallVector< CoroAwaitSuspendInst *, 4 > CoroAwaitSuspends
AsyncLoweringStorage AsyncLowering
FunctionType * getResumeFunctionType() const
IntegerType * getIndexType() const
CoroIdInst * getSwitchCoroId() const
SmallVector< CoroSizeInst *, 2 > CoroSizes
SmallVector< AnyCoroSuspendInst *, 4 > CoroSuspends
Value * emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const
Allocate memory according to the rules of the active lowering.
SmallVector< CallInst *, 2 > SwiftErrorOps
ConstantInt * getIndex(uint64_t Value) const
bool OptimizeFrame
This would only be true if optimization are enabled.
SwitchLoweringStorage SwitchLowering
CoroBeginInst * CoroBegin
BasicBlock::iterator getInsertPtAfterFramePtr() const
ArrayRef< Type * > getRetconResultTypes() const
void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const
Deallocate memory according to the rules of the active lowering.
RetconLoweringStorage RetconLowering
SmallVector< CoroAlignInst *, 2 > CoroAligns
SmallVector< AnyCoroEndInst *, 4 > CoroEnds
unsigned getSwitchIndexField() const