commit 9c81a3f (llvm.org GIT mirror of llvm)

[PM] Port DSE to the new pass manager

Patch by JakeVanAdrighem. Thanks!

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@269847 91177308-0d34-0410-b5e6-96231b3b80d8
Author: Justin Bogner

7 changed files with 453 additions and 398 deletions.
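For context, this patch exposes dead store elimination to the new pass manager as DSEPass (registered below as FUNCTION_PASS("dse", DSEPass())). The following is a minimal sketch, not part of the patch, of driving the ported pass programmatically; the helper name runDSE and the PassBuilder boilerplate are illustrative assumptions about the usual new-pass-manager setup.

#include "llvm/IR/PassManager.h"
#include "llvm/Passes/PassBuilder.h"
#include "llvm/Transforms/Scalar/DeadStoreElimination.h"

using namespace llvm;

// Illustrative only: run the new-PM DSEPass over a single function.
PreservedAnalyses runDSE(Function &F) {
  PassBuilder PB;
  FunctionAnalysisManager FAM;
  // Registers the function analyses DSEPass queries (AAManager,
  // DominatorTreeAnalysis, MemoryDependenceAnalysis, TargetLibraryAnalysis).
  PB.registerFunctionAnalyses(FAM);

  FunctionPassManager FPM;
  FPM.addPass(DSEPass());
  return FPM.run(F, FAM);
}

Alternatively, the test at the end of this diff shows the textual pipeline form: opt -aa-pipeline=basic-aa -passes=dse.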
104104 void initializeDAEPass(PassRegistry&);
105105 void initializeDAHPass(PassRegistry&);
106106 void initializeDCELegacyPassPass(PassRegistry&);
107 void initializeDSEPass(PassRegistry&);
107 void initializeDSELegacyPassPass(PassRegistry&);
108108 void initializeDeadInstEliminationPass(PassRegistry&);
109109 void initializeDeadMachineInstructionElimPass(PassRegistry&);
110110 void initializeDelinearizationPass(PassRegistry &);
0 //===- DeadStoreElimination.h - Fast Dead Store Elimination -------------===//
1 //
2 // The LLVM Compiler Infrastructure
3 //
4 // This file is distributed under the University of Illinois Open Source
5 // License. See LICENSE.TXT for details.
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements a trivial dead store elimination that only considers
10 // basic-block local redundant stores.
11 //
12 // FIXME: This should eventually be extended to be a post-dominator tree
13 // traversal. Doing so would be pretty trivial.
14 //
15 //===----------------------------------------------------------------------===//
16
17 #ifndef LLVM_TRANSFORMS_SCALAR_DSE_H
18 #define LLVM_TRANSFORMS_SCALAR_DSE_H
19
20 #include "llvm/IR/Function.h"
21 #include "llvm/IR/PassManager.h"
22
23 namespace llvm {
24
25 /// This class implements a trivial dead store elimination. We consider
26 /// only the redundant stores that are local to a single Basic Block.
27 class DSEPass : public PassInfoMixin<DSEPass> {
28 public:
29 PreservedAnalyses run(Function &F, AnalysisManager<Function> &FAM);
30 };
31 }
32
33 #endif // LLVM_TRANSFORMS_SCALAR_DSE_H
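As the header comment says, the pass only removes stores that are provably redundant within a single basic block. A tiny hypothetical C++ example (not from the patch) of the pattern involved: once lowered to IR, the first store below has no reader before it is overwritten, so block-local DSE deletes it.

// Illustrative only: after lowering, the first store to *p is dead because
// the location is overwritten before any intervening load.
void overwrite(int *p) {
  *p = 1; // dead store, removed by DSE
  *p = 2; // only this store survives
}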
6363 #include "llvm/Transforms/PGOInstrumentation.h"
6464 #include "llvm/Transforms/Scalar/ADCE.h"
6565 #include "llvm/Transforms/Scalar/DCE.h"
66 #include "llvm/Transforms/Scalar/DeadStoreElimination.h"
6667 #include "llvm/Transforms/Scalar/EarlyCSE.h"
6768 #include "llvm/Transforms/Scalar/GVN.h"
6869 #include "llvm/Transforms/Scalar/LoopRotation.h"
110110 FUNCTION_PASS("aa-eval", AAEvaluator())
111111 FUNCTION_PASS("adce", ADCEPass())
112112 FUNCTION_PASS("dce", DCEPass())
113 FUNCTION_PASS("dse", DSEPass())
113114 FUNCTION_PASS("early-cse", EarlyCSEPass())
114115 FUNCTION_PASS("instcombine", InstCombinePass())
115116 FUNCTION_PASS("invalidate", InvalidateAllAnalysesPass())
1414 //
1515 //===----------------------------------------------------------------------===//
1616
17 #include "llvm/Transforms/Scalar.h"
17 #include "llvm/Transforms/Scalar/DeadStoreElimination.h"
1818 #include "llvm/ADT/STLExtras.h"
1919 #include "llvm/ADT/SetVector.h"
2020 #include "llvm/ADT/Statistic.h"
3535 #include "llvm/Pass.h"
3636 #include "llvm/Support/Debug.h"
3737 #include "llvm/Support/raw_ostream.h"
38 #include "llvm/Transforms/Scalar.h"
3839 #include "llvm/Transforms/Utils/Local.h"
3940 using namespace llvm;
4041
4445 STATISTIC(NumFastStores, "Number of stores deleted");
4546 STATISTIC(NumFastOther , "Number of other instrs removed");
4647
47 namespace {
48 struct DSE : public FunctionPass {
49 AliasAnalysis *AA;
50 MemoryDependenceResults *MD;
51 DominatorTree *DT;
52 const TargetLibraryInfo *TLI;
53
54 static char ID; // Pass identification, replacement for typeid
55 DSE() : FunctionPass(ID), AA(nullptr), MD(nullptr), DT(nullptr) {
56 initializeDSEPass(*PassRegistry::getPassRegistry());
57 }
58
59 bool runOnFunction(Function &F) override {
60 if (skipFunction(F))
61 return false;
62
63 AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
64 MD = &getAnalysis<MemoryDependenceWrapperPass>().getMemDep();
65 DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
66 TLI = &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
67
68 bool Changed = false;
69 for (BasicBlock &I : F)
70 // Only check non-dead blocks. Dead blocks may have strange pointer
71 // cycles that will confuse alias analysis.
72 if (DT->isReachableFromEntry(&I))
73 Changed |= runOnBasicBlock(I);
74
75 AA = nullptr; MD = nullptr; DT = nullptr;
76 return Changed;
77 }
78
79 bool runOnBasicBlock(BasicBlock &BB);
80 bool MemoryIsNotModifiedBetween(Instruction *FirstI, Instruction *SecondI);
81 bool HandleFree(CallInst *F);
82 bool handleEndBlock(BasicBlock &BB);
83 void RemoveAccessedObjects(const MemoryLocation &LoadedLoc,
84 SmallSetVector<Value *, 16> &DeadStackObjects,
85 const DataLayout &DL);
86
87 void getAnalysisUsage(AnalysisUsage &AU) const override {
88 AU.setPreservesCFG();
89 AU.addRequired<DominatorTreeWrapperPass>();
90 AU.addRequired<AAResultsWrapperPass>();
91 AU.addRequired<MemoryDependenceWrapperPass>();
92 AU.addRequired<TargetLibraryInfoWrapperPass>();
93 AU.addPreserved<DominatorTreeWrapperPass>();
94 AU.addPreserved<GlobalsAAWrapperPass>();
95 AU.addPreserved<MemoryDependenceWrapperPass>();
96 }
97 };
98 }
99
100 char DSE::ID = 0;
101 INITIALIZE_PASS_BEGIN(DSE, "dse", "Dead Store Elimination", false, false)
102 INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
103 INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
104 INITIALIZE_PASS_DEPENDENCY(GlobalsAAWrapperPass)
105 INITIALIZE_PASS_DEPENDENCY(MemoryDependenceWrapperPass)
106 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
107 INITIALIZE_PASS_END(DSE, "dse", "Dead Store Elimination", false, false)
108
109 FunctionPass *llvm::createDeadStoreEliminationPass() { return new DSE(); }
11048
11149 //===----------------------------------------------------------------------===//
11250 // Helper functions
11351 //===----------------------------------------------------------------------===//
11452
115 /// DeleteDeadInstruction - Delete this instruction. Before we do, go through
116 /// and zero out all the operands of this instruction. If any of them become
117 /// dead, delete them and the computation tree that feeds them.
118 ///
53 /// Delete this instruction. Before we do, go through and zero out all the
54 /// operands of this instruction. If any of them become dead, delete them and
55 /// the computation tree that feeds them.
11956 /// If ValueSet is non-null, remove any deleted instructions from it as well.
120 ///
121 static void DeleteDeadInstruction(Instruction *I,
122 MemoryDependenceResults &MD,
123 const TargetLibraryInfo &TLI,
124 SmallSetVector<Value *, 16> *ValueSet = nullptr) {
57 static void
58 deleteDeadInstruction(Instruction *I, MemoryDependenceResults &MD,
59 const TargetLibraryInfo &TLI,
60 SmallSetVector<Value *, 16> *ValueSet = nullptr) {
12561 SmallVector<Instruction *, 32> NowDeadInsts;
12662
12763 NowDeadInsts.push_back(I);
15591 } while (!NowDeadInsts.empty());
15692 }
15793
158
159 /// hasMemoryWrite - Does this instruction write some memory? This only returns
160 /// true for things that we can analyze with other helpers below.
94 /// Does this instruction write some memory? This only returns true for things
95 /// that we can analyze with other helpers below.
16196 static bool hasMemoryWrite(Instruction *I, const TargetLibraryInfo &TLI) {
16297 if (isa<StoreInst>(I))
16398 return true;
196131 return false;
197132 }
198133
199 /// getLocForWrite - Return a Location stored to by the specified instruction.
200 /// If isRemovable returns true, this function and getLocForRead completely
201 /// describe the memory operations for this instruction.
134 /// Return a Location stored to by the specified instruction. If isRemovable
135 /// returns true, this function and getLocForRead completely describe the memory
136 /// operations for this instruction.
202137 static MemoryLocation getLocForWrite(Instruction *Inst, AliasAnalysis &AA) {
203138 if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
204139 return MemoryLocation::get(SI);
227162 }
228163 }
229164
230 /// getLocForRead - Return the location read by the specified "hasMemoryWrite"
231 /// instruction if any.
165 /// Return the location read by the specified "hasMemoryWrite" instruction if
166 /// any.
232167 static MemoryLocation getLocForRead(Instruction *Inst,
233168 const TargetLibraryInfo &TLI) {
234169 assert(hasMemoryWrite(Inst, TLI) && "Unknown instruction case");
240175 return MemoryLocation();
241176 }
242177
243
244 /// isRemovable - If the value of this instruction and the memory it writes to
245 /// is unused, may we delete this instruction?
178 /// If the value of this instruction and the memory it writes to is unused, may
179 /// we delete this instruction?
246180 static bool isRemovable(Instruction *I) {
247181 // Don't remove volatile/atomic stores.
248182 if (StoreInst *SI = dyn_cast<StoreInst>(I))
306240 return II && II->getIntrinsicID() == Intrinsic::memset;
307241 }
308242
309 /// getStoredPointerOperand - Return the pointer that is being written to.
243 /// Return the pointer that is being written to.
310244 static Value *getStoredPointerOperand(Instruction *I) {
311245 if (StoreInst *SI = dyn_cast<StoreInst>(I))
312246 return SI->getPointerOperand();
457391 return OverwriteUnknown;
458392 }
459393
460 /// isPossibleSelfRead - If 'Inst' might be a self read (i.e. a noop copy of a
394 /// If 'Inst' might be a self read (i.e. a noop copy of a
461395 /// memory region into an identical pointer) then it doesn't actually make its
462396 /// input dead in the traditional sense. Consider this case:
463397 ///
502436 }
503437
504438
505 //===----------------------------------------------------------------------===//
506 // DSE Pass
507 //===----------------------------------------------------------------------===//
508
509 bool DSE::runOnBasicBlock(BasicBlock &BB) {
439 /// Returns true if the memory which is accessed by the second instruction is not
440 /// modified between the first and the second instruction.
441 /// Precondition: Second instruction must be dominated by the first
442 /// instruction.
443 static bool memoryIsNotModifiedBetween(Instruction *FirstI,
444 Instruction *SecondI,
445 AliasAnalysis *AA) {
446 SmallVector<BasicBlock *, 16> WorkList;
447 SmallPtrSet<BasicBlock *, 8> Visited;
448 BasicBlock::iterator FirstBBI(FirstI);
449 ++FirstBBI;
450 BasicBlock::iterator SecondBBI(SecondI);
451 BasicBlock *FirstBB = FirstI->getParent();
452 BasicBlock *SecondBB = SecondI->getParent();
453 MemoryLocation MemLoc = MemoryLocation::get(SecondI);
454
455 // Start checking the store-block.
456 WorkList.push_back(SecondBB);
457 bool isFirstBlock = true;
458
459 // Check all blocks going backward until we reach the load-block.
460 while (!WorkList.empty()) {
461 BasicBlock *B = WorkList.pop_back_val();
462
463 // Ignore instructions before LI if this is the FirstBB.
464 BasicBlock::iterator BI = (B == FirstBB ? FirstBBI : B->begin());
465
466 BasicBlock::iterator EI;
467 if (isFirstBlock) {
468 // Ignore instructions after SI if this is the first visit of SecondBB.
469 assert(B == SecondBB && "first block is not the store block");
470 EI = SecondBBI;
471 isFirstBlock = false;
472 } else {
473 // It's not SecondBB or (in case of a loop) the second visit of SecondBB.
474 // In this case we also have to look at instructions after SI.
475 EI = B->end();
476 }
477 for (; BI != EI; ++BI) {
478 Instruction *I = &*BI;
479 if (I->mayWriteToMemory() && I != SecondI) {
480 auto Res = AA->getModRefInfo(I, MemLoc);
481 if (Res != MRI_NoModRef)
482 return false;
483 }
484 }
485 if (B != FirstBB) {
486 assert(B != &FirstBB->getParent()->getEntryBlock() &&
487 "Should not hit the entry block because SI must be dominated by LI");
488 for (auto PredI = pred_begin(B), PE = pred_end(B); PredI != PE; ++PredI) {
489 if (!Visited.insert(*PredI).second)
490 continue;
491 WorkList.push_back(*PredI);
492 }
493 }
494 }
495 return true;
496 }
497
498 /// Find all blocks that will unconditionally lead to the block BB and append
499 /// them to F.
500 static void findUnconditionalPreds(SmallVectorImpl<BasicBlock *> &Blocks,
501 BasicBlock *BB, DominatorTree *DT) {
502 for (pred_iterator I = pred_begin(BB), E = pred_end(BB); I != E; ++I) {
503 BasicBlock *Pred = *I;
504 if (Pred == BB) continue;
505 TerminatorInst *PredTI = Pred->getTerminator();
506 if (PredTI->getNumSuccessors() != 1)
507 continue;
508
509 if (DT->isReachableFromEntry(Pred))
510 Blocks.push_back(Pred);
511 }
512 }
513
514 /// Handle frees of entire structures whose dependency is a store
515 /// to a field of that structure.
516 static bool handleFree(CallInst *F, AliasAnalysis *AA,
517 MemoryDependenceResults *MD, DominatorTree *DT,
518 const TargetLibraryInfo *TLI) {
519 bool MadeChange = false;
520
521 MemoryLocation Loc = MemoryLocation(F->getOperand(0));
522 SmallVector<BasicBlock *, 16> Blocks;
523 Blocks.push_back(F->getParent());
524 const DataLayout &DL = F->getModule()->getDataLayout();
525
526 while (!Blocks.empty()) {
527 BasicBlock *BB = Blocks.pop_back_val();
528 Instruction *InstPt = BB->getTerminator();
529 if (BB == F->getParent()) InstPt = F;
530
531 MemDepResult Dep =
532 MD->getPointerDependencyFrom(Loc, false, InstPt->getIterator(), BB);
533 while (Dep.isDef() || Dep.isClobber()) {
534 Instruction *Dependency = Dep.getInst();
535 if (!hasMemoryWrite(Dependency, *TLI) || !isRemovable(Dependency))
536 break;
537
538 Value *DepPointer =
539 GetUnderlyingObject(getStoredPointerOperand(Dependency), DL);
540
541 // Check for aliasing.
542 if (!AA->isMustAlias(F->getArgOperand(0), DepPointer))
543 break;
544
545 auto Next = ++Dependency->getIterator();
546
547 // DCE instructions only used to calculate that store
548 deleteDeadInstruction(Dependency, *MD, *TLI);
549 ++NumFastStores;
550 MadeChange = true;
551
552 // Inst's old Dependency is now deleted. Compute the next dependency,
553 // which may also be dead, as in
554 // s[0] = 0;
555 // s[1] = 0; // This has just been deleted.
556 // free(s);
557 Dep = MD->getPointerDependencyFrom(Loc, false, Next, BB);
558 }
559
560 if (Dep.isNonLocal())
561 findUnconditionalPreds(Blocks, BB, DT);
562 }
563
564 return MadeChange;
565 }
566
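handleFree above walks the memory dependencies of a free call and deletes stores whose only effect is to initialize memory that is freed immediately afterwards, as in the s[0]/s[1] example in the comments. A short hypothetical C++ illustration of that source-level pattern (names and struct are not part of the diff):

#include <cstdlib>

struct Node { int Key; Node *Next; };

// Illustrative only: both member stores are dead because the whole object is
// freed before anything can read them, which is what handleFree catches.
void destroy(Node *N) {
  N->Key = 0;
  N->Next = nullptr;
  std::free(N); // assumes N was allocated with malloc/calloc
}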
567 /// Check to see if the specified location may alias any of the stack objects in
568 /// the DeadStackObjects set. If so, they become live because the location is
569 /// being loaded.
570 static void removeAccessedObjects(const MemoryLocation &LoadedLoc,
571 SmallSetVector<Value *, 16> &DeadStackObjects,
572 const DataLayout &DL, AliasAnalysis *AA,
573 const TargetLibraryInfo *TLI) {
574 const Value *UnderlyingPointer = GetUnderlyingObject(LoadedLoc.Ptr, DL);
575
576 // A constant can't be in the dead pointer set.
577 if (isa<Constant>(UnderlyingPointer))
578 return;
579
580 // If the kill pointer can be easily reduced to an alloca, don't bother doing
581 // extraneous AA queries.
582 if (isa<AllocaInst>(UnderlyingPointer) || isa<Argument>(UnderlyingPointer)) {
583 DeadStackObjects.remove(const_cast<Value *>(UnderlyingPointer));
584 return;
585 }
586
587 // Remove objects that could alias LoadedLoc.
588 DeadStackObjects.remove_if([&](Value *I) {
589 // See if the loaded location could alias the stack location.
590 MemoryLocation StackLoc(I, getPointerSize(I, DL, *TLI));
591 return !AA->isNoAlias(StackLoc, LoadedLoc);
592 });
593 }
594
595 /// Remove dead stores to stack-allocated locations in the function end block.
596 /// Ex:
597 /// %A = alloca i32
598 /// ...
599 /// store i32 1, i32* %A
600 /// ret void
601 static bool handleEndBlock(BasicBlock &BB, AliasAnalysis *AA,
602 MemoryDependenceResults *MD,
603 const TargetLibraryInfo *TLI) {
604 bool MadeChange = false;
605
606 // Keep track of all of the stack objects that are dead at the end of the
607 // function.
608 SmallSetVector<Value *, 16> DeadStackObjects;
609
610 // Find all of the alloca'd pointers in the entry block.
611 BasicBlock &Entry = BB.getParent()->front();
612 for (Instruction &I : Entry) {
613 if (isa<AllocaInst>(&I))
614 DeadStackObjects.insert(&I);
615
616 // Okay, so these are dead heap objects, but if the pointer never escapes
617 // then it's leaked by this function anyways.
618 else if (isAllocLikeFn(&I, TLI) && !PointerMayBeCaptured(&I, true, true))
619 DeadStackObjects.insert(&I);
620 }
621
622 // Treat byval or inalloca arguments the same, stores to them are dead at the
623 // end of the function.
624 for (Argument &AI : BB.getParent()->args())
625 if (AI.hasByValOrInAllocaAttr())
626 DeadStackObjects.insert(&AI);
627
628 const DataLayout &DL = BB.getModule()->getDataLayout();
629
630 // Scan the basic block backwards
631 for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
632 --BBI;
633
634 // If we find a store, check to see if it points into a dead stack value.
635 if (hasMemoryWrite(&*BBI, *TLI) && isRemovable(&*BBI)) {
636 // See through pointer-to-pointer bitcasts
637 SmallVector<Value *, 4> Pointers;
638 GetUnderlyingObjects(getStoredPointerOperand(&*BBI), Pointers, DL);
639
640 // Stores to stack values are valid candidates for removal.
641 bool AllDead = true;
642 for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
643 E = Pointers.end(); I != E; ++I)
644 if (!DeadStackObjects.count(*I)) {
645 AllDead = false;
646 break;
647 }
648
649 if (AllDead) {
650 Instruction *Dead = &*BBI++;
651
652 DEBUG(dbgs() << "DSE: Dead Store at End of Block:\n DEAD: "
653 << *Dead << "\n Objects: ";
654 for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
655 E = Pointers.end(); I != E; ++I) {
656 dbgs() << **I;
657 if (std::next(I) != E)
658 dbgs() << ", ";
659 }
660 dbgs() << '\n');
661
662 // DCE instructions only used to calculate that store.
663 deleteDeadInstruction(Dead, *MD, *TLI, &DeadStackObjects);
664 ++NumFastStores;
665 MadeChange = true;
666 continue;
667 }
668 }
669
670 // Remove any dead non-memory-mutating instructions.
671 if (isInstructionTriviallyDead(&*BBI, TLI)) {
672 Instruction *Inst = &*BBI++;
673 deleteDeadInstruction(Inst, *MD, *TLI, &DeadStackObjects);
674 ++NumFastOther;
675 MadeChange = true;
676 continue;
677 }
678
679 if (isa<AllocaInst>(BBI)) {
680 // Remove allocas from the list of dead stack objects; there can't be
681 // any references before the definition.
682 DeadStackObjects.remove(&*BBI);
683 continue;
684 }
685
686 if (auto CS = CallSite(&*BBI)) {
687 // Remove allocation function calls from the list of dead stack objects;
688 // there can't be any references before the definition.
689 if (isAllocLikeFn(&*BBI, TLI))
690 DeadStackObjects.remove(&*BBI);
691
692 // If this call does not access memory, it can't be loading any of our
693 // pointers.
694 if (AA->doesNotAccessMemory(CS))
695 continue;
696
697 // If the call might load from any of our allocas, then any store above
698 // the call is live.
699 DeadStackObjects.remove_if([&](Value *I) {
700 // See if the call site touches the value.
701 ModRefInfo A = AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI));
702
703 return A == MRI_ModRef || A == MRI_Ref;
704 });
705
706 // If all of the allocas were clobbered by the call then we're not going
707 // to find anything else to process.
708 if (DeadStackObjects.empty())
709 break;
710
711 continue;
712 }
713
714 MemoryLocation LoadedLoc;
715
716 // If we encounter a use of the pointer, it is no longer considered dead
717 if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
718 if (!L->isUnordered()) // Be conservative with atomic/volatile load
719 break;
720 LoadedLoc = MemoryLocation::get(L);
721 } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
722 LoadedLoc = MemoryLocation::get(V);
723 } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {
724 LoadedLoc = MemoryLocation::getForSource(MTI);
725 } else if (!BBI->mayReadFromMemory()) {
726 // Instruction doesn't read memory. Note that stores that weren't removed
727 // above will hit this case.
728 continue;
729 } else {
730 // Unknown inst; assume it clobbers everything.
731 break;
732 }
733
734 // Remove any allocas from the DeadPointer set that are loaded, as this
735 // makes any stores above the access live.
736 removeAccessedObjects(LoadedLoc, DeadStackObjects, DL, AA, TLI);
737
738 // If all of the allocas were clobbered by the access then we're not going
739 // to find anything else to process.
740 if (DeadStackObjects.empty())
741 break;
742 }
743
744 return MadeChange;
745 }
746
747 static bool eliminateDeadStores(BasicBlock &BB, AliasAnalysis *AA,
748 MemoryDependenceResults *MD, DominatorTree *DT,
749 const TargetLibraryInfo *TLI) {
510750 const DataLayout &DL = BB.getModule()->getDataLayout();
511751 bool MadeChange = false;
512752
516756
517757 // Handle 'free' calls specially.
518758 if (CallInst *F = isFreeCall(Inst, TLI)) {
519 MadeChange |= HandleFree(F);
759 MadeChange |= handleFree(F, AA, MD, DT, TLI);
520760 continue;
521761 }
522762
529769 if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
530770
531771 auto RemoveDeadInstAndUpdateBBI = [&](Instruction *DeadInst) {
532 // DeleteDeadInstruction can delete the current instruction. Save BBI
772 // deleteDeadInstruction can delete the current instruction. Save BBI
533773 // in case we need it.
534774 WeakVH NextInst(&*BBI);
535775
536 DeleteDeadInstruction(DeadInst, *MD, *TLI);
776 deleteDeadInstruction(DeadInst, *MD, *TLI);
537777
538778 if (!NextInst) // Next instruction deleted.
539779 BBI = BB.begin();
546786 if (LoadInst *DepLoad = dyn_cast<LoadInst>(SI->getValueOperand())) {
547787 if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
548788 isRemovable(SI) &&
549 MemoryIsNotModifiedBetween(DepLoad, SI)) {
789 memoryIsNotModifiedBetween(DepLoad, SI, AA)) {
550790
551791 DEBUG(dbgs() << "DSE: Remove Store Of Load from same pointer:\n "
552792 << "LOAD: " << *DepLoad << "\n STORE: " << *SI << '\n');
565805 GetUnderlyingObject(SI->getPointerOperand(), DL));
566806
567807 if (UnderlyingPointer && isCallocLikeFn(UnderlyingPointer, TLI) &&
568 MemoryIsNotModifiedBetween(UnderlyingPointer, SI)) {
808 memoryIsNotModifiedBetween(UnderlyingPointer, SI, AA)) {
569809 DEBUG(dbgs()
570810 << "DSE: Remove null store to the calloc'ed object:\n DEAD: "
571811 << *Inst << "\n OBJECT: " << *UnderlyingPointer << '\n');
617857 << *DepWrite << "\n KILLER: " << *Inst << '\n');
618858
619859 // Delete the store and now-dead instructions that feed it.
620 DeleteDeadInstruction(DepWrite, *MD, *TLI);
860 deleteDeadInstruction(DepWrite, *MD, *TLI);
621861 ++NumFastStores;
622862 MadeChange = true;
623863
624 // DeleteDeadInstruction can delete the current instruction in loop
864 // deleteDeadInstruction can delete the current instruction in loop
625865 // cases, reset BBI.
626866 BBI = Inst->getIterator();
627867 if (BBI != BB.begin())
697937 // If this block ends in a return, unwind, or unreachable, all allocas are
698938 // dead at its end, which means stores to them are also dead.
699939 if (BB.getTerminator()->getNumSuccessors() == 0)
700 MadeChange |= handleEndBlock(BB);
940 MadeChange |= handleEndBlock(BB, AA, MD, TLI);
701941
702942 return MadeChange;
703943 }
704944
705 /// Returns true if the memory which is accessed by the second instruction is not
706 /// modified between the first and the second instruction.
707 /// Precondition: Second instruction must be dominated by the first
708 /// instruction.
709 bool DSE::MemoryIsNotModifiedBetween(Instruction *FirstI,
710 Instruction *SecondI) {
711 SmallVector<BasicBlock *, 16> WorkList;
712 SmallPtrSet<BasicBlock *, 8> Visited;
713 BasicBlock::iterator FirstBBI(FirstI);
714 ++FirstBBI;
715 BasicBlock::iterator SecondBBI(SecondI);
716 BasicBlock *FirstBB = FirstI->getParent();
717 BasicBlock *SecondBB = SecondI->getParent();
718 MemoryLocation MemLoc = MemoryLocation::get(SecondI);
719
720 // Start checking the store-block.
721 WorkList.push_back(SecondBB);
722 bool isFirstBlock = true;
723
724 // Check all blocks going backward until we reach the load-block.
725 while (!WorkList.empty()) {
726 BasicBlock *B = WorkList.pop_back_val();
727
728 // Ignore instructions before LI if this is the FirstBB.
729 BasicBlock::iterator BI = (B == FirstBB ? FirstBBI : B->begin());
730
731 BasicBlock::iterator EI;
732 if (isFirstBlock) {
733 // Ignore instructions after SI if this is the first visit of SecondBB.
734 assert(B == SecondBB && "first block is not the store block");
735 EI = SecondBBI;
736 isFirstBlock = false;
737 } else {
738 // It's not SecondBB or (in case of a loop) the second visit of SecondBB.
739 // In this case we also have to look at instructions after SI.
740 EI = B->end();
741 }
742 for (; BI != EI; ++BI) {
743 Instruction *I = &*BI;
744 if (I->mayWriteToMemory() && I != SecondI) {
745 auto Res = AA->getModRefInfo(I, MemLoc);
746 if (Res != MRI_NoModRef)
747 return false;
748 }
749 }
750 if (B != FirstBB) {
751 assert(B != &FirstBB->getParent()->getEntryBlock() &&
752 "Should not hit the entry block because SI must be dominated by LI");
753 for (auto PredI = pred_begin(B), PE = pred_end(B); PredI != PE; ++PredI) {
754 if (!Visited.insert(*PredI).second)
755 continue;
756 WorkList.push_back(*PredI);
757 }
758 }
759 }
760 return true;
761 }
762
763 /// Find all blocks that will unconditionally lead to the block BB and append
764 /// them to F.
765 static void FindUnconditionalPreds(SmallVectorImpl<BasicBlock *> &Blocks,
766 BasicBlock *BB, DominatorTree *DT) {
767 for (pred_iterator I = pred_begin(BB), E = pred_end(BB); I != E; ++I) {
768 BasicBlock *Pred = *I;
769 if (Pred == BB) continue;
770 TerminatorInst *PredTI = Pred->getTerminator();
771 if (PredTI->getNumSuccessors() != 1)
772 continue;
773
774 if (DT->isReachableFromEntry(Pred))
775 Blocks.push_back(Pred);
776 }
777 }
778
779 /// HandleFree - Handle frees of entire structures whose dependency is a store
780 /// to a field of that structure.
781 bool DSE::HandleFree(CallInst *F) {
945 static bool eliminateDeadStores(Function &F, AliasAnalysis *AA,
946 MemoryDependenceResults *MD, DominatorTree *DT,
947 const TargetLibraryInfo *TLI) {
782948 bool MadeChange = false;
783
784 MemoryLocation Loc = MemoryLocation(F->getOperand(0));
785 SmallVector<BasicBlock *, 16> Blocks;
786 Blocks.push_back(F->getParent());
787 const DataLayout &DL = F->getModule()->getDataLayout();
788
789 while (!Blocks.empty()) {
790 BasicBlock *BB = Blocks.pop_back_val();
791 Instruction *InstPt = BB->getTerminator();
792 if (BB == F->getParent()) InstPt = F;
793
794 MemDepResult Dep =
795 MD->getPointerDependencyFrom(Loc, false, InstPt->getIterator(), BB);
796 while (Dep.isDef() || Dep.isClobber()) {
797 Instruction *Dependency = Dep.getInst();
798 if (!hasMemoryWrite(Dependency, *TLI) || !isRemovable(Dependency))
799 break;
800
801 Value *DepPointer =
802 GetUnderlyingObject(getStoredPointerOperand(Dependency), DL);
803
804 // Check for aliasing.
805 if (!AA->isMustAlias(F->getArgOperand(0), DepPointer))
806 break;
807
808 auto Next = ++Dependency->getIterator();
809
810 // DCE instructions only used to calculate that store
811 DeleteDeadInstruction(Dependency, *MD, *TLI);
812 ++NumFastStores;
813 MadeChange = true;
814
815 // Inst's old Dependency is now deleted. Compute the next dependency,
816 // which may also be dead, as in
817 // s[0] = 0;
818 // s[1] = 0; // This has just been deleted.
819 // free(s);
820 Dep = MD->getPointerDependencyFrom(Loc, false, Next, BB);
821 }
822
823 if (Dep.isNonLocal())
824 FindUnconditionalPreds(Blocks, BB, DT);
825 }
826
949 for (BasicBlock &BB : F)
950 // Only check non-dead blocks. Dead blocks may have strange pointer
951 // cycles that will confuse alias analysis.
952 if (DT->isReachableFromEntry(&BB))
953 MadeChange |= eliminateDeadStores(BB, AA, MD, DT, TLI);
827954 return MadeChange;
828955 }
829956
830 /// handleEndBlock - Remove dead stores to stack-allocated locations in the
831 /// function end block. Ex:
832 /// %A = alloca i32
833 /// ...
834 /// store i32 1, i32* %A
835 /// ret void
836 bool DSE::handleEndBlock(BasicBlock &BB) {
837 bool MadeChange = false;
838
839 // Keep track of all of the stack objects that are dead at the end of the
840 // function.
841 SmallSetVector<Value *, 16> DeadStackObjects;
842
843 // Find all of the alloca'd pointers in the entry block.
844 BasicBlock &Entry = BB.getParent()->front();
845 for (Instruction &I : Entry) {
846 if (isa<AllocaInst>(&I))
847 DeadStackObjects.insert(&I);
848
849 // Okay, so these are dead heap objects, but if the pointer never escapes
850 // then it's leaked by this function anyways.
851 else if (isAllocLikeFn(&I, TLI) && !PointerMayBeCaptured(&I, true, true))
852 DeadStackObjects.insert(&I);
853 }
854
855 // Treat byval or inalloca arguments the same, stores to them are dead at the
856 // end of the function.
857 for (Argument &AI : BB.getParent()->args())
858 if (AI.hasByValOrInAllocaAttr())
859 DeadStackObjects.insert(&AI);
860
861 const DataLayout &DL = BB.getModule()->getDataLayout();
862
863 // Scan the basic block backwards
864 for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
865 --BBI;
866
867 // If we find a store, check to see if it points into a dead stack value.
868 if (hasMemoryWrite(&*BBI, *TLI) && isRemovable(&*BBI)) {
869 // See through pointer-to-pointer bitcasts
870 SmallVector<Value *, 4> Pointers;
871 GetUnderlyingObjects(getStoredPointerOperand(&*BBI), Pointers, DL);
872
873 // Stores to stack values are valid candidates for removal.
874 bool AllDead = true;
875 for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
876 E = Pointers.end(); I != E; ++I)
877 if (!DeadStackObjects.count(*I)) {
878 AllDead = false;
879 break;
880 }
881
882 if (AllDead) {
883 Instruction *Dead = &*BBI++;
884
885 DEBUG(dbgs() << "DSE: Dead Store at End of Block:\n DEAD: "
886 << *Dead << "\n Objects: ";
887 for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
888 E = Pointers.end(); I != E; ++I) {
889 dbgs() << **I;
890 if (std::next(I) != E)
891 dbgs() << ", ";
892 }
893 dbgs() << '\n');
894
895 // DCE instructions only used to calculate that store.
896 DeleteDeadInstruction(Dead, *MD, *TLI, &DeadStackObjects);
897 ++NumFastStores;
898 MadeChange = true;
899 continue;
900 }
901 }
902
903 // Remove any dead non-memory-mutating instructions.
904 if (isInstructionTriviallyDead(&*BBI, TLI)) {
905 Instruction *Inst = &*BBI++;
906 DeleteDeadInstruction(Inst, *MD, *TLI, &DeadStackObjects);
907 ++NumFastOther;
908 MadeChange = true;
909 continue;
910 }
911
912 if (isa<AllocaInst>(BBI)) {
913 // Remove allocas from the list of dead stack objects; there can't be
914 // any references before the definition.
915 DeadStackObjects.remove(&*BBI);
916 continue;
917 }
918
919 if (auto CS = CallSite(&*BBI)) {
920 // Remove allocation function calls from the list of dead stack objects;
921 // there can't be any references before the definition.
922 if (isAllocLikeFn(&*BBI, TLI))
923 DeadStackObjects.remove(&*BBI);
924
925 // If this call does not access memory, it can't be loading any of our
926 // pointers.
927 if (AA->doesNotAccessMemory(CS))
928 continue;
929
930 // If the call might load from any of our allocas, then any store above
931 // the call is live.
932 DeadStackObjects.remove_if([&](Value *I) {
933 // See if the call site touches the value.
934 ModRefInfo A = AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI));
935
936 return A == MRI_ModRef || A == MRI_Ref;
937 });
938
939 // If all of the allocas were clobbered by the call then we're not going
940 // to find anything else to process.
941 if (DeadStackObjects.empty())
942 break;
943
944 continue;
945 }
946
947 MemoryLocation LoadedLoc;
948
949 // If we encounter a use of the pointer, it is no longer considered dead
950 if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
951 if (!L->isUnordered()) // Be conservative with atomic/volatile load
952 break;
953 LoadedLoc = MemoryLocation::get(L);
954 } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
955 LoadedLoc = MemoryLocation::get(V);
956 } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {
957 LoadedLoc = MemoryLocation::getForSource(MTI);
958 } else if (!BBI->mayReadFromMemory()) {
959 // Instruction doesn't read memory. Note that stores that weren't removed
960 // above will hit this case.
961 continue;
962 } else {
963 // Unknown inst; assume it clobbers everything.
964 break;
965 }
966
967 // Remove any allocas from the DeadPointer set that are loaded, as this
968 // makes any stores above the access live.
969 RemoveAccessedObjects(LoadedLoc, DeadStackObjects, DL);
970
971 // If all of the allocas were clobbered by the access then we're not going
972 // to find anything else to process.
973 if (DeadStackObjects.empty())
974 break;
975 }
976
977 return MadeChange;
978 }
979
980 /// RemoveAccessedObjects - Check to see if the specified location may alias any
981 /// of the stack objects in the DeadStackObjects set. If so, they become live
982 /// because the location is being loaded.
983 void DSE::RemoveAccessedObjects(const MemoryLocation &LoadedLoc,
984 SmallSetVector<Value *, 16> &DeadStackObjects,
985 const DataLayout &DL) {
986 const Value *UnderlyingPointer = GetUnderlyingObject(LoadedLoc.Ptr, DL);
987
988 // A constant can't be in the dead pointer set.
989 if (isa<Constant>(UnderlyingPointer))
990 return;
991
992 // If the kill pointer can be easily reduced to an alloca, don't bother doing
993 // extraneous AA queries.
994 if (isa<AllocaInst>(UnderlyingPointer) || isa<Argument>(UnderlyingPointer)) {
995 DeadStackObjects.remove(const_cast<Value *>(UnderlyingPointer));
996 return;
997 }
998
999 // Remove objects that could alias LoadedLoc.
1000 DeadStackObjects.remove_if([&](Value *I) {
1001 // See if the loaded location could alias the stack location.
1002 MemoryLocation StackLoc(I, getPointerSize(I, DL, *TLI));
1003 return !AA->isNoAlias(StackLoc, LoadedLoc);
1004 });
1005 }
957 //===----------------------------------------------------------------------===//
958 // DSE Pass
959 //===----------------------------------------------------------------------===//
960 PreservedAnalyses DSEPass::run(Function &F, FunctionAnalysisManager &AM) {
961 AliasAnalysis *AA = &AM.getResult<AAManager>(F);
962 DominatorTree *DT = &AM.getResult<DominatorTreeAnalysis>(F);
963 MemoryDependenceResults *MD = &AM.getResult<MemoryDependenceAnalysis>(F);
964 const TargetLibraryInfo *TLI = &AM.getResult<TargetLibraryAnalysis>(F);
965
966 if (!eliminateDeadStores(F, AA, MD, DT, TLI))
967 return PreservedAnalyses::all();
968 PreservedAnalyses PA;
969 PA.preserve<DominatorTreeAnalysis>();
970 PA.preserve<GlobalsAA>();
971 PA.preserve<MemoryDependenceAnalysis>();
972 return PA;
973 }
974
975 /// A legacy pass for the legacy pass manager that wraps \c DSEPass.
976 class DSELegacyPass : public FunctionPass {
977 public:
978 DSELegacyPass() : FunctionPass(ID) {
979 initializeDSELegacyPassPass(*PassRegistry::getPassRegistry());
980 }
981
982 bool runOnFunction(Function &F) override {
983 if (skipFunction(F))
984 return false;
985
986 DominatorTree *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
987 AliasAnalysis *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
988 MemoryDependenceResults *MD =
989 &getAnalysis<MemoryDependenceWrapperPass>().getMemDep();
990 const TargetLibraryInfo *TLI =
991 &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();
992
993 return eliminateDeadStores(F, AA, MD, DT, TLI);
994 }
995
996 void getAnalysisUsage(AnalysisUsage &AU) const override {
997 AU.setPreservesCFG();
998 AU.addRequired<DominatorTreeWrapperPass>();
999 AU.addRequired<AAResultsWrapperPass>();
1000 AU.addRequired<MemoryDependenceWrapperPass>();
1001 AU.addRequired<TargetLibraryInfoWrapperPass>();
1002 AU.addPreserved<DominatorTreeWrapperPass>();
1003 AU.addPreserved<GlobalsAAWrapperPass>();
1004 AU.addPreserved<MemoryDependenceWrapperPass>();
1005 }
1006
1007 static char ID; // Pass identification, replacement for typeid
1008 };
1009
1010 char DSELegacyPass::ID = 0;
1011 INITIALIZE_PASS_BEGIN(DSELegacyPass, "dse", "Dead Store Elimination", false,
1012 false)
1013 INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
1014 INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
1015 INITIALIZE_PASS_DEPENDENCY(GlobalsAAWrapperPass)
1016 INITIALIZE_PASS_DEPENDENCY(MemoryDependenceWrapperPass)
1017 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
1018 INITIALIZE_PASS_END(DSELegacyPass, "dse", "Dead Store Elimination", false,
1019 false)
1020
1021 FunctionPass *llvm::createDeadStoreEliminationPass() {
1022 return new DSELegacyPass();
1023 }
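The legacy wrapper keeps the existing entry point working: createDeadStoreEliminationPass() (declared in llvm/Transforms/Scalar.h) now returns the DSELegacyPass shown above. A minimal sketch of scheduling it through the legacy pass manager; the surrounding setup is assumed, not part of this diff.

#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Module.h"
#include "llvm/Transforms/Scalar.h"

// Illustrative only: run DSE via the legacy pass manager entry point.
void runLegacyDSE(llvm::Module &M) {
  llvm::legacy::PassManager PM;
  PM.add(llvm::createDeadStoreEliminationPass());
  PM.run(M);
}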
3939 initializeDCELegacyPassPass(Registry);
4040 initializeDeadInstEliminationPass(Registry);
4141 initializeScalarizerPass(Registry);
42 initializeDSEPass(Registry);
42 initializeDSELegacyPassPass(Registry);
4343 initializeGVNLegacyPassPass(Registry);
4444 initializeEarlyCSELegacyPassPass(Registry);
4545 initializeFlattenCFGPassPass(Registry);
0 ; RUN: opt < %s -basicaa -dse -S | FileCheck %s
1 ; RUN: opt < %s -aa-pipeline=basic-aa -passes=dse -S | FileCheck %s
12 target datalayout = "E-p:64:64:64-a0:0:8-f32:32:32-f64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:32:64-v64:64:64-v128:128:128"
23
34 declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind