llvm.org GIT mirror llvm / 81a0382
Live interval splitting: when a live interval is being spilled, rather than creating short, non-spillable intervals for every def / use, split the interval at BB boundaries. That is, for every BB where the live interval is defined or used, create a new interval that covers all the defs and uses in that BB.

This is designed to eliminate one common problem: multiple reloads of the same value in a single basic block. Note that it does *not* decrease the number of spills: no copies are inserted, so the split intervals remain *connected* through spills and reloads (or rematerialization). A newly created interval can be spilled again; in that case, since it does not span multiple basic blocks, it is spilled in the usual manner. However, it can reuse the same stack slot as the previously split interval.

This is currently controlled by -split-intervals-at-bb.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@44198 91177308-0d34-0410-b5e6-96231b3b80d8

Evan Cheng 12 years ago
8 changed file(s) with 634 addition(s) and 165 deletion(s).
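As a rough illustration of the splitting strategy described in the commit message, here is a minimal standalone sketch in plain C++. It is not the LiveIntervals code from this patch and all names in it are made up: the defs and uses of a spilled register are grouped by basic block and one sub-interval is emitted per block, so a value reloaded once in a block can serve every use in that block.

// Hypothetical model of splitting a spilled live interval at BB boundaries.
// Illustration only; no LLVM types are used.
#include <algorithm>
#include <iostream>
#include <map>
#include <vector>

struct UseDef { unsigned Index; unsigned MBB; };   // instruction index, block number
struct SubInterval { unsigned MBB, Start, End; };  // one interval per block

// For every block that defines or uses the register, create one interval
// covering all of its defs and uses.  The sub-intervals stay connected
// through the shared stack slot (or rematerialization), so the number of
// spills is unchanged; only redundant reloads within a block go away.
static std::vector<SubInterval> splitAtBBBoundaries(const std::vector<UseDef> &Points) {
  std::map<unsigned, SubInterval> PerBB;
  for (const UseDef &P : Points) {
    auto It = PerBB.find(P.MBB);
    if (It == PerBB.end())
      PerBB[P.MBB] = SubInterval{P.MBB, P.Index, P.Index};
    else {
      It->second.Start = std::min(It->second.Start, P.Index);
      It->second.End   = std::max(It->second.End,   P.Index);
    }
  }
  std::vector<SubInterval> Result;
  for (const auto &KV : PerBB)
    Result.push_back(KV.second);
  return Result;
}

int main() {
  // A register defined in BB0 and used twice in BB2: a single reload in BB2
  // covers both uses instead of one reload per use.
  std::vector<UseDef> Points = {{4, 0}, {20, 2}, {28, 2}};
  for (const SubInterval &SI : splitAtBBBoundaries(Points))
    std::cout << "BB" << SI.MBB << ": [" << SI.Start << ", " << SI.End << "]\n";
  return 0;
}

In the patch itself the per-block ranges are of course built on the existing instruction-index machinery (getLoadIndex / getUseIndex / getDefIndex / getStoreIndex) rather than on a standalone structure like this.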
3131 namespace llvm {
3232
3333 class LiveVariables;
34 class LoopInfo;
3435 class MRegisterInfo;
3536 class SSARegMap;
3637 class TargetInstrInfo;
103104 return getBaseIndex(index) + InstrSlots::STORE;
104105 }
105106
106 static float getSpillWeight(const MachineOperand &mop, unsigned loopDepth) {
107 return (mop.isUse()+mop.isDef()) * powf(10.0F, (float)loopDepth);
107 static float getSpillWeight(bool isDef, bool isUse, unsigned loopDepth) {
108 return (isDef + isUse) * powf(10.0F, (float)loopDepth);
108109 }
109110
110111 typedef Reg2IntervalMap::iterator iterator;
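For reference, the reworked getSpillWeight above keeps the same formula as before, just taking the def/use flags directly: each reference contributes (isDef + isUse) * 10^loopDepth, so references inside nested loops quickly dominate. A tiny standalone sketch (plain C++, not LLVM code) of how the weights scale:

#include <cmath>
#include <cstdio>

// Same formula as the getSpillWeight shown above, reproduced standalone.
static float spillWeight(bool isDef, bool isUse, unsigned loopDepth) {
  return (isDef + isUse) * powf(10.0F, (float)loopDepth);
}

int main() {
  std::printf("use at depth 0:     %.0f\n", spillWeight(false, true, 0)); // 1
  std::printf("def+use at depth 1: %.0f\n", spillWeight(true,  true, 1)); // 20
  std::printf("use at depth 2:     %.0f\n", spillWeight(false, true, 2)); // 100
  return 0;
}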
228229 /// addIntervalsForSpills - Create new intervals for spilled defs / uses of
229230 /// the given interval.
230231 std::vector<LiveInterval*>
231 addIntervalsForSpills(const LiveInterval& i, VirtRegMap& vrm);
232 addIntervalsForSpills(const LiveInterval& i,
233 const LoopInfo *loopInfo, VirtRegMap& vrm);
232234
233235 private:
234236 /// computeIntervals - Compute live intervals.
274276 MachineInstr *DefMI, unsigned index, unsigned i,
275277 bool isSS, int slot, unsigned reg);
276278
279 bool anyKillInMBBAfterIdx(const LiveInterval &li,
280 MachineBasicBlock *MBB, unsigned Idx,
281 const VNInfo *VNI = NULL) const;
282
283 bool intervalIsInOneMBB(const LiveInterval &li) const;
284
277285 /// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
278286 /// for addIntervalsForSpills to rewrite uses / defs for the given live range.
279 void rewriteInstructionForSpills(const LiveInterval &li,
287 void rewriteInstructionForSpills(const LiveInterval &li, bool TrySplit,
280288 unsigned id, unsigned index, unsigned end, MachineInstr *MI,
281289 MachineInstr *OrigDefMI, MachineInstr *DefMI, unsigned Slot, int LdSlot,
282290 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
283291 VirtRegMap &vrm, SSARegMap *RegMap, const TargetRegisterClass* rc,
284292 SmallVector<int, 4> &ReMatIds,
293 unsigned &NewVReg, bool &HasDef, bool &HasUse, const LoopInfo *loopInfo,
294 std::vector<unsigned> &NewVRegs,
285295 std::vector<LiveInterval*> &NewLIs);
286 void rewriteInstructionsForSpills(const LiveInterval &li,
296 void rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
287297 LiveInterval::Ranges::const_iterator &I,
288298 MachineInstr *OrigDefMI, MachineInstr *DefMI, unsigned Slot, int LdSlot,
289299 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
290300 VirtRegMap &vrm, SSARegMap *RegMap, const TargetRegisterClass* rc,
291 SmallVector<int, 4> &ReMatIds,
301 SmallVector<int, 4> &ReMatIds, const LoopInfo *loopInfo,
302 BitVector &SpillMBBs,
303 std::vector<std::pair<int, unsigned> > &SpillIdxes,
304 std::vector<unsigned> &NewVRegs,
292305 std::vector<LiveInterval*> &NewLIs);
293306
294307 static LiveInterval createInterval(unsigned Reg);
153153
154154 SmallVector *PHIVarInfo;
155155
156 /// addRegisterKilled - We have determined MI kills a register. Look for the
157 /// operand that uses it and mark it as IsKill. If AddIfNotFound is true,
158 /// add an implicit operand if it's not found. Returns true if the operand
159 /// exists / is added.
160 bool addRegisterKilled(unsigned IncomingReg, MachineInstr *MI,
161 bool AddIfNotFound = false);
162
163 /// addRegisterDead - We have determined MI defined a register without a use.
164 /// Look for the operand that defines it and mark it as IsDead. If
165 /// AddIfNotFound is true, add an implicit operand if it's not found. Returns
166 /// true if the operand exists / is added.
167 bool addRegisterDead(unsigned IncomingReg, MachineInstr *MI,
168 bool AddIfNotFound = false);
169
170156 void addRegisterKills(unsigned Reg, MachineInstr *MI,
171157 SmallSet<unsigned, 4> &SubKills);
172158
209195 /// the records for NewMI.
210196 void instructionChanged(MachineInstr *OldMI, MachineInstr *NewMI);
211197
198 /// transferKillDeadInfo - Similar to instructionChanged except it does not
199 /// update live variables internal data structures.
200 static void transferKillDeadInfo(MachineInstr *OldMI, MachineInstr *NewMI,
201 const MRegisterInfo *RegInfo);
202
203 /// addRegisterKilled - We have determined MI kills a register. Look for the
204 /// operand that uses it and mark it as IsKill. If AddIfNotFound is true,
205 /// add an implicit operand if it's not found. Returns true if the operand
206 /// exists / is added.
207 static bool addRegisterKilled(unsigned IncomingReg, MachineInstr *MI,
208 const MRegisterInfo *RegInfo,
209 bool AddIfNotFound = false);
210
212211 /// addVirtualRegisterKilled - Add information about the fact that the
213212 /// specified register is killed after being used by the specified
214213 /// instruction. If AddIfNotFound is true, add an implicit operand if it's
215214 /// not found.
216215 void addVirtualRegisterKilled(unsigned IncomingReg, MachineInstr *MI,
217216 bool AddIfNotFound = false) {
218 if (addRegisterKilled(IncomingReg, MI, AddIfNotFound))
217 if (addRegisterKilled(IncomingReg, MI, RegInfo, AddIfNotFound))
219218 getVarInfo(IncomingReg).Kills.push_back(MI);
220 }
219 }
221220
222221 /// removeVirtualRegisterKilled - Remove the specified virtual
223222 /// register from the live variable information. Returns true if the
247246 /// instruction.
248247 void removeVirtualRegistersKilled(MachineInstr *MI);
249248
249 /// addRegisterDead - We have determined MI defined a register without a use.
250 /// Look for the operand that defines it and mark it as IsDead. If
251 /// AddIfNotFound is true, add an implicit operand if it's not found. Returns
252 /// true if the operand exists / is added.
253 static bool addRegisterDead(unsigned IncomingReg, MachineInstr *MI,
254 const MRegisterInfo *RegInfo,
255 bool AddIfNotFound = false);
256
250257 /// addVirtualRegisterDead - Add information about the fact that the specified
251258 /// register is dead after being used by the specified instruction. If
252259 /// AddIfNotFound is true, add an implicit operand if it's not found.
253260 void addVirtualRegisterDead(unsigned IncomingReg, MachineInstr *MI,
254261 bool AddIfNotFound = false) {
255 if (addRegisterDead(IncomingReg, MI, AddIfNotFound))
262 if (addRegisterDead(IncomingReg, MI, RegInfo, AddIfNotFound))
256263 getVarInfo(IncomingReg).Kills.push_back(MI);
257264 }
258265
1818 #include "llvm/CodeGen/LiveIntervalAnalysis.h"
1919 #include "VirtRegMap.h"
2020 #include "llvm/Value.h"
21 #include "llvm/Analysis/LoopInfo.h"
2122 #include "llvm/CodeGen/LiveVariables.h"
2223 #include "llvm/CodeGen/MachineFrameInfo.h"
2324 #include "llvm/CodeGen/MachineInstr.h"
3738 namespace {
3839 // Hidden options for help debugging.
3940 cl::opt<bool> DisableReMat("disable-rematerialization",
41 cl::init(false), cl::Hidden);
42
43 cl::opt<bool> SplitAtBB("split-intervals-at-bb",
4044 cl::init(false), cl::Hidden);
4145 }
4246
631635 /// slot / to reg or any rematerialized load into ith operand of specified
632636 /// MI. If it is successful, MI is updated with the newly created MI and
633637 /// returns true.
634 bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI, VirtRegMap &vrm,
638 bool LiveIntervals::tryFoldMemoryOperand(MachineInstr* &MI,
639 VirtRegMap &vrm,
635640 MachineInstr *DefMI,
636641 unsigned index, unsigned i,
637642 bool isSS, int slot, unsigned reg) {
643648 // we can do this, we don't need to insert spill code.
644649 if (lv_)
645650 lv_->instructionChanged(MI, fmi);
651 else
652 LiveVariables::transferKillDeadInfo(MI, fmi, mri_);
646653 MachineBasicBlock &MBB = *MI->getParent();
647654 vrm.virtFolded(reg, MI, i, fmi);
655 vrm.transferSpillPts(MI, fmi);
648656 mi2iMap_.erase(MI);
649657 i2miMap_[index/InstrSlots::NUM] = fmi;
650658 mi2iMap_[fmi] = index;
655663 return false;
656664 }
657665
666 bool LiveIntervals::intervalIsInOneMBB(const LiveInterval &li) const {
667 SmallPtrSet<MachineBasicBlock*, 4> MBBs;
668 for (LiveInterval::Ranges::const_iterator
669 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
670 std::vector<IdxMBBPair>::const_iterator II =
671 std::lower_bound(Idx2MBBMap.begin(), Idx2MBBMap.end(), I->start);
672 if (II == Idx2MBBMap.end())
673 continue;
674 if (I->end > II->first) // crossing a MBB.
675 return false;
676 MBBs.insert(II->second);
677 if (MBBs.size() > 1)
678 return false;
679 }
680 return true;
681 }
682
683 static
684 bool hasALaterUse(MachineBasicBlock *MBB, MachineInstr *MI, unsigned Reg) {
685 MachineBasicBlock::iterator I = MI;
686 if (I == MBB->end())
687 return false;
688 ++I;
689 while (I != MBB->end()) {
690 if (I->findRegisterUseOperandIdx(Reg) != -1)
691 return true;
692 ++I;
693 }
694 return false;
695 }
696
658697 /// rewriteInstructionForSpills, rewriteInstructionsForSpills - Helper functions
659698 /// for addIntervalsForSpills to rewrite uses / defs for the given live range.
660699 void LiveIntervals::
661 rewriteInstructionForSpills(const LiveInterval &li,
662 unsigned id, unsigned index, unsigned end,
663 MachineInstr *MI, MachineInstr *OrigDefMI, MachineInstr *DefMI,
700 rewriteInstructionForSpills(const LiveInterval &li, bool TrySplit,
701 unsigned id, unsigned index, unsigned end, MachineInstr *MI,
702 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
664703 unsigned Slot, int LdSlot,
665704 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
666705 VirtRegMap &vrm, SSARegMap *RegMap,
667706 const TargetRegisterClass* rc,
668707 SmallVector &ReMatIds,
708 unsigned &NewVReg, bool &HasDef, bool &HasUse,
709 const LoopInfo *loopInfo, std::vector &NewVRegs,
669710 std::vector &NewLIs) {
670711 RestartInstruction:
671712 for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
687728 if (DefIsReMat) {
688729 // If this is the rematerializable definition MI itself and
689730 // all of its uses are rematerialized, simply delete it.
690 if (MI == OrigDefMI && CanDelete) {
731 if (MI == ReMatOrigDefMI && CanDelete) {
691732 RemoveMachineInstrFromMaps(MI);
692733 MI->eraseFromParent();
693734 break;
694735 }
695736
696737 // If def for this use can't be rematerialized, then try folding.
697 TryFold = !OrigDefMI || (OrigDefMI && (MI == OrigDefMI || isLoad));
738 TryFold = !ReMatOrigDefMI ||
739 (ReMatOrigDefMI && (MI == ReMatOrigDefMI || isLoad));
698740 if (isLoad) {
699741 // Try fold loads (from stack slot, constant pool, etc.) into uses.
700742 FoldSS = isLoadSS;
702744 }
703745 }
704746
747 // If we are splitting live intervals, only fold if it's 1) the first
748 // use and it's a kill or 2) there isn't another use later in this MBB.
749 TryFold &= NewVReg == 0;
750 if (TryFold && TrySplit)
751 // Do not fold store into def here if we are splitting. We'll find an
752 // optimal point to insert a store later.
753 if (HasDef || mop.isDef() ||
754 (!mop.isKill() && hasALaterUse(MI->getParent(), MI, li.reg)))
755 TryFold = false;
756
705757 // FIXME: fold subreg use
706758 if (!isSubReg && TryFold &&
707 tryFoldMemoryOperand(MI, vrm, DefMI, index, i, FoldSS, FoldSlot, Reg))
759 tryFoldMemoryOperand(MI, vrm, ReMatDefMI, index, i, FoldSS, FoldSlot,
760 Reg))
708761 // Folding the load/store can completely change the instruction in
709762 // unpredictable ways, rescan it from the beginning.
710763 goto RestartInstruction;
711764
712765 // Create a new virtual register for the spill interval.
713 unsigned NewVReg = RegMap->createVirtualRegister(rc);
714 vrm.grow();
766 bool CreatedNewVReg = false;
767 if (NewVReg == 0) {
768 NewVReg = RegMap->createVirtualRegister(rc);
769 vrm.grow();
770 CreatedNewVReg = true;
771 }
772 mop.setReg(NewVReg);
715773
716774 // Scan all of the operands of this instruction rewriting operands
717775 // to use NewVReg instead of li.reg as appropriate. We do this for
724782 //
725783 // Keep track of whether we replace a use and/or def so that we can
726784 // create the spill interval with the appropriate range.
727 mop.setReg(NewVReg);
728785
729 bool HasUse = mop.isUse();
730 bool HasDef = mop.isDef();
786 HasUse = mop.isUse();
787 HasDef = mop.isDef();
731788 for (unsigned j = i+1, e = MI->getNumOperands(); j != e; ++j) {
732789 if (!MI->getOperand(j).isRegister())
733790 continue;
741798 }
742799 }
743800
744 if (DefIsReMat) {
745 vrm.setVirtIsReMaterialized(NewVReg, DefMI/*, CanDelete*/);
746 if (ReMatIds[id] == VirtRegMap::MAX_STACK_SLOT) {
747 // Each valnum may have its own remat id.
748 ReMatIds[id] = vrm.assignVirtReMatId(NewVReg);
801 if (CreatedNewVReg) {
802 if (DefIsReMat) {
803 vrm.setVirtIsReMaterialized(NewVReg, ReMatDefMI/*, CanDelete*/);
804 if (ReMatIds[id] == VirtRegMap::MAX_STACK_SLOT) {
805 // Each valnum may have its own remat id.
806 ReMatIds[id] = vrm.assignVirtReMatId(NewVReg);
807 } else {
808 vrm.assignVirtReMatId(NewVReg, ReMatIds[id]);
809 }
810 if (!CanDelete || (HasUse && HasDef)) {
811 // If this is a two-addr instruction then its use operands are
812 // rematerializable but its def is not. It should be assigned a
813 // stack slot.
814 vrm.assignVirt2StackSlot(NewVReg, Slot);
815 }
749816 } else {
750 vrm.assignVirtReMatId(NewVReg, ReMatIds[id]);
751 }
752 if (!CanDelete || (HasUse && HasDef)) {
753 // If this is a two-addr instruction then its use operands are
754 // rematerializable but its def is not. It should be assigned a
755 // stack slot.
756817 vrm.assignVirt2StackSlot(NewVReg, Slot);
757818 }
758 } else {
759 vrm.assignVirt2StackSlot(NewVReg, Slot);
760819 }
761820
762821 // create a new register interval for this spill / remat.
763822 LiveInterval &nI = getOrCreateInterval(NewVReg);
764 assert(nI.empty());
765 NewLIs.push_back(&nI);
766
767 // the spill weight is now infinity as it
768 // cannot be spilled again
769 nI.weight = HUGE_VALF;
823 if (CreatedNewVReg) {
824 NewLIs.push_back(&nI);
825 NewVRegs[MI->getParent()->getNumber()] = NewVReg;
826 if (TrySplit)
827 vrm.setIsSplitFromReg(NewVReg, li.reg);
828 }
770829
771830 if (HasUse) {
772 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
773 nI.getNextValue(~0U, 0, VNInfoAllocator));
774 DOUT << " +" << LR;
775 nI.addRange(LR);
831 if (CreatedNewVReg) {
832 LiveRange LR(getLoadIndex(index), getUseIndex(index)+1,
833 nI.getNextValue(~0U, 0, VNInfoAllocator));
834 DOUT << " +" << LR;
835 nI.addRange(LR);
836 } else {
837 // Extend the split live interval to this def / use.
838 unsigned End = getUseIndex(index)+1;
839 LiveRange LR(nI.ranges[nI.ranges.size()-1].end, End,
840 nI.getValNumInfo(nI.getNumValNums()-1));
841 DOUT << " +" << LR;
842 nI.addRange(LR);
843 }
776844 }
777845 if (HasDef) {
778846 LiveRange LR(getDefIndex(index), getStoreIndex(index),
780848 DOUT << " +" << LR;
781849 nI.addRange(LR);
782850 }
783
784 // update live variables if it is available
785 if (lv_)
786 lv_->addVirtualRegisterKilled(NewVReg, MI);
787
851
788852 DOUT << "\t\t\t\tAdded new interval: ";
789853 nI.print(DOUT, mri_);
790854 DOUT << '\n';
791855 }
792856 }
793857
858 bool LiveIntervals::anyKillInMBBAfterIdx(const LiveInterval &li,
859 MachineBasicBlock *MBB, unsigned Idx,
860 const VNInfo *VNI) const {
861 unsigned End = getMBBEndIdx(MBB);
862 if (VNI) {
863 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
864 unsigned KillIdx = VNI->kills[j];
865 if (KillIdx > Idx && KillIdx < End)
866 return true;
867 }
868 return false;
869 }
870
871 // Look at all the VNInfo's.
872 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
873 i != e; ++i) {
874 const VNInfo *VNI = *i;
875 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
876 unsigned KillIdx = VNI->kills[j];
877 if (KillIdx > Idx && KillIdx < End)
878 return true;
879 }
880 }
881 return false;
882 }
883
794884 void LiveIntervals::
795 rewriteInstructionsForSpills(const LiveInterval &li,
885 rewriteInstructionsForSpills(const LiveInterval &li, bool TrySplit,
796886 LiveInterval::Ranges::const_iterator &I,
797 MachineInstr *OrigDefMI, MachineInstr *DefMI,
887 MachineInstr *ReMatOrigDefMI, MachineInstr *ReMatDefMI,
798888 unsigned Slot, int LdSlot,
799889 bool isLoad, bool isLoadSS, bool DefIsReMat, bool CanDelete,
800890 VirtRegMap &vrm, SSARegMap *RegMap,
801891 const TargetRegisterClass* rc,
802892 SmallVector<int, 4> &ReMatIds,
893 const LoopInfo *loopInfo,
894 BitVector &SpillMBBs,
895 std::vector<std::pair<int, unsigned> > &SpillIdxes,
896 std::vector<unsigned> &NewVRegs,
803897 std::vector<LiveInterval*> &NewLIs) {
898 unsigned NewVReg = 0;
804899 unsigned index = getBaseIndex(I->start);
805900 unsigned end = getBaseIndex(I->end-1) + InstrSlots::NUM;
901 bool TrySplitMI = TrySplit && vrm.getPreSplitReg(li.reg) == 0;
806902 for (; index != end; index += InstrSlots::NUM) {
807903 // skip deleted instructions
808904 while (index != end && !getInstructionFromIndex(index))
810906 if (index == end) break;
811907
812908 MachineInstr *MI = getInstructionFromIndex(index);
813 rewriteInstructionForSpills(li, I->valno->id, index, end, MI,
814 OrigDefMI, DefMI, Slot, LdSlot, isLoad,
815 isLoadSS, DefIsReMat, CanDelete, vrm,
816 RegMap, rc, ReMatIds, NewLIs);
817 }
818 }
909 MachineBasicBlock *MBB = MI->getParent();
910 NewVReg = !TrySplitMI ? 0 : NewVRegs[MBB->getNumber()];
911 bool IsNew = NewVReg == 0;
912 bool HasDef = false;
913 bool HasUse = false;
914 rewriteInstructionForSpills(li, TrySplitMI, I->valno->id, index, end,
915 MI, ReMatOrigDefMI, ReMatDefMI, Slot, LdSlot,
916 isLoad, isLoadSS, DefIsReMat, CanDelete, vrm,
917 RegMap, rc, ReMatIds, NewVReg, HasDef, HasUse,
918 loopInfo, NewVRegs, NewLIs);
919 if (!HasDef && !HasUse)
920 continue;
921
922 // Update weight of spill interval.
923 LiveInterval &nI = getOrCreateInterval(NewVReg);
924 if (!TrySplitMI)
925 // The spill weight is now infinity as it cannot be spilled again.
926 nI.weight = HUGE_VALF;
927 else {
928 // Keep track of the last def in each MBB.
929 if (HasDef) {
930 if (MI != ReMatOrigDefMI || !CanDelete) {
931 // If this is a two-address code, then this index probably starts a
932 // VNInfo so we should examine all the VNInfo's.
933 bool HasKill = HasUse
934 ? anyKillInMBBAfterIdx(li, MBB, getDefIndex(index))
935 : anyKillInMBBAfterIdx(li, MBB, getDefIndex(index), I->valno);
936 if (!HasKill) {
937 unsigned MBBId = MBB->getNumber();
938 if ((int)index > SpillIdxes[MBBId].first)
939 // The high bit specifies whether this spill ought to be folded if
940 // possible.
941 SpillIdxes[MBBId] = std::make_pair(index, NewVReg | (1 << 31));
942 SpillMBBs.set(MBBId);
943 }
944 }
945 if (!IsNew) {
946 // If this interval hasn't been assigned a stack slot
947 // (because the earlier def is remat'd), do it now.
948 int SS = vrm.getStackSlot(NewVReg);
949 if (SS != (int)Slot) {
950 assert(SS == VirtRegMap::NO_STACK_SLOT);
951 vrm.assignVirt2StackSlot(NewVReg, Slot);
952 }
953 }
954 } else if (HasUse) {
955 // There are use(s) following the last def; it's not safe to fold the spill.
956 unsigned MBBId = MBB->getNumber();
957 if ((SpillIdxes[MBBId].second & ((1<<31)-1)) == NewVReg &&
958 (int)getUseIndex(index) > SpillIdxes[MBBId].first)
959 SpillIdxes[MBBId].second &= (1<<31)-1;
960 }
961
962 // Update spill weight.
963 unsigned loopDepth = loopInfo->getLoopDepth(MBB->getBasicBlock());
964 nI.weight += getSpillWeight(HasDef, HasUse, loopDepth);
965 }
966 }
967 }
968
969
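A small aside on the SpillIdxes encoding used in the loop above: the candidate spill vreg and a "may fold" flag share one unsigned, with bit 31 as the flag (set when the spill may be folded into the def, cleared again when a later use in the block is seen). Below is a hedged standalone sketch of that packing; the helper names are hypothetical and not part of the patch.

#include <cassert>

static unsigned packSpillInfo(unsigned VReg, bool MayFold) {
  return VReg | (MayFold ? (1u << 31) : 0u);
}
static unsigned spillVReg(unsigned Packed)    { return Packed & ((1u << 31) - 1); }
static bool     spillMayFold(unsigned Packed) { return (Packed & (1u << 31)) != 0; }

int main() {
  unsigned Packed = packSpillInfo(1027, true);  // def with no later kill in the MBB
  assert(spillVReg(Packed) == 1027 && spillMayFold(Packed));
  Packed &= (1u << 31) - 1;                     // later use in the MBB: clear the fold bit
  assert(!spillMayFold(Packed));
  return 0;
}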
819970
820971 std::vector<LiveInterval*> LiveIntervals::
821 addIntervalsForSpills(const LiveInterval &li, VirtRegMap &vrm) {
972 addIntervalsForSpills(const LiveInterval &li,
973 const LoopInfo *loopInfo, VirtRegMap &vrm) {
822974 // Since this is called after the analysis is done we don't know if
823975 // LiveVariables is available
824976 lv_ = getAnalysisToUpdate<LiveVariables>();
830982 li.print(DOUT, mri_);
831983 DOUT << '\n';
832984
985 // Each bit specifies whether a spill is required in the MBB.
986 BitVector SpillMBBs(mf_->getNumBlockIDs());
987 std::vector<std::pair<int, unsigned> > SpillIdxes(mf_->getNumBlockIDs(),
988 std::make_pair(-1,0));
989 std::vector<unsigned> NewVRegs(mf_->getNumBlockIDs(), 0);
833990 std::vector<LiveInterval*> NewLIs;
834991 SSARegMap *RegMap = mf_->getSSARegMap();
835992 const TargetRegisterClass* rc = RegMap->getRegClass(li.reg);
8441001 BitVector ReMatDelete(NumValNums);
8451002 unsigned Slot = VirtRegMap::MAX_STACK_SLOT;
8461003
1004 // Spilling a split live interval. It cannot be split any further. It is
1005 // also guaranteed to be a single val# / range interval.
1006 if (vrm.getPreSplitReg(li.reg)) {
1007 vrm.setIsSplitFromReg(li.reg, 0);
1008 bool DefIsReMat = vrm.isReMaterialized(li.reg);
1009 Slot = vrm.getStackSlot(li.reg);
1010 assert(Slot != VirtRegMap::MAX_STACK_SLOT);
1011 MachineInstr *ReMatDefMI = DefIsReMat ?
1012 vrm.getReMaterializedMI(li.reg) : NULL;
1013 int LdSlot = 0;
1014 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
1015 bool isLoad = isLoadSS ||
1016 (DefIsReMat && (ReMatDefMI->getInstrDescriptor()->Flags & M_LOAD_FLAG));
1017 vrm.removeAllSpillPtsForReg(li.reg);
1018 bool IsFirstRange = true;
1019 for (LiveInterval::Ranges::const_iterator
1020 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
1021 // If this is a split live interval with multiple ranges, it means there
1022 // are two-address instructions that re-defined the value. Only the
1023 // first def can be rematerialized!
1024 if (IsFirstRange) {
1025 rewriteInstructionsForSpills(li, false, I, NULL, ReMatDefMI,
1026 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1027 false, vrm, RegMap, rc, ReMatIds,
1028 loopInfo, SpillMBBs, SpillIdxes, NewVRegs, NewLIs);
1029 } else {
1030 rewriteInstructionsForSpills(li, false, I, NULL, 0,
1031 Slot, 0, false, false, false,
1032 false, vrm, RegMap, rc, ReMatIds,
1033 loopInfo, SpillMBBs, SpillIdxes, NewVRegs, NewLIs);
1034 }
1035 IsFirstRange = false;
1036 }
1037 return NewLIs;
1038 }
1039
1040 bool TrySplit = SplitAtBB && !intervalIsInOneMBB(li);
8471041 bool NeedStackSlot = false;
8481042 for (LiveInterval::const_vni_iterator i = li.vni_begin(), e = li.vni_end();
8491043 i != e; ++i) {
8531047 if (DefIdx == ~1U)
8541048 continue; // Dead val#.
8551049 // Is the def for the val# rematerializable?
856 MachineInstr *DefMI = (DefIdx == ~0u) ? 0 : getInstructionFromIndex(DefIdx);
857 if (DefMI && isReMaterializable(li, VNI, DefMI)) {
1050 MachineInstr *ReMatDefMI = (DefIdx == ~0u)
1051 ? 0 : getInstructionFromIndex(DefIdx);
1052 if (ReMatDefMI && isReMaterializable(li, VNI, ReMatDefMI)) {
8581053 // Remember how to remat the def of this val#.
859 ReMatOrigDefs[VN] = DefMI;
1054 ReMatOrigDefs[VN] = ReMatDefMI;
8601055 // Original def may be modified so we have to make a copy here. vrm must
8611056 // delete these!
862 ReMatDefs[VN] = DefMI = DefMI->clone();
863 vrm.setVirtIsReMaterialized(li.reg, DefMI);
1057 ReMatDefs[VN] = ReMatDefMI = ReMatDefMI->clone();
1058 vrm.setVirtIsReMaterialized(li.reg, ReMatDefMI);
8641059
8651060 bool CanDelete = true;
8661061 for (unsigned j = 0, ee = VNI->kills.size(); j != ee; ++j) {
8881083 }
8891084
8901085 // One stack slot per live interval.
891 if (NeedStackSlot)
1086 if (NeedStackSlot && vrm.getPreSplitReg(li.reg) == 0)
8921087 Slot = vrm.assignVirt2StackSlot(li.reg);
1088
8931089
8941090 // Create new intervals and rewrite defs and uses.
8951091 for (LiveInterval::Ranges::const_iterator
8961092 I = li.ranges.begin(), E = li.ranges.end(); I != E; ++I) {
897 MachineInstr *DefMI = ReMatDefs[I->valno->id];
898 MachineInstr *OrigDefMI = ReMatOrigDefs[I->valno->id];
899 bool DefIsReMat = DefMI != NULL;
1093 MachineInstr *ReMatDefMI = ReMatDefs[I->valno->id];
1094 MachineInstr *ReMatOrigDefMI = ReMatOrigDefs[I->valno->id];
1095 bool DefIsReMat = ReMatDefMI != NULL;
9001096 bool CanDelete = ReMatDelete[I->valno->id];
9011097 int LdSlot = 0;
902 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(DefMI, LdSlot);
1098 bool isLoadSS = DefIsReMat && tii_->isLoadFromStackSlot(ReMatDefMI, LdSlot);
9031099 bool isLoad = isLoadSS ||
904 (DefIsReMat && (DefMI->getInstrDescriptor()->Flags & M_LOAD_FLAG));
905 rewriteInstructionsForSpills(li, I, OrigDefMI, DefMI, Slot, LdSlot,
906 isLoad, isLoadSS, DefIsReMat, CanDelete,
907 vrm, RegMap, rc, ReMatIds, NewLIs);
908 }
1100 (DefIsReMat && (ReMatDefMI->getInstrDescriptor()->Flags & M_LOAD_FLAG));
1101 rewriteInstructionsForSpills(li, TrySplit, I, ReMatOrigDefMI, ReMatDefMI,
1102 Slot, LdSlot, isLoad, isLoadSS, DefIsReMat,
1103 CanDelete, vrm, RegMap, rc, ReMatIds,
1104 loopInfo, SpillMBBs, SpillIdxes, NewVRegs, NewLIs);
1105 }
1106
1107 // Insert spills if we are splitting.
1108 if (TrySplit && NeedStackSlot) {
1109 int Id = SpillMBBs.find_first();
1110 while (Id != -1) {
1111 unsigned index = SpillIdxes[Id].first;
1112 unsigned VReg = SpillIdxes[Id].second & ((1 << 31)-1);
1113 bool TryFold = SpillIdxes[Id].second & (1 << 31);
1114 MachineInstr *MI = getInstructionFromIndex(index);
1115 int OpIdx = -1;
1116 if (TryFold) {
1117 for (unsigned j = 0, ee = MI->getNumOperands(); j != ee; ++j) {
1118 MachineOperand &MO = MI->getOperand(j);
1119 if (!MO.isRegister() || MO.getReg() != VReg)
1120 continue;
1121 if (MO.isUse()) {
1122 // Can't fold if it's two-address code.
1123 OpIdx = -1;
1124 break;
1125 }
1126 OpIdx = (int)j;
1127 }
1128 }
1129 // Fold the store into the def if possible.
1130 if (OpIdx == -1 ||
1131 !tryFoldMemoryOperand(MI, vrm, NULL, index, OpIdx, true, Slot, VReg))
1132 // Else tell the spiller to issue a store for us.
1133 vrm.addSpillPoint(VReg, MI);
1134 Id = SpillMBBs.find_next(Id);
1135 }
1136 }
1137
1138 // Finalize spill weights.
1139 if (TrySplit)
1140 for (unsigned i = 0, e = NewLIs.size(); i != e; ++i)
1141 NewLIs[i]->weight /= NewLIs[i]->getSize();
9091142
9101143 return NewLIs;
9111144 }
192192 }
193193
194194 bool LiveVariables::addRegisterKilled(unsigned IncomingReg, MachineInstr *MI,
195 const MRegisterInfo *RegInfo,
195196 bool AddIfNotFound) {
196197 bool Found = false;
197198 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
223224 }
224225
225226 bool LiveVariables::addRegisterDead(unsigned IncomingReg, MachineInstr *MI,
227 const MRegisterInfo *RegInfo,
226228 bool AddIfNotFound) {
227229 bool Found = false;
228230 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
330332 void LiveVariables::addRegisterKills(unsigned Reg, MachineInstr *MI,
331333 SmallSet &SubKills) {
332334 if (SubKills.count(Reg) == 0)
333 addRegisterKilled(Reg, MI, true);
335 addRegisterKilled(Reg, MI, RegInfo, true);
334336 else {
335337 for (const unsigned *SubRegs = RegInfo->getImmediateSubRegisters(Reg);
336338 unsigned SubReg = *SubRegs; ++SubRegs)
341343 bool LiveVariables::HandlePhysRegKill(unsigned Reg, MachineInstr *RefMI) {
342344 SmallSet SubKills;
343345 if (HandlePhysRegKill(Reg, RefMI, SubKills)) {
344 addRegisterKilled(Reg, RefMI, true);
346 addRegisterKilled(Reg, RefMI, RegInfo, true);
345347 return true;
346348 } else {
347349 // Some sub-registers are killed by another MI.
358360 if (PhysRegUsed[Reg]) {
359361 if (!HandlePhysRegKill(Reg, LastRef)) {
360362 if (PhysRegPartUse[Reg])
361 addRegisterKilled(Reg, PhysRegPartUse[Reg], true);
363 addRegisterKilled(Reg, PhysRegPartUse[Reg], RegInfo, true);
362364 }
363365 } else if (PhysRegPartUse[Reg])
364366 // Add implicit use / kill to last partial use.
365 addRegisterKilled(Reg, PhysRegPartUse[Reg], true);
367 addRegisterKilled(Reg, PhysRegPartUse[Reg], RegInfo, true);
366368 else if (LastRef != MI)
367369 // Defined, but not used. However, watch out for cases where a super-reg
368370 // is also defined on the same MI.
369 addRegisterDead(Reg, LastRef);
371 addRegisterDead(Reg, LastRef, RegInfo);
370372 }
371373
372374 for (const unsigned *SubRegs = RegInfo->getSubRegisters(Reg);
375377 if (PhysRegUsed[SubReg]) {
376378 if (!HandlePhysRegKill(SubReg, LastRef)) {
377379 if (PhysRegPartUse[SubReg])
378 addRegisterKilled(SubReg, PhysRegPartUse[SubReg], true);
380 addRegisterKilled(SubReg, PhysRegPartUse[SubReg], RegInfo, true);
379381 }
380382 } else if (PhysRegPartUse[SubReg])
381383 // Add implicit use / kill to last use of a sub-register.
382 addRegisterKilled(SubReg, PhysRegPartUse[SubReg], true);
384 addRegisterKilled(SubReg, PhysRegPartUse[SubReg], RegInfo, true);
383385 else if (LastRef != MI)
384386 // This must be a def of the subreg on the same MI.
385 addRegisterDead(SubReg, LastRef);
387 addRegisterDead(SubReg, LastRef, RegInfo);
386388 }
387389 }
388390
560562 for (unsigned j = 0, e2 = VirtRegInfo[i].Kills.size(); j != e2; ++j) {
561563 if (VirtRegInfo[i].Kills[j] == VirtRegInfo[i].DefInst)
562564 addRegisterDead(i + MRegisterInfo::FirstVirtualRegister,
563 VirtRegInfo[i].Kills[j]);
565 VirtRegInfo[i].Kills[j], RegInfo);
564566 else
565567 addRegisterKilled(i + MRegisterInfo::FirstVirtualRegister,
566 VirtRegInfo[i].Kills[j]);
568 VirtRegInfo[i].Kills[j], RegInfo);
567569 }
568570
569571 // Check to make sure there are no unreachable blocks in the MC CFG for the
617619 }
618620 }
619621
622 /// transferKillDeadInfo - Similar to instructionChanged except it does not
623 /// update live variables internal data structures.
624 void LiveVariables::transferKillDeadInfo(MachineInstr *OldMI,
625 MachineInstr *NewMI,
626 const MRegisterInfo *RegInfo) {
627 // If the instruction defines any virtual registers, update the VarInfo,
628 // kill and dead information for the instruction.
629 for (unsigned i = 0, e = OldMI->getNumOperands(); i != e; ++i) {
630 MachineOperand &MO = OldMI->getOperand(i);
631 if (MO.isRegister() && MO.getReg() &&
632 MRegisterInfo::isVirtualRegister(MO.getReg())) {
633 unsigned Reg = MO.getReg();
634 if (MO.isDef()) {
635 if (MO.isDead()) {
636 MO.unsetIsDead();
637 addRegisterDead(Reg, NewMI, RegInfo);
638 }
639 }
640 if (MO.isKill()) {
641 MO.unsetIsKill();
642 addRegisterKilled(Reg, NewMI, RegInfo);
643 }
644 }
645 }
646 }
647
648
620649 /// removeVirtualRegistersKilled - Remove all killed info for the specified
621650 /// instruction.
622651 void LiveVariables::removeVirtualRegistersKilled(MachineInstr *MI) {
1515 #include "PhysRegTracker.h"
1616 #include "VirtRegMap.h"
1717 #include "llvm/Function.h"
18 #include "llvm/Analysis/LoopInfo.h"
1819 #include "llvm/CodeGen/MachineFunctionPass.h"
1920 #include "llvm/CodeGen/MachineInstr.h"
2021 #include "llvm/CodeGen/Passes.h"
6566 SSARegMap *regmap_;
6667 BitVector allocatableRegs_;
6768 LiveIntervals* li_;
69 const LoopInfo *loopInfo;
6870
6971 /// handled_ - Intervals are added to the handled_ set in the order of their
7072 /// start value. This is used for backtracking.
100102 // Make sure PassManager knows which analyses to make available
101103 // to coalescing and which analyses coalescing invalidates.
102104 AU.addRequiredTransitive<LiveIntervals>();
105 AU.addRequired<LoopInfo>();
103106 MachineFunctionPass::getAnalysisUsage(AU);
104107 }
105108
250253 regmap_ = mf_->getSSARegMap();
251254 allocatableRegs_ = mri_->getAllocatableSet(fn);
252255 li_ = &getAnalysis<LiveIntervals>();
256 loopInfo = &getAnalysis<LoopInfo>();
253257
254258 // We don't run the coalescer here because we have no reason to
255259 // interact with it. If the coalescer requires interaction, it
346350 DOUT << "\tinterval " << *i->first << " expired\n");
347351 inactive_.clear();
348352
349 // Add live-ins to every BB except for entry.
353 // Add live-ins to every BB except for entry. Also perform trivial coalescing.
350354 MachineFunction::iterator EntryMBB = mf_->begin();
351355 SmallVector<MachineBasicBlock*, 8> LiveInMBBs;
352356 for (LiveIntervals::iterator i = li_->begin(), e = li_->end(); i != e; ++i) {
353357 LiveInterval &cur = i->second;
354358 unsigned Reg = 0;
355 if (MRegisterInfo::isPhysicalRegister(cur.reg))
359 bool isPhys = MRegisterInfo::isPhysicalRegister(cur.reg);
360 if (isPhys)
356361 Reg = i->second.reg;
357362 else if (vrm_->isAssignedReg(cur.reg))
358363 Reg = attemptTrivialCoalescing(cur, vrm_->getPhys(cur.reg));
359364 if (!Reg)
365 continue;
366 // Ignore split live intervals.
367 if (!isPhys && vrm_->getPreSplitReg(cur.reg))
360368 continue;
361369 for (LiveInterval::Ranges::const_iterator I = cur.begin(), E = cur.end();
362370 I != E; ++I) {
685693 if (cur->weight != HUGE_VALF && cur->weight <= minWeight) {
686694 DOUT << "\t\t\tspilling(c): " << *cur << '\n';
687695 std::vector<LiveInterval*> added =
688 li_->addIntervalsForSpills(*cur, *vrm_);
696 li_->addIntervalsForSpills(*cur, loopInfo, *vrm_);
689697 if (added.empty())
690698 return; // Early exit if all spills were folded.
691699
737745 DOUT << "\t\t\tspilling(a): " << *i->first << '\n';
738746 earliestStart = std::min(earliestStart, i->first->beginNumber());
739747 std::vector<LiveInterval*> newIs =
740 li_->addIntervalsForSpills(*i->first, *vrm_);
748 li_->addIntervalsForSpills(*i->first, loopInfo, *vrm_);
741749 std::copy(newIs.begin(), newIs.end(), std::back_inserter(added));
742750 spilled.insert(reg);
743751 }
750758 DOUT << "\t\t\tspilling(i): " << *i->first << '\n';
751759 earliestStart = std::min(earliestStart, i->first->beginNumber());
752760 std::vector<LiveInterval*> newIs =
753 li_->addIntervalsForSpills(*i->first, *vrm_);
761 li_->addIntervalsForSpills(*i->first, loopInfo, *vrm_);
754762 std::copy(newIs.begin(), newIs.end(), std::back_inserter(added));
755763 spilled.insert(reg);
756764 }
14621462 if (UniqueUses.count(reg) != 0)
14631463 continue;
14641464 LiveInterval &RegInt = li_->getInterval(reg);
1465 RegInt.weight += li_->getSpillWeight(mop, loopDepth);
1465 RegInt.weight +=
1466 li_->getSpillWeight(mop.isDef(), mop.isUse(), loopDepth);
14661467 UniqueUses.insert(reg);
14671468 }
14681469 }
6262 VirtRegMap::VirtRegMap(MachineFunction &mf)
6363 : TII(*mf.getTarget().getInstrInfo()), MF(mf),
6464 Virt2PhysMap(NO_PHYS_REG), Virt2StackSlotMap(NO_STACK_SLOT),
65 Virt2ReMatIdMap(NO_STACK_SLOT), ReMatMap(NULL),
66 ReMatId(MAX_STACK_SLOT+1) {
65 Virt2ReMatIdMap(NO_STACK_SLOT), Virt2SplitMap(0),
66 ReMatMap(NULL), ReMatId(MAX_STACK_SLOT+1) {
6767 grow();
6868 }
6969
7272 Virt2PhysMap.grow(LastVirtReg);
7373 Virt2StackSlotMap.grow(LastVirtReg);
7474 Virt2ReMatIdMap.grow(LastVirtReg);
75 Virt2SplitMap.grow(LastVirtReg);
76 Virt2SpillPtsMap.grow(LastVirtReg);
7577 ReMatMap.grow(LastVirtReg);
7678 }
7779
277279 AvailableSpills &Spills, BitVector &RegKills,
278280 std::vector<MachineOperand*> &KillOps,
279281 VirtRegMap &VRM);
282 void SpillRegToStackSlot(MachineBasicBlock &MBB,
283 MachineBasicBlock::iterator &MII,
284 int Idx, unsigned PhysReg, int StackSlot,
285 const TargetRegisterClass *RC,
286 MachineInstr *&LastStore,
287 AvailableSpills &Spills,
288 SmallSet<MachineInstr*, 4> &ReMatDefs,
289 BitVector &RegKills,
290 std::vector<MachineOperand*> &KillOps,
291 VirtRegMap &VRM);
280292 void RewriteMBB(MachineBasicBlock &MBB, VirtRegMap &VRM);
281293 };
282294 }
816828 assert(NewMIs.size() == 1);
817829 MachineInstr *NewMI = NewMIs.back();
818830 NewMIs.clear();
819 unsigned Idx = NewMI->findRegisterUseOperandIdx(VirtReg);
831 int Idx = NewMI->findRegisterUseOperandIdx(VirtReg);
832 assert(Idx != -1);
820833 MachineInstr *FoldedMI = MRI->foldMemoryOperand(NewMI, Idx, SS);
821834 if (FoldedMI) {
822835 if (!VRM.hasPhys(UnfoldVR))
846859 return 0;
847860 }
848861
862 /// SpillRegToStackSlot - Spill a register to a specified stack slot. Check if
863 /// the last store to the same slot is now dead. If so, remove the last store.
864 void LocalSpiller::SpillRegToStackSlot(MachineBasicBlock &MBB,
865 MachineBasicBlock::iterator &MII,
866 int Idx, unsigned PhysReg, int StackSlot,
867 const TargetRegisterClass *RC,
868 MachineInstr *&LastStore,
869 AvailableSpills &Spills,
870 SmallSet<MachineInstr*, 4> &ReMatDefs,
871 BitVector &RegKills,
872 std::vector<MachineOperand*> &KillOps,
873 VirtRegMap &VRM) {
874 MRI->storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
875 DOUT << "Store:\t" << *next(MII);
876
877 // If there is a dead store to this stack slot, nuke it now.
878 if (LastStore) {
879 DOUT << "Removed dead store:\t" << *LastStore;
880 ++NumDSE;
881 SmallVector<unsigned, 2> KillRegs;
882 InvalidateKills(*LastStore, RegKills, KillOps, &KillRegs);
883 MachineBasicBlock::iterator PrevMII = LastStore;
884 bool CheckDef = PrevMII != MBB.begin();
885 if (CheckDef)
886 --PrevMII;
887 MBB.erase(LastStore);
888 VRM.RemoveFromFoldedVirtMap(LastStore);
889 if (CheckDef) {
890 // Look at defs of killed registers on the store. Mark the defs
891 // as dead since the store has been deleted and they aren't
892 // being reused.
893 for (unsigned j = 0, ee = KillRegs.size(); j != ee; ++j) {
894 bool HasOtherDef = false;
895 if (InvalidateRegDef(PrevMII, *MII, KillRegs[j], HasOtherDef)) {
896 MachineInstr *DeadDef = PrevMII;
897 if (ReMatDefs.count(DeadDef) && !HasOtherDef) {
898 // FIXME: This assumes a remat def does not have side
899 // effects.
900 MBB.erase(DeadDef);
901 VRM.RemoveFromFoldedVirtMap(DeadDef);
902 ++NumDRM;
903 }
904 }
905 }
906 }
907 }
908
909 LastStore = next(MII);
910
911 // If the stack slot value was previously available in some other
912 // register, change it now. Otherwise, make the register available,
913 // in PhysReg.
914 Spills.ModifyStackSlotOrReMat(StackSlot);
915 Spills.ClobberPhysReg(PhysReg);
916 Spills.addAvailable(StackSlot, LastStore, PhysReg);
917 ++NumStores;
918 }
919
849920 /// rewriteMBB - Keep track of which spills are available even after the
850 /// register allocator is done with them. If possible, avoid reloading vregs.
851922 /// register allocator is done with them. If possible, avoid reloading vregs.
851922 void LocalSpiller::RewriteMBB(MachineBasicBlock &MBB, VirtRegMap &VRM) {
852923 DOUT << MBB.getBasicBlock()->getName() << ":\n";
853924
869940 // ReMatDefs - These are rematerializable def MIs which are not deleted.
870941 SmallSet<MachineInstr*, 4> ReMatDefs;
871942
943 // ReloadedSplits - Splits must be reloaded once per MBB. This keeps track
944 // of which have been reloaded.
945 SmallSet<unsigned, 4> ReloadedSplits;
946
872947 // Keep track of kill information.
873948 BitVector RegKills(MRI->getNumRegs());
874949 std::vector<MachineOperand*> KillOps;
885960 MaybeDeadStores, Spills, RegKills, KillOps, VRM))
886961 NextMII = next(MII);
887962
963 MachineInstr &MI = *MII;
964 const TargetInstrDescriptor *TID = MI.getInstrDescriptor();
965
966 // Insert spills here if asked to.
967 std::vector<unsigned> SpillRegs = VRM.getSpillPtSpills(&MI);
968 for (unsigned i = 0, e = SpillRegs.size(); i != e; ++i) {
969 unsigned VirtReg = SpillRegs[i];
970 const TargetRegisterClass *RC = RegMap->getRegClass(VirtReg);
971 unsigned Phys = VRM.getPhys(VirtReg);
972 int StackSlot = VRM.getStackSlot(VirtReg);
973 MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
974 SpillRegToStackSlot(MBB, MII, i, Phys, StackSlot, RC,
975 LastStore, Spills, ReMatDefs, RegKills, KillOps, VRM);
976 }
977
888978 /// ReusedOperands - Keep track of operand reuse in case we need to undo
889979 /// reuse.
890 MachineInstr &MI = *MII;
891980 ReuseInfo ReusedOperands(MI, MRI);
892
893 const TargetInstrDescriptor *TID = MI.getInstrDescriptor();
894
895981 // Process all of the spilled uses and all non spilled reg references.
896982 for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
897983 MachineOperand &MO = MI.getOperand(i);
9161002 MF.setPhysRegUsed(Phys);
9171003 if (MO.isDef())
9181004 ReusedOperands.markClobbered(Phys);
1005
1006 // If it's a split live interval, insert a reload for the first use
1007 // unless it's previously defined in the MBB.
1008 unsigned SplitReg = VRM.getPreSplitReg(VirtReg);
1009 if (SplitReg) {
1010 if (ReloadedSplits.insert(VirtReg)) {
1011 bool HasUse = MO.isUse();
1012 // If it's a def, we don't need to reload the value unless it's
1013 // a two-address code.
1014 if (!HasUse) {
1015 for (unsigned j = i+1; j != e; ++j) {
1016 MachineOperand &MOJ = MI.getOperand(j);
1017 if (MOJ.isRegister() && MOJ.getReg() == VirtReg) {
1018 HasUse = true;
1019 break;
1020 }
1021 }
1022 }
1023
1024 if (HasUse) {
1025 if (VRM.isReMaterialized(VirtReg)) {
1026 MRI->reMaterialize(MBB, &MI, Phys,
1027 VRM.getReMaterializedMI(VirtReg));
1028 ++NumReMats;
1029 } else {
1030 const TargetRegisterClass* RC = RegMap->getRegClass(VirtReg);
1031 MRI->loadRegFromStackSlot(MBB, &MI, Phys, VRM.getStackSlot(VirtReg), RC);
1032 ++NumLoads;
1033 }
1034 // This invalidates Phys.
1035 Spills.ClobberPhysReg(Phys);
1036 UpdateKills(*prior(MII), RegKills, KillOps);
1037 DOUT << '\t' << *prior(MII);
1038 }
1039 }
1040 }
1041
9191042 unsigned RReg = SubIdx ? MRI->getSubReg(Phys, SubIdx) : Phys;
9201043 MI.getOperand(i).setReg(RReg);
9211044 continue;
11271250
11281251 DOUT << '\t' << MI;
11291252
1253
11301254 // If we have folded references to memory operands, make sure we clear all
11311255 // physical registers that may contain the value of the spilled virtual
11321256 // register
11351259 unsigned VirtReg = I->second.first;
11361260 VirtRegMap::ModRef MR = I->second.second;
11371261 DOUT << "Folded vreg: " << VirtReg << " MR: " << MR;
1138 if (VRM.isAssignedReg(VirtReg)) {
1139 DOUT << ": No stack slot!\n";
1262
1263 // If this is a split live interval, remember we have seen this so
1264 // we do not need to reload it for later uses.
1265 unsigned SplitReg = VRM.getPreSplitReg(VirtReg);
1266 if (SplitReg)
1267 ReloadedSplits.insert(VirtReg);
1268
1269 int SS = VRM.getStackSlot(VirtReg);
1270 if (SS == VirtRegMap::NO_STACK_SLOT)
11401271 continue;
1141 }
1142 int SS = VRM.getStackSlot(VirtReg);
11431272 FoldedSS.insert(SS);
11441273 DOUT << " - StackSlot: " << SS << "\n";
11451274
13371466 MI.getOperand(i).setReg(RReg);
13381467
13391468 if (!MO.isDead()) {
1340 MRI->storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
1341 DOUT << "Store:\t" << *next(MII);
1342
1343 // If there is a dead store to this stack slot, nuke it now.
13441469 MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
1345 if (LastStore) {
1346 DOUT << "Removed dead store:\t" << *LastStore;
1347 ++NumDSE;
1348 SmallVector<unsigned, 2> KillRegs;
1349 InvalidateKills(*LastStore, RegKills, KillOps, &KillRegs);
1350 MachineBasicBlock::iterator PrevMII = LastStore;
1351 bool CheckDef = PrevMII != MBB.begin();
1352 if (CheckDef)
1353 --PrevMII;
1354 MBB.erase(LastStore);
1355 VRM.RemoveFromFoldedVirtMap(LastStore);
1356 if (CheckDef) {
1357 // Look at defs of killed registers on the store. Mark the defs
1358 // as dead since the store has been deleted and they aren't
1359 // being reused.
1360 for (unsigned j = 0, ee = KillRegs.size(); j != ee; ++j) {
1361 bool HasOtherDef = false;
1362 if (InvalidateRegDef(PrevMII, MI, KillRegs[j], HasOtherDef)) {
1363 MachineInstr *DeadDef = PrevMII;
1364 if (ReMatDefs.count(DeadDef) && !HasOtherDef) {
1365 // FIXME: This assumes a remat def does not have side
1366 // effects.
1367 MBB.erase(DeadDef);
1368 VRM.RemoveFromFoldedVirtMap(DeadDef);
1369 ++NumDRM;
1370 }
1371 }
1372 }
1373 }
1374 }
1375 LastStore = next(MII);
1376
1377 // If the stack slot value was previously available in some other
1378 // register, change it now. Otherwise, make the register available,
1379 // in PhysReg.
1380 Spills.ModifyStackSlotOrReMat(StackSlot);
1381 Spills.ClobberPhysReg(PhysReg);
1382 Spills.addAvailable(StackSlot, LastStore, PhysReg);
1383 ++NumStores;
1470 SpillRegToStackSlot(MBB, MII, -1, PhysReg, StackSlot, RC, LastStore,
1471 Spills, ReMatDefs, RegKills, KillOps, VRM);
13841472
13851473 // Check to see if this is a noop copy. If so, eliminate the
13861474 // instruction before considering the dest reg to be changed.
1717 #define LLVM_CODEGEN_VIRTREGMAP_H
1818
1919 #include "llvm/Target/MRegisterInfo.h"
20 #include "llvm/ADT/BitVector.h"
20 #include "llvm/ADT/DenseMap.h"
2121 #include "llvm/ADT/IndexedMap.h"
2222 #include "llvm/Support/Streams.h"
2323 #include <map>
4949 /// spilled register is the temporary used to load it from the
5050 /// stack).
5151 IndexedMap<unsigned, VirtReg2IndexFunctor> Virt2PhysMap;
52
5253 /// Virt2StackSlotMap - This is virtual register to stack slot
5354 /// mapping. Each spilled virtual register has an entry in it
5455 /// which corresponds to the stack slot this register is spilled
5556 /// at.
5657 IndexedMap<int, VirtReg2IndexFunctor> Virt2StackSlotMap;
58
59 /// Virt2ReMatIdMap - This is virtual register to rematerialization id
60 /// mapping. Each spilled virtual register that should be remat'd has an
61 /// entry in it which corresponds to the remat id.
5762 IndexedMap<int, VirtReg2IndexFunctor> Virt2ReMatIdMap;
63
64 /// Virt2SplitMap - This is a split virtual register to original (pre-split)
65 /// virtual register mapping.
66 IndexedMap<unsigned, VirtReg2IndexFunctor> Virt2SplitMap;
67
68 /// ReMatMap - This is virtual register to re-materialized instruction
69 /// mapping. Each virtual register whose definition is going to be
70 /// re-materialized has an entry in it.
71 IndexedMap<MachineInstr*, VirtReg2IndexFunctor> ReMatMap;
72
5873 /// MI2VirtMap - This is MachineInstr to virtual register
5974 /// mapping. In the case of memory spill code being folded into
6075 /// instructions, we need to know which virtual register was
6176 /// read/written by this instruction.
6277 MI2VirtMapTy MI2VirtMap;
6378
64 /// ReMatMap - This is virtual register to re-materialized instruction
65 /// mapping. Each virtual register whose definition is going to be
66 /// re-materialized has an entry in it.
67 IndexedMap<MachineInstr*, VirtReg2IndexFunctor> ReMatMap;
79 /// SpillPt2VirtMap - This records the virtual registers which should
80 /// be spilled right after the MachineInstr due to live interval
81 /// splitting.
82 DenseMap<MachineInstr*, std::vector<unsigned> > SpillPt2VirtMap;
83
84 /// Virt2SpillPtsMap - This records the MachineInstrs where a virtual
85 /// register should be spilled due to live interval splitting.
86 IndexedMap<std::vector<MachineInstr*>, VirtReg2IndexFunctor>
87 Virt2SpillPtsMap;
6888
6989 /// ReMatId - Instead of assigning a stack slot to a to-be-rematerialized
7090 /// virtual register, a unique id is assigned. This keeps track of
119139 grow();
120140 }
121141
142 /// @brief records virtReg is a split live interval from SReg.
143 void setIsSplitFromReg(unsigned virtReg, unsigned SReg) {
144 Virt2SplitMap[virtReg] = SReg;
145 }
146
147 /// @brief returns the live interval virtReg is split from.
148 unsigned getPreSplitReg(unsigned virtReg) {
149 return Virt2SplitMap[virtReg];
150 }
151
122152 /// @brief returns true if the specified virtual register is not
123153 /// mapped to a stack slot or rematerialized.
124154 bool isAssignedReg(unsigned virtReg) const {
125 return getStackSlot(virtReg) == NO_STACK_SLOT &&
126 getReMatId(virtReg) == NO_STACK_SLOT;
155 if (getStackSlot(virtReg) == NO_STACK_SLOT &&
156 getReMatId(virtReg) == NO_STACK_SLOT)
157 return true;
158 // Split register can be assigned a physical register as well as a
159 // stack slot or remat id.
160 return (Virt2SplitMap[virtReg] && Virt2PhysMap[virtReg] != NO_PHYS_REG);
127161 }
128162
129163 /// @brief returns the stack slot mapped to the specified virtual
172206 /// registers are rematerialized and it's safe to delete the definition.
173207 void setVirtIsReMaterialized(unsigned virtReg, MachineInstr *def) {
174208 ReMatMap[virtReg] = def;
209 }
210
211 /// @brief returns the virtual registers that should be spilled due to
212 /// splitting right after the specified MachineInstr.
213 std::vector<unsigned> &getSpillPtSpills(MachineInstr *Pt) {
214 return SpillPt2VirtMap[Pt];
215 }
216
217 /// @brief records the specified MachineInstr as a spill point for virtReg.
218 void addSpillPoint(unsigned virtReg, MachineInstr *Pt) {
219 SpillPt2VirtMap[Pt].push_back(virtReg);
220 Virt2SpillPtsMap[virtReg].push_back(Pt);
221 }
222
223 /// @brief remove the virtReg from the list of registers that should be
224 /// spilled (due to splitting) right after the specified MachineInstr.
225 void removeRegFromSpillPt(MachineInstr *Pt, unsigned virtReg) {
226 std::vector<unsigned> &Regs = SpillPt2VirtMap[Pt];
227 if (Regs.back() == virtReg) // Most common case.
228 Regs.pop_back();
229 for (unsigned i = 0, e = Regs.size(); i != e; ++i)
230 if (Regs[i] == virtReg) {
231 Regs.erase(Regs.begin()+i);
232 break;
233 }
234 }
235
236 /// @brief specify virtReg is no longer being spilled due to splitting.
237 void removeAllSpillPtsForReg(unsigned virtReg) {
238 std::vector<MachineInstr*> &SpillPts = Virt2SpillPtsMap[virtReg];
239 for (unsigned i = 0, e = SpillPts.size(); i != e; ++i)
240 removeRegFromSpillPt(SpillPts[i], virtReg);
241 Virt2SpillPtsMap[virtReg].clear();
242 }
243
244 /// @brief remove the specified MachineInstr as a spill point for the
245 /// specified register.
246 void removeRegSpillPt(unsigned virtReg, MachineInstr *Pt) {
247 std::vector<MachineInstr*> &SpillPts = Virt2SpillPtsMap[virtReg];
248 if (SpillPts.back() == Pt) // Most common case.
249 SpillPts.pop_back();
250 for (unsigned i = 0, e = SpillPts.size(); i != e; ++i)
251 if (SpillPts[i] == Pt) {
252 SpillPts.erase(SpillPts.begin()+i-1);
253 break;
254 }
255 }
256
257 void transferSpillPts(MachineInstr *Old, MachineInstr *New) {
258 std::vector<unsigned> &OldRegs = SpillPt2VirtMap[Old];
259 while (!OldRegs.empty()) {
260 unsigned virtReg = OldRegs.back();
261 OldRegs.pop_back();
262 removeRegSpillPt(virtReg, Old);
263 addSpillPoint(virtReg, New);
264 }
175265 }
176266
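To summarize how the spill-point bookkeeping above fits together, here is a toy model with stand-in types (not the VirtRegMap implementation): SpillPt2VirtMap and Virt2SpillPtsMap are two views of the same relation, so addSpillPoint updates both, and transferSpillPts moves all pending spill points from a folded instruction to its replacement, much as tryFoldMemoryOperand does via vrm.transferSpillPts in the .cpp diff above.

#include <algorithm>
#include <cassert>
#include <map>
#include <vector>

typedef int FakeMI;  // stand-in for MachineInstr*

static std::map<FakeMI, std::vector<unsigned> > SpillPt2VirtMap;
static std::map<unsigned, std::vector<FakeMI> > Virt2SpillPtsMap;

// Record that VirtReg must be spilled right after Pt (both maps stay in sync).
static void addSpillPoint(unsigned VirtReg, FakeMI Pt) {
  SpillPt2VirtMap[Pt].push_back(VirtReg);
  Virt2SpillPtsMap[VirtReg].push_back(Pt);
}

// Move every pending spill point from Old to New, e.g. after folding Old away.
static void transferSpillPts(FakeMI Old, FakeMI New) {
  std::vector<unsigned> &OldRegs = SpillPt2VirtMap[Old];
  while (!OldRegs.empty()) {
    unsigned VirtReg = OldRegs.back();
    OldRegs.pop_back();
    std::vector<FakeMI> &Pts = Virt2SpillPtsMap[VirtReg];
    Pts.erase(std::remove(Pts.begin(), Pts.end(), Old), Pts.end());
    addSpillPoint(VirtReg, New);
  }
}

int main() {
  addSpillPoint(1025, 7);   // spill vreg 1025 right after instruction 7
  transferSpillPts(7, 8);   // instruction 7 was folded and replaced by 8
  assert(SpillPt2VirtMap[7].empty());
  assert(SpillPt2VirtMap[8].size() == 1 && SpillPt2VirtMap[8][0] == 1025u);
  assert(Virt2SpillPtsMap[1025].size() == 1 && Virt2SpillPtsMap[1025][0] == 8);
  return 0;
}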
177267 /// @brief Updates information about the specified virtual register's value