llvm.org GIT mirror llvm / 896617b
Improve ValueAlreadyLiveAtInst with a cheap and dirty, but effective heuristic: the value is already live at the new memory operation if it is used by some other instruction in the memop's block. This is cheap and simple to compute (more so than full liveness). This improves the new heuristic even more: for example, it eliminates two of the three new instructions in 255.vortex:DbmFileInGrpHdr, one of the functions that the heuristic regressed. Overall this eliminates another 40 instructions from 403.gcc and visibly reduces register pressure in 255.vortex (though this only actually ends up saving those 2 instructions over the whole program).

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@60084 91177308-0d34-0410-b5e6-96231b3b80d8

Chris Lattner, 11 years ago
1 changed file (CodeGenPrepare.cpp) with 32 additions and 12 deletions.
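The heart of the change is the new use-scan at the end of ValueAlreadyLiveAtInst, shown in the second hunk below. Read on its own, that heuristic amounts to the small helper sketched here. This is an illustrative sketch only: the helper name isUsedInBlockOf is hypothetical, and it assumes the same 2008-era LLVM headers and API (Value::use_iterator, cast<Instruction>) that the patched file already uses.

    #include "llvm/BasicBlock.h"
    #include "llvm/Instructions.h"
    #include "llvm/Value.h"
    #include "llvm/Support/Casting.h"
    using namespace llvm;

    // Illustrative sketch, not part of the patch: a value that already has a use
    // inside the memory instruction's block must be live into that block anyway,
    // so folding it into the addressing mode does not add register pressure there.
    // Assumes Val is an Instruction or Argument (the earlier checks in
    // ValueAlreadyLiveAtInst guarantee this), so every user of Val is itself an
    // Instruction.
    static bool isUsedInBlockOf(Value *Val, Instruction *MemoryInst) {
      BasicBlock *MemBB = MemoryInst->getParent();
      for (Value::use_iterator UI = Val->use_begin(), E = Val->use_end();
           UI != E; ++UI)
        if (cast<Instruction>(*UI)->getParent() == MemBB)
          return true;
      return false;
    }

Note that any use in the block counts, regardless of whether it comes before or after the memory instruction, so this approximates liveness rather than computing it exactly; that is the sense in which the commit message calls the heuristic cheap and dirty.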
@@ -538,7 +538,14 @@
 class AddressingModeMatcher {
   SmallVectorImpl<Instruction*> &AddrModeInsts;
   const TargetLowering &TLI;
+
+  /// AccessTy/MemoryInst - This is the type for the access (e.g. double) and
+  /// the memory instruction that we're computing this address for.
   const Type *AccessTy;
+  Instruction *MemoryInst;
+
+  /// AddrMode - This is the addressing mode that we're building up. This is
+  /// part of the return value of this addressing mode matching stuff.
   ExtAddrMode &AddrMode;
 
   /// IgnoreProfitability - This is set to true when we should not do
@@ -547,8 +554,9 @@
   bool IgnoreProfitability;
 
   AddressingModeMatcher(SmallVectorImpl<Instruction*> &AMI,
-                        const TargetLowering &T, const Type *AT,ExtAddrMode &AM)
-    : AddrModeInsts(AMI), TLI(T), AccessTy(AT), AddrMode(AM) {
+                        const TargetLowering &T, const Type *AT,
+                        Instruction *MI, ExtAddrMode &AM)
+    : AddrModeInsts(AMI), TLI(T), AccessTy(AT), MemoryInst(MI), AddrMode(AM) {
     IgnoreProfitability = false;
   }
 public:
@@ -556,13 +564,15 @@
   /// Match - Find the maximal addressing mode that a load/store of V can fold,
   /// give an access type of AccessTy. This returns a list of involved
   /// instructions in AddrModeInsts.
-  static ExtAddrMode Match(Value *V, const Type *AccessTy,
+  static ExtAddrMode Match(Value *V, const Type *AccessTy,
+                           Instruction *MemoryInst,
                            SmallVectorImpl<Instruction*> &AddrModeInsts,
                            const TargetLowering &TLI) {
     ExtAddrMode Result;
 
     bool Success =
-      AddressingModeMatcher(AddrModeInsts,TLI,AccessTy,Result).MatchAddr(V, 0);
+      AddressingModeMatcher(AddrModeInsts, TLI, AccessTy,
+                            MemoryInst, Result).MatchAddr(V, 0);
     Success = Success; assert(Success && "Couldn't select *anything*?");
     return Result;
   }
@@ -944,7 +954,7 @@
   if (Val == 0 || Val == KnownLive1 || Val == KnownLive2)
     return true;
 
-  // All non-instruction values other than arguments (constants) are live.
+  // All values other than instructions and arguments (e.g. constants) are live.
   if (!isa<Instruction>(Val) && !isa<Argument>(Val)) return true;
 
   // If Val is a constant sized alloca in the entry block, it is live, this is
@@ -952,6 +962,16 @@
   // live for the whole function.
   if (AllocaInst *AI = dyn_cast<AllocaInst>(Val))
     if (AI->isStaticAlloca())
+      return true;
+
+  // Check to see if this value is already used in the memory instruction's
+  // block. If so, it's already live into the block at the very least, so we
+  // can reasonably fold it.
+  BasicBlock *MemBB = MemoryInst->getParent();
+  for (Value::use_iterator UI = Val->use_begin(), E = Val->use_end();
+       UI != E; ++UI)
+    // We know that uses of arguments and instructions have to be instructions.
+    if (cast<Instruction>(*UI)->getParent() == MemBB)
       return true;
 
   return false;
@@ -1043,7 +1063,7 @@
   // *actually* cover the shared instruction.
   ExtAddrMode Result;
   AddressingModeMatcher Matcher(MatchedAddrModeInsts, TLI, AddressAccessTy,
-                                Result);
+                                MemoryInst, Result);
   Matcher.IgnoreProfitability = true;
   bool Success = Matcher.MatchAddr(Address, 0);
   Success = Success; assert(Success && "Couldn't select *anything*?");
@@ -1081,19 +1101,19 @@
 ///
 /// This method is used to optimize both load/store and inline asms with memory
 /// operands.
-bool CodeGenPrepare::OptimizeMemoryInst(Instruction *LdStInst, Value *Addr,
+bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
                                         const Type *AccessTy,
                                         DenseMap<Value*,Value*> &SunkAddrs) {
   // Figure out what addressing mode will be built up for this operation.
   SmallVector<Instruction*, 16> AddrModeInsts;
-  ExtAddrMode AddrMode =
-    AddressingModeMatcher::Match(Addr, AccessTy, AddrModeInsts, *TLI);
+  ExtAddrMode AddrMode = AddressingModeMatcher::Match(Addr, AccessTy,MemoryInst,
+                                                      AddrModeInsts, *TLI);
 
   // Check to see if any of the instructions supersumed by this addr mode are
   // non-local to I's BB.
   bool AnyNonLocal = false;
   for (unsigned i = 0, e = AddrModeInsts.size(); i != e; ++i) {
-    if (IsNonLocalValue(AddrModeInsts[i], LdStInst->getParent())) {
+    if (IsNonLocalValue(AddrModeInsts[i], MemoryInst->getParent())) {
       AnyNonLocal = true;
       break;
     }
@@ -1108,7 +1128,7 @@
   // Insert this computation right after this user. Since our caller is
   // scanning from the top of the BB to the bottom, reuse of the expr are
   // guaranteed to happen later.
-  BasicBlock::iterator InsertPt = LdStInst;
+  BasicBlock::iterator InsertPt = MemoryInst;
 
   // Now that we determined the addressing expression we want to use and know
   // that we have to sink it into this block. Check to see if we have already
@@ -1180,7 +1200,7 @@
       SunkAddr = new IntToPtrInst(Result, Addr->getType(), "sunkaddr",InsertPt);
   }
 
-  LdStInst->replaceUsesOfWith(Addr, SunkAddr);
+  MemoryInst->replaceUsesOfWith(Addr, SunkAddr);
 
   if (Addr->use_empty())
     EraseDeadInstructions(Addr);