llvm.org GIT mirror llvm / 5379f41
Fix PR3149. If an early clobber def is a physical register and it is tied to an input operand, it effectively extends the live range of the physical register. Currently we do not have a good way to represent this. 172 %ECX<def> = MOV32rr %reg1039<kill> 180 INLINEASM <es:subl $5,$1 sbbl $3,$0>, 10, %EAX<def>, 14, %ECX<earlyclobber,def>, 9, %EAX<kill>, 36, <fi#0>, 1, %reg0, 0, 9, %ECX<kill>, 36, <fi#1>, 1, %reg0, 0 188 %EAX<def> = MOV32rr %EAX<kill> 196 %ECX<def> = MOV32rr %ECX<kill> 204 %ECX<def> = MOV32rr %ECX<kill> 212 %EAX<def> = MOV32rr %EAX<kill> 220 %EAX<def> = MOV32rr %EAX 228 %reg1039<def> = MOV32rr %ECX<kill> The early clobber operand ties ECX input to the ECX def. The live interval of ECX is represented as this: %reg20,inf = [46,47:1)[174,230:0) 0@174-(230) 1@46-(47) The right way to represent this is something like %reg20,inf = [46,47:2)[174,182:1)[181,230:0) 0@174-(182) 1@181-230 2@46-(47) Of course that won't work since that means overlapping live ranges defined by two val#. The workaround for now is to add a bit to val# which says the val# is redefined by an early clobber def somewhere. This prevents the move at 228 from being optimized away by SimpleRegisterCoalescing::AdjustCopiesBackFrom. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@61259 91177308-0d34-0410-b5e6-96231b3b80d8 Evan Cheng 11 years ago
5 changed file(s) with 69 addition(s) and 11 deletion(s). Raw diff Collapse all Expand all
3737 /// - or reg # of the definition if it's a stack slot liveinterval.
3838 /// copy - Copy iff val# is defined by a copy; zero otherwise.
3939 /// hasPHIKill - One or more of the kills are PHI nodes.
40 /// redefByEC - Re-defined by early clobber somewhere during the live range.
4041 /// kills - Instruction # of the kills.
4142 struct VNInfo {
4243 unsigned id;
4344 unsigned def;
4445 MachineInstr *copy;
45 bool hasPHIKill;
46 bool hasPHIKill : 1;
47 bool redefByEC : 1;
4648 SmallVector<unsigned, 4> kills;
47 VNInfo() : id(~1U), def(~1U), copy(0), hasPHIKill(false) {}
49 VNInfo()
50 : id(~1U), def(~1U), copy(0), hasPHIKill(false), redefByEC(false) {}
4851 VNInfo(unsigned i, unsigned d, MachineInstr *c)
49 : id(i), def(d), copy(c), hasPHIKill(false) {}
52 : id(i), def(d), copy(c), hasPHIKill(false), redefByEC(false) {}
5053 };
5154
5255 /// LiveRange structure - This represents a simple register range in the
176179 DstValNo->def = SrcValNo->def;
177180 DstValNo->copy = SrcValNo->copy;
178181 DstValNo->hasPHIKill = SrcValNo->hasPHIKill;
182 DstValNo->redefByEC = SrcValNo->redefByEC;
179183 DstValNo->kills = SrcValNo->kills;
180184 }
181185
359359 mi->getOpcode() == TargetInstrInfo::INSERT_SUBREG ||
360360 tii_->isMoveInstr(*mi, SrcReg, DstReg))
361361 CopyMI = mi;
362 // Earlyclobbers move back one.
362363 ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
363364
364365 assert(ValNo->id == 0 && "First value in interval is not 0?");
434435 assert(interval.containsOneValue());
435436 unsigned DefIndex = getDefIndex(interval.getValNumInfo(0)->def);
436437 unsigned RedefIndex = getDefIndex(MIIdx);
437 // Earlyclobbers move back one.
438 if (MO.isEarlyClobber())
439 RedefIndex = getUseIndex(MIIdx);
438 // It cannot be an early clobber MO.
439 assert(!MO.isEarlyClobber() && "Unexpected early clobber!");
440440
441441 const LiveRange *OldLR = interval.getLiveRangeContaining(RedefIndex-1);
442442 VNInfo *OldValNo = OldLR->valno;
504504 // live until the end of the block. We've already taken care of the
505505 // rest of the live range.
506506 unsigned defIndex = getDefIndex(MIIdx);
507 // Earlyclobbers move back one.
508 if (MO.isEarlyClobber())
509 defIndex = getUseIndex(MIIdx);
507 // It cannot be an early clobber MO.
508 assert(!MO.isEarlyClobber() && "Unexpected early clobber!");
510509
511510 VNInfo *ValNo;
512511 MachineInstr *CopyMI = NULL;
591590
592591 // Already exists? Extend old live interval.
593592 LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
594 VNInfo *ValNo = (OldLR != interval.end())
593 bool Extend = OldLR != interval.end();
594 VNInfo *ValNo = Extend
595595 ? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
596 if (MO.isEarlyClobber() && Extend)
597 ValNo->redefByEC = true;
596598 LiveRange LR(start, end, ValNo);
597599 interval.addRange(LR);
598600 interval.addKill(LR.valno, end);
118118 if (ALR == IntA.end()) // Should never happen!
119119 return false;
120120 VNInfo *AValNo = ALR->valno;
121 // If it's re-defined by an early clobber somewhere in the live range, then
122 // it's not safe to eliminate the copy. FIXME: This is a temporary workaround.
123 // See PR3149:
124 // 172 %ECX<def> = MOV32rr %reg1039<kill>
125 // 180 INLINEASM <es:subl $5,$1
126 // sbbl $3,$0>, 10, %EAX<def>, 14, %ECX<earlyclobber,def>, 9, %EAX<kill>,
127 // 36, <fi#0>, 1, %reg0, 0, 9, %ECX<kill>, 36, <fi#1>, 1, %reg0, 0
128 // 188 %EAX<def> = MOV32rr %EAX<kill>
129 // 196 %ECX<def> = MOV32rr %ECX<kill>
130 // 204 %ECX<def> = MOV32rr %ECX<kill>
131 // 212 %EAX<def> = MOV32rr %EAX<kill>
132 // 220 %EAX<def> = MOV32rr %EAX
133 // 228 %reg1039<def> = MOV32rr %ECX<kill>
134 // The early clobber operand ties ECX input to the ECX def.
135 //
136 // The live interval of ECX is represented as this:
137 // %reg20,inf = [46,47:1)[174,230:0) 0@174-(230) 1@46-(47)
138 // The coalescer has no idea there was a def in the middle of [174,230].
139 if (AValNo->redefByEC)
140 return false;
121141
122142 // If AValNo is defined as a copy from IntB, we can potentially process this.
123143 // Get the instruction that defines this value number.
1616 ; return ((long long)Y << 32) | X;
1717 ;}
1818
19 define i64 @test(i32 %A, i32 %B, i32 %C) {
19 define i64 @test(i32 %A, i32 %B, i32 %C) nounwind {
2020 entry:
2121 %Y = alloca i32, align 4 ; <i32*> [#uses=2]
2222 %tmp4 = call i32 asm "subf${3:I}c $1,$4,$3\0A\09subfze $0,$2", "=r,=*&r,r,rI,r"( i32* %Y, i32 %A, i32 %B, i32 %C ) ; <i32> [#uses=1]
0 ; RUN: llvm-as < %s | llc -mtriple=i386-apple-darwin | %prcontext End 1 | grep {movl.*%ecx}
1 ; PR3149
2
3 @"\01LC" = internal constant [7 x i8] c"n0=%d\0A\00" ; <[7 x i8]*> [#uses=1]
4 @llvm.used = appending global [1 x i8*] [ i8* bitcast (i32 (i64, i64)* @umoddi3 to i8*) ], section "llvm.metadata" ; <[1 x i8*]*> [#uses=0]
5
6 define i32 @umoddi3(i64 %u, i64 %v) nounwind noinline {
7 entry:
8 %0 = trunc i64 %v to i32 ; <i32> [#uses=2]
9 %1 = trunc i64 %u to i32 ; <i32> [#uses=4]
10 %2 = lshr i64 %u, 32 ; <i64> [#uses=1]
11 %3 = trunc i64 %2 to i32 ; <i32> [#uses=2]
12 %4 = tail call i32 (i8*, ...)* @printf(i8* getelementptr ([7 x i8]* @"\01LC", i32 0, i32 0), i32 %1) nounwind ; <i32> [#uses=0]
13 %5 = icmp ult i32 %1, %0 ; <i1> [#uses=1]
14 br i1 %5, label %bb2, label %bb
15
16 bb: ; preds = %entry
17 %6 = lshr i64 %v, 32 ; <i64> [#uses=1]
18 %7 = trunc i64 %6 to i32 ; <i32> [#uses=1]
19 %asmtmp = tail call { i32, i32 } asm "subl $5,$1\0A\09sbbl $3,$0", "=r,=&r,0,imr,1,imr,~{dirflag},~{fpsr},~{flags}"(i32 %3, i32 %7, i32 %1, i32 %0) nounwind ; <{ i32, i32 }> [#uses=2]
20 %asmresult = extractvalue { i32, i32 } %asmtmp, 0 ; <i32> [#uses=1]
21 %asmresult1 = extractvalue { i32, i32 } %asmtmp, 1 ; <i32> [#uses=1]
22 br label %bb2
23
24 bb2: ; preds = %bb, %entry
25 %n1.0 = phi i32 [ %asmresult, %bb ], [ %3, %entry ] ; <i32> [#uses=1]
26 %n0.0 = phi i32 [ %asmresult1, %bb ], [ %1, %entry ] ; <i32> [#uses=1]
27 %8 = add i32 %n0.0, %n1.0 ; <i32> [#uses=1]
28 ret i32 %8
29 }
30
31 declare i32 @printf(i8*, ...) nounwind