llvm.org GIT mirror llvm / cc70523
Avoid illegal integer promotion in fastisel

Stop folding constant adds into GEP when the type size doesn't match.
Otherwise, the adds' operands are effectively being promoted, changing the
conditions of an overflow. Results are different when:

    sext(a) + sext(b) != sext(a + b)

Problem originally found on x86-64, but also fixed issues with ARM and PPC,
which used similar code.

<rdar://problem/15292280>

Patch by Duncan Exon Smith!

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@194840 91177308-0d34-0410-b5e6-96231b3b80d8

Bob Wilson, 6 years ago
8 changed file(s) with 103 addition(s) and 22 deletion(s).
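As background for the diff below, the mismatch the commit message describes can be reproduced with plain integer arithmetic. This is a minimal standalone sketch (not part of the patch) showing why sign-extending after a narrow i8 add gives a different result than adding the sign-extended operands, using the same 64 + 64 values as the tests:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  int8_t a = 64, b = 64;

  // Add at the narrow (i8) width first, then sign-extend: 64 + 64 wraps to -128.
  int64_t narrow_then_extend = (int64_t)(int8_t)(a + b);   // -128

  // Sign-extend each operand first, then add at the wide width: no wrap.
  int64_t extend_then_add = (int64_t)a + (int64_t)b;       // 128

  // sext(a + b) != sext(a) + sext(b) once the narrow add overflows.
  printf("%lld vs %lld\n", (long long)narrow_then_extend,
         (long long)extend_then_add);
  return 0;
}
```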
357357 return 0;
358358 }
359359
360 /// \brief Check if \c Add is an add that can be safely folded into \c GEP.
361 ///
362 /// \c Add can be folded into \c GEP if:
363 /// - \c Add is an add,
364 /// - \c Add's size matches \c GEP's,
365 /// - \c Add is in the same basic block as \c GEP, and
366 /// - \c Add has a constant operand.
367 bool canFoldAddIntoGEP(const User *GEP, const Value *Add);
368
360369 private:
361370 bool SelectBinaryOp(const User *I, unsigned ISDOpcode);
362371
15701570 return tryToFoldLoadIntoMI(User, RI.getOperandNo(), LI);
15711571 }
15721572
1573
1573 bool FastISel::canFoldAddIntoGEP(const User *GEP, const Value *Add) {
1574 // Must be an add.
1575 if (!isa<AddOperator>(Add))
1576 return false;
1577 // Type size needs to match.
1578 if (TD.getTypeSizeInBits(GEP->getType()) !=
1579 TD.getTypeSizeInBits(Add->getType()))
1580 return false;
1581 // Must be in the same basic block.
1582 if (isa<Instruction>(Add) &&
1583 FuncInfo.MBBMap[cast<Instruction>(Add)->getParent()] != FuncInfo.MBB)
1584 return false;
1585 // Must have a constant operand.
1586 return isa<ConstantInt>(cast<AddOperator>(Add)->getOperand(1));
1587 }
1588
899899 TmpOffset += CI->getSExtValue() * S;
900900 break;
901901 }
902 if (isa<AddOperator>(Op) &&
903 (!isa<Instruction>(Op) ||
904 FuncInfo.MBBMap[cast<Instruction>(Op)->getParent()]
905 == FuncInfo.MBB) &&
906 isa<ConstantInt>(cast<AddOperator>(Op)->getOperand(1))) {
907 // An add (in the same block) with a constant operand. Fold the
908 // constant.
902 if (canFoldAddIntoGEP(U, Op)) {
903 // A compatible add with a constant operand. Fold the constant.
909904 ConstantInt *CI =
910905 cast<ConstantInt>(cast<AddOperator>(Op)->getOperand(1));
911906 TmpOffset += CI->getSExtValue() * S;
335335 TmpOffset += CI->getSExtValue() * S;
336336 break;
337337 }
338 if (isa<AddOperator>(Op) &&
339 (!isa<Instruction>(Op) ||
340 FuncInfo.MBBMap[cast<Instruction>(Op)->getParent()]
341 == FuncInfo.MBB) &&
342 isa<ConstantInt>(cast<AddOperator>(Op)->getOperand(1))) {
343 // An add (in the same block) with a constant operand. Fold the
344 // constant.
338 if (canFoldAddIntoGEP(U, Op)) {
339 // A compatible add with a constant operand. Fold the constant.
345340 ConstantInt *CI =
346341 cast<ConstantInt>(cast<AddOperator>(Op)->getOperand(1));
347342 TmpOffset += CI->getSExtValue() * S;
560560 Disp += CI->getSExtValue() * S;
561561 break;
562562 }
563 if (isa<AddOperator>(Op) &&
564 (!isa<Instruction>(Op) ||
565 FuncInfo.MBBMap[cast<Instruction>(Op)->getParent()]
566 == FuncInfo.MBB) &&
567 isa<ConstantInt>(cast<AddOperator>(Op)->getOperand(1))) {
568 // An add (in the same block) with a constant operand. Fold the
569 // constant.
563 if (canFoldAddIntoGEP(U, Op)) {
564 // A compatible add with a constant operand. Fold the constant.
570565 ConstantInt *CI =
571566 cast<ConstantInt>(cast<AddOperator>(Op)->getOperand(1));
572567 Disp += CI->getSExtValue() * S;
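In the callers above, once canFoldAddIntoGEP accepts the add, its constant operand is folded into the running displacement via CI->getSExtValue() * S. A hedged sketch of why that fold is only safe when the add already has pointer width, using hypothetical values (base address, scale, and operands chosen for illustration, not taken from LLVM):

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  uint64_t base = 0x1000;     // hypothetical pointer value
  int64_t scale = 1;          // element size S for an i8 array
  int8_t var = 64, cst = 64;  // operands of the narrow i8 add

  // What the IR computes: the i8 add wraps to -128, and the GEP index is
  // that wrapped value sign-extended to pointer width.
  uint64_t correct = base + (int64_t)(int8_t)(var + cst) * scale;        // base - 128

  // What the old fold computed: keep the variable operand as the index and
  // fold the constant into the displacement, i.e. promote both operands.
  uint64_t folded = base + (int64_t)var * scale + (int64_t)cst * scale;  // base + 128

  printf("correct %#llx, folded %#llx\n",
         (unsigned long long)correct, (unsigned long long)folded);
  return 0;
}
```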
0 ; fastisel should not fold add with non-pointer bitwidth
1 ; sext(a) + sext(b) != sext(a + b)
2 ; RUN: llc -mtriple=armv7-apple-ios %s -O0 -o - | FileCheck %s
3
4 define zeroext i8 @gep_promotion(i8* %ptr) nounwind uwtable ssp {
5 entry:
6 %ptr.addr = alloca i8*, align 8
7 %add = add i8 64, 64 ; 0x40 + 0x40
8 %0 = load i8** %ptr.addr, align 8
9
10 ; CHECK-LABEL: _gep_promotion:
11 ; CHECK: ldrb {{r[0-9]+}}, {{\[r[0-9]+\]}}
12 %arrayidx = getelementptr inbounds i8* %0, i8 %add
13
14 %1 = load i8* %arrayidx, align 1
15 ret i8 %1
16 }
17
0 ; fastisel should not fold add with non-pointer bitwidth
1 ; sext(a) + sext(b) != sext(a + b)
2 ; RUN: llc -mtriple=powerpc64-unknown-freebsd10.0 %s -O0 -o - | FileCheck %s
3
4 define zeroext i8 @gep_promotion(i8* %ptr) nounwind uwtable ssp {
5 entry:
6 %ptr.addr = alloca i8*, align 8
7 %add = add i8 64, 64 ; 0x40 + 0x40
8 %0 = load i8** %ptr.addr, align 8
9
10 ; CHECK-LABEL: gep_promotion:
11 ; CHECK: lbz {{[0-9]+}}, 0({{.*}})
12 %arrayidx = getelementptr inbounds i8* %0, i8 %add
13
14 %1 = load i8* %arrayidx, align 1
15 ret i8 %1
16 }
0 ; fastisel should not fold add with non-pointer bitwidth
1 ; sext(a) + sext(b) != sext(a + b)
2 ; RUN: llc -mtriple=x86_64-apple-darwin %s -O0 -o - | FileCheck %s
3
4 define zeroext i8 @gep_promotion(i8* %ptr) nounwind uwtable ssp {
5 entry:
6 %ptr.addr = alloca i8*, align 8
7 %add = add i8 64, 64 ; 0x40 + 0x40
8 %0 = load i8** %ptr.addr, align 8
9
10 ; CHECK-LABEL: _gep_promotion:
11 ; CHECK: movzbl ({{.*}})
12 %arrayidx = getelementptr inbounds i8* %0, i8 %add
13
14 %1 = load i8* %arrayidx, align 1
15 ret i8 %1
16 }
17
18 define zeroext i8 @gep_promotion_nonconst(i8 %i, i8* %ptr) nounwind uwtable ssp {
19 entry:
20 %i.addr = alloca i8, align 4
21 %ptr.addr = alloca i8*, align 8
22 store i8 %i, i8* %i.addr, align 4
23 store i8* %ptr, i8** %ptr.addr, align 8
24 %0 = load i8* %i.addr, align 4
25 ; CHECK-LABEL: _gep_promotion_nonconst:
26 ; CHECK: movzbl ({{.*}})
27 %xor = xor i8 %0, -128 ; %0 ^ 0x80
28 %add = add i8 %xor, -127 ; %xor + 0x81
29 %1 = load i8** %ptr.addr, align 8
30
31 %arrayidx = getelementptr inbounds i8* %1, i8 %add
32
33 %2 = load i8* %arrayidx, align 1
34 ret i8 %2
35 }
36