llvm.org GIT mirror llvm / 9c0b078
[WebAssembly] Don't fold negative load/store offsets in fast-isel. WebAssembly's load/store offsets are unsigned and don't wrap, so it's not valid to fold in a negative offset. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@290342 91177308-0d34-0410-b5e6-96231b3b80d8 Dan Gohman 3 years ago
2 changed file(s) with 54 addition(s) and 9 deletion(s). Raw diff Collapse all Expand all
8383 return Base.FI;
8484 }
8585
86 void setOffset(int64_t Offset_) { Offset = Offset_; }
86 void setOffset(int64_t Offset_) {
// Wasm load/store offsets are unsigned and don't wrap, so a negative
// offset can never be valid here; callers must filter them out first.
87 assert(Offset_ >= 0 && "Offsets must be non-negative");
88 Offset = Offset_;
89 }
8790 int64_t getOffset() const { return Offset; }
8891 void setGlobalValue(const GlobalValue *G) { GV = G; }
8992 const GlobalValue *getGlobalValue() const { return GV; }
235238 case Instruction::GetElementPtr: {
236239 Address SavedAddr = Addr;
237240 uint64_t TmpOffset = Addr.getOffset();
241 // Non-inbounds geps can wrap; wasm's offsets can't.
242 if (!cast<GEPOperator>(U)->isInBounds())
243 goto unsupported_gep;
238244 // Iterate through the GEP folding the constants into offsets where
239245 // we can.
240246 for (gep_type_iterator GTI = gep_type_begin(U), E = gep_type_end(U);
271277 }
272278 }
273279 }
274 // Try to grab the base operand now.
275 Addr.setOffset(TmpOffset);
276 if (computeAddress(U->getOperand(0), Addr))
277 return true;
280 // Don't fold in negative offsets.
281 if (int64_t(TmpOffset) >= 0) {
282 // Try to grab the base operand now.
283 Addr.setOffset(TmpOffset);
284 if (computeAddress(U->getOperand(0), Addr))
285 return true;
286 }
278287 // We failed, restore everything and try the other options.
279288 Addr = SavedAddr;
280289 unsupported_gep:
300309 std::swap(LHS, RHS);
301310
302311 if (const ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) {
303 Addr.setOffset(Addr.getOffset() + CI->getSExtValue());
304 return computeAddress(LHS, Addr);
312 uint64_t TmpOffset = Addr.getOffset() + CI->getSExtValue();
313 if (int64_t(TmpOffset) >= 0) {
314 Addr.setOffset(TmpOffset);
315 return computeAddress(LHS, Addr);
316 }
305317 }
306318
307319 Address Backup = Addr;
317329 const Value *RHS = U->getOperand(1);
318330
319331 if (const ConstantInt *CI = dyn_cast<ConstantInt>(RHS)) {
320 Addr.setOffset(Addr.getOffset() - CI->getSExtValue());
321 return computeAddress(LHS, Addr);
332 int64_t TmpOffset = Addr.getOffset() - CI->getSExtValue();
333 if (TmpOffset >= 0) {
334 Addr.setOffset(TmpOffset);
335 return computeAddress(LHS, Addr);
336 }
322337 }
323338 break;
324339 }
4545 %y = bitcast i64 %x to double
4646 ret double %y
4747 }
48
49 ; Do fold offsets into geps.
50 ; CHECK-LABEL: do_fold_offset_into_gep:
51 ; CHECK: i64.load $push{{[0-9]+}}=, 8($0)
52 define i64 @do_fold_offset_into_gep(i64* %p) {
53 bb:
; inbounds GEP with a positive constant index (+1 i64 = +8 bytes): per the
; CHECK above, fast-isel folds this into the load's offset field, 8($0).
54 %tmp = getelementptr inbounds i64, i64* %p, i32 1
55 %tmp2 = load i64, i64* %tmp, align 8
56 ret i64 %tmp2
57 }
58
59 ; Don't fold negative offsets into geps.
60 ; CHECK-LABEL: dont_fold_negative_offset:
61 ; CHECK: i64.load $push{{[0-9]+}}=, 0($pop{{[0-9]+}})
62 define i64 @dont_fold_negative_offset(i64* %p) {
63 bb:
; Negative index means a negative byte offset; wasm offsets are unsigned,
; so per the CHECK above the address is computed explicitly and the load
; uses offset 0 (0($pop...)) instead of folding -8.
64 %tmp = getelementptr inbounds i64, i64* %p, i32 -1
65 %tmp2 = load i64, i64* %tmp, align 8
66 ret i64 %tmp2
67 }
68
69 ; Don't fold non-inbounds geps.
70 ; CHECK-LABEL: dont_fold_non_inbounds_gep:
71 ; CHECK: i64.load $push{{[0-9]+}}=, 0($pop{{[0-9]+}})
72 define i64 @dont_fold_non_inbounds_gep(i64* %p) {
73 bb:
; No 'inbounds' on the GEP: the address arithmetic may wrap, and wasm
; offsets don't, so per the CHECK above the add stays explicit and the
; load uses offset 0 (0($pop...)).
74 %tmp = getelementptr i64, i64* %p, i32 1
75 %tmp2 = load i64, i64* %tmp, align 8
76 ret i64 %tmp2
77 }