llvm.org GIT mirror: llvm @ 8f02330

[tsan] Add support for pointer typed atomic stores, loads, and cmpxchg

TSan instrumentation functions for atomic stores, loads, and cmpxchg work on
integer value types. This patch adds casts before calling TSan instrumentation
functions in cases where the value is a pointer.

Differential Revision: http://reviews.llvm.org/D17833

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@262876 91177308-0d34-0410-b5e6-96231b3b80d8

Author: Anna Zaks

2 changed files with 67 additions and 9 deletions.
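Background for the diff below: the TSan runtime hooks for atomics (e.g. __tsan_atomic64_store) accept and return plain integers, so when the atomically accessed value is a pointer it has to round-trip through a pointer-sized integer: ptrtoint on the way into the hook, inttoptr on the way back out. The standalone C++ sketch below mirrors that scheme at the source level; the hook names and bodies are hypothetical stand-ins (using GCC/Clang __atomic builtins), not the real runtime.

    #include <cstdint>

    // Hypothetical stand-ins for the integer-only runtime hooks; the real
    // __tsan_atomic64_* hooks also take an explicit memory-order argument
    // (the "i32 5" seen in the CHECK lines below is seq_cst).
    static void hook_atomic64_store(volatile uint64_t *A, uint64_t V) {
      __atomic_store_n(A, V, __ATOMIC_SEQ_CST);
    }
    static uint64_t hook_atomic64_load(volatile uint64_t *A) {
      return __atomic_load_n(A, __ATOMIC_SEQ_CST);
    }

    void store_ptr(void *volatile *Addr, void *Value) {
      // The "ptrtoint" direction: present the pointer value as an integer.
      hook_atomic64_store(reinterpret_cast<volatile uint64_t *>(Addr),
                          reinterpret_cast<uint64_t>(Value));
    }

    void *load_ptr(void *volatile *Addr) {
      // The "inttoptr" direction: cast the integer result back to a pointer.
      return reinterpret_cast<void *>(
          hook_atomic64_load(reinterpret_cast<volatile uint64_t *>(Addr)));
    }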
lib/Transforms/Instrumentation/ThreadSanitizer.cpp:

   return false;
 }
 
+static Value *createIntOrPtrToIntCast(Value *V, Type *Ty, IRBuilder<> &IRB) {
+  return isa<PointerType>(V->getType()) ?
+      IRB.CreatePtrToInt(V, Ty) : IRB.CreateIntCast(V, Ty, false);
+}
+
 // Both llvm and ThreadSanitizer atomic operations are based on C++11/C1x
 // standards. For background see C++11 standard. A slightly older, publicly
 // available draft of the standard (not entirely up-to-date, but close enough
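The helper just added above is what the rest of the patch leans on: pointer-typed operands get a ptrtoint, and integer operands keep the old zero-extending IntCast. A hypothetical standalone driver like the following (assuming headers and APIs of an LLVM tree from this era, circa 3.8) makes the two cases visible: the i8* argument comes out as a ptrtoint and the i32 argument as a zext.

    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/LLVMContext.h"
    #include "llvm/IR/Module.h"
    #include "llvm/Support/raw_ostream.h"
    using namespace llvm;

    // Local copy of the static helper from the patch.
    static Value *createIntOrPtrToIntCast(Value *V, Type *Ty, IRBuilder<> &IRB) {
      return isa<PointerType>(V->getType()) ?
          IRB.CreatePtrToInt(V, Ty) : IRB.CreateIntCast(V, Ty, false);
    }

    int main() {
      LLVMContext Ctx;
      Module M("demo", Ctx);
      // void @demo(i8* %p, i32 %i)
      FunctionType *FTy = FunctionType::get(
          Type::getVoidTy(Ctx),
          {Type::getInt8PtrTy(Ctx), Type::getInt32Ty(Ctx)}, false);
      Function *F = Function::Create(FTy, Function::ExternalLinkage, "demo", &M);
      BasicBlock *BB = BasicBlock::Create(Ctx, "entry", F);
      IRBuilder<> IRB(BB);
      Type *I64 = Type::getInt64Ty(Ctx);
      Function::arg_iterator AI = F->arg_begin();
      Value *P = &*AI++;                    // i8* %p
      Value *I = &*AI;                      // i32 %i
      createIntOrPtrToIntCast(P, I64, IRB); // emits: ptrtoint i8* %p to i64
      createIntOrPtrToIntCast(I, I64, IRB); // emits: zext i32 %i to i64
      IRB.CreateRetVoid();
      M.print(outs(), nullptr);             // dump the generated IR
    }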
...
     Type *PtrTy = Ty->getPointerTo();
     Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
                      createOrdering(&IRB, LI->getOrdering())};
-    CallInst *C = CallInst::Create(TsanAtomicLoad[Idx], Args);
-    ReplaceInstWithInst(I, C);
-
+    Type *OrigTy = cast<PointerType>(Addr->getType())->getElementType();
+    if (Ty == OrigTy) {
+      Instruction *C = CallInst::Create(TsanAtomicLoad[Idx], Args);
+      ReplaceInstWithInst(I, C);
+    } else {
+      // We are loading a pointer, so we need to cast the return value.
+      Value *C = IRB.CreateCall(TsanAtomicLoad[Idx], Args);
+      Instruction *Cast = CastInst::Create(Instruction::IntToPtr, C, OrigTy);
+      ReplaceInstWithInst(I, Cast);
+    }
   } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
     Value *Addr = SI->getPointerOperand();
     int Idx = getMemoryAccessFuncIndex(Addr, DL);
...
     Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
     Type *PtrTy = Ty->getPointerTo();
     Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
-                     IRB.CreateIntCast(SI->getValueOperand(), Ty, false),
+                     createIntOrPtrToIntCast(SI->getValueOperand(), Ty, IRB),
                      createOrdering(&IRB, SI->getOrdering())};
     CallInst *C = CallInst::Create(TsanAtomicStore[Idx], Args);
     ReplaceInstWithInst(I, C);
...
     const unsigned BitSize = ByteSize * 8;
     Type *Ty = Type::getIntNTy(IRB.getContext(), BitSize);
     Type *PtrTy = Ty->getPointerTo();
+    Value *CmpOperand =
+        createIntOrPtrToIntCast(CASI->getCompareOperand(), Ty, IRB);
+    Value *NewOperand =
+        createIntOrPtrToIntCast(CASI->getNewValOperand(), Ty, IRB);
     Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
-                     IRB.CreateIntCast(CASI->getCompareOperand(), Ty, false),
-                     IRB.CreateIntCast(CASI->getNewValOperand(), Ty, false),
+                     CmpOperand,
+                     NewOperand,
                      createOrdering(&IRB, CASI->getSuccessOrdering()),
                      createOrdering(&IRB, CASI->getFailureOrdering())};
     CallInst *C = IRB.CreateCall(TsanAtomicCAS[Idx], Args);
-    Value *Success = IRB.CreateICmpEQ(C, CASI->getCompareOperand());
-
-    Value *Res = IRB.CreateInsertValue(UndefValue::get(CASI->getType()), C, 0);
+    Value *Success = IRB.CreateICmpEQ(C, CmpOperand);
+    Value *OldVal = C;
+    Type *OrigOldValTy = CASI->getNewValOperand()->getType();
+    if (Ty != OrigOldValTy) {
+      // The value is a pointer, so we need to cast the return value.
+      OldVal = IRB.CreateIntToPtr(C, OrigOldValTy);
+    }
+
+    Value *Res =
+        IRB.CreateInsertValue(UndefValue::get(CASI->getType()), OldVal, 0);
     Res = IRB.CreateInsertValue(Res, Success, 1);
 
     I->replaceAllUsesWith(Res);
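One subtlety in the cmpxchg rewrite above: the runtime CAS hook returns only the old value as an i64, so the pass recomputes the success bit with an icmp against the already-cast integer compare operand (CmpOperand), and only afterwards converts the old value back with inttoptr to rebuild the { i8*, i1 } result that cmpxchg users expect. A minimal C++ sketch of that reconstruction, with hypothetical names:

    #include <cstdint>
    #include <utility>

    // Rebuild the (old value, success) pair from what the integer-only CAS
    // hook returns; mirrors the IRB.CreateICmpEQ / CreateIntToPtr /
    // CreateInsertValue sequence above.
    std::pair<void *, bool> rebuildPtrCasResult(uint64_t OldInt, void *Expected) {
      // icmp eq i64: the comparison happens in the integer domain, against
      // the same cast operand that was passed to the hook.
      bool Success = OldInt == reinterpret_cast<uint64_t>(Expected);
      // inttoptr: recover the pointer-typed old value for the result struct.
      void *OldPtr = reinterpret_cast<void *>(OldInt);
      return {OldPtr, Success};  // insertvalue { i8*, i1 } ..., 0 and 1
    }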
test/Instrumentation/ThreadSanitizer/atomic.ll:

 ; CHECK-LABEL: atomic64_load_seq_cst
 ; CHECK: call i64 @__tsan_atomic64_load(i64* %a, i32 5), !dbg
 
+define i8* @atomic64_load_seq_cst_ptr_ty(i8** %a) nounwind uwtable {
+entry:
+  %0 = load atomic i8*, i8** %a seq_cst, align 8, !dbg !7
+  ret i8* %0, !dbg !7
+}
+; CHECK-LABEL: atomic64_load_seq_cst_ptr_ty
+; CHECK: bitcast i8** %{{.+}} to i64*
+; CHECK-NEXT: call i64 @__tsan_atomic64_load(i64* %{{.+}}, i32 5), !dbg
+; CHECK-NEXT: inttoptr i64 %{{.+}} to i8*
+
 define void @atomic64_store_unordered(i64* %a) nounwind uwtable {
 entry:
   store atomic i64 0, i64* %a unordered, align 8, !dbg !7
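A note on how these checks are exercised: the test file is driven by FileCheck over the output of the TSan pass (conventionally a RUN line of the form "; RUN: opt < %s -tsan -S | FileCheck %s" in trees of this era; the exact line is outside this diff), so each new _ptr_ty function both runs the instrumentation and pins the expected cast sequence.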
...
 ; CHECK-LABEL: atomic64_store_seq_cst
 ; CHECK: call void @__tsan_atomic64_store(i64* %a, i64 0, i32 5), !dbg
 
+define void @atomic64_store_seq_cst_ptr_ty(i8** %a, i8* %v) nounwind uwtable {
+entry:
+  store atomic i8* %v, i8** %a seq_cst, align 8, !dbg !7
+  ret void, !dbg !7
+}
+; CHECK-LABEL: atomic64_store_seq_cst_ptr_ty
+; CHECK: %{{.*}} = bitcast i8** %{{.*}} to i64*
+; CHECK-NEXT: %{{.*}} = ptrtoint i8* %{{.*}} to i64
+; CHECK-NEXT: call void @__tsan_atomic64_store(i64* %{{.*}}, i64 %{{.*}}, i32 5), !dbg
+
 define void @atomic64_xchg_monotonic(i64* %a) nounwind uwtable {
 entry:
   atomicrmw xchg i64* %a, i64 0 monotonic, !dbg !7
...
 }
 ; CHECK-LABEL: atomic64_cas_seq_cst
 ; CHECK: call i64 @__tsan_atomic64_compare_exchange_val(i64* %a, i64 0, i64 1, i32 5, i32 5), !dbg
+
+define void @atomic64_cas_seq_cst_ptr_ty(i8** %a, i8* %v1, i8* %v2) nounwind uwtable {
+entry:
+  cmpxchg i8** %a, i8* %v1, i8* %v2 seq_cst seq_cst, !dbg !7
+  ret void
+}
+; CHECK-LABEL: atomic64_cas_seq_cst_ptr_ty
+; CHECK: {{.*}} = ptrtoint i8* %v1 to i64
+; CHECK-NEXT: {{.*}} = ptrtoint i8* %v2 to i64
+; CHECK-NEXT: {{.*}} = bitcast i8** %a to i64*
+; CHECK-NEXT: {{.*}} = call i64 @__tsan_atomic64_compare_exchange_val(i64* {{.*}}, i64 {{.*}}, i64 {{.*}}, i32 5, i32 5), !dbg
+; CHECK-NEXT: {{.*}} = icmp eq i64
+; CHECK-NEXT: {{.*}} = inttoptr i64 {{.*}} to i8*
+; CHECK-NEXT: {{.*}} = insertvalue { i8*, i1 } undef, i8* {{.*}}, 0
+; CHECK-NEXT: {{.*}} = insertvalue { i8*, i1 } {{.*}}, i1 {{.*}}, 1
 
 define i128 @atomic128_load_unordered(i128* %a) nounwind uwtable {
 entry: