llvm.org GIT mirror: llvm / dbe266b
Author: Dan Gohman

Calls and invokes with the new clang.arc.no_objc_arc_exceptions metadata may
still unwind, but only in ways that the ARC optimizer doesn't need to
consider. This permits more aggressive optimization.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@150829 91177308-0d34-0410-b5e6-96231b3b80d8

3 changed files with 175 additions and 6 deletions.
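For reference, the annotation this change consumes looks like the following in IR. This is an excerpt from the new test added below; !4 is simply an empty metadata node, attached to the invoke instruction whose unwind edge the optimizer may now ignore:

  invoke void %t7(i8* %t3)
          to label %invoke.cont unwind label %lpad, !clang.arc.no_objc_arc_exceptions !4
  ...
  !4 = metadata !{}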
     /// terminator of the successor.
     unsigned getOperandNo() const {
       return It.getOperandNo();
+    }
+
+    /// getUse - Return the operand Use in the predecessor's terminator
+    /// of the successor.
+    Use &getUse() const {
+      return It.getUse();
     }
   };

     /// metadata.
     unsigned CopyOnEscapeMDKind;

+    /// NoObjCARCExceptionsMDKind - The Metadata Kind for
+    /// clang.arc.no_objc_arc_exceptions metadata.
+    unsigned NoObjCARCExceptionsMDKind;
+
     Constant *getRetainRVCallee(Module *M);
     Constant *getAutoreleaseRVCallee(Module *M);
     Constant *getReleaseCallee(Module *M);

       bool SomeSuccHasSame = false;
       bool AllSuccsHaveSame = true;
       PtrState &S = MyStates.getPtrTopDownState(Arg);
-      for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
+      succ_const_iterator SI(TI), SE(TI, false);
+
+      // If the terminator is an invoke marked with the
+      // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
+      // ignored, for ARC purposes.
+      if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
+        --SE;
+
+      for (; SI != SE; ++SI) {
         PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
         switch (SuccS.GetSeq()) {
         case S_None:

       bool SomeSuccHasSame = false;
       bool AllSuccsHaveSame = true;
       PtrState &S = MyStates.getPtrTopDownState(Arg);
-      for (succ_const_iterator SI(TI), SE(TI, false); SI != SE; ++SI) {
+      succ_const_iterator SI(TI), SE(TI, false);
+
+      // If the terminator is an invoke marked with the
+      // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
+      // ignored, for ARC purposes.
+      if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
+        --SE;
+
+      for (; SI != SE; ++SI) {
         PtrState &SuccS = BBStates[*SI].getPtrBottomUpState(Arg);
         switch (SuccS.GetSeq()) {
         case S_None: {

   succ_const_iterator SI(TI), SE(TI, false);
   if (SI == SE)
     MyStates.SetAsExit();
-  else
+  else {
+    // If the terminator is an invoke marked with the
+    // clang.arc.no_objc_arc_exceptions metadata, the unwind edge can be
+    // ignored, for ARC purposes.
+    if (isa<InvokeInst>(TI) && TI->getMetadata(NoObjCARCExceptionsMDKind))
+      --SE;
+
     do {
       const BasicBlock *Succ = *SI++;
       if (Succ == BB)
       ... (unchanged lines omitted here)
       }
       break;
     } while (SI != SE);
+  }

   // Visit all the instructions, bottom-up.
   for (BasicBlock::iterator I = BB->end(), E = BB->begin(); I != E; --I) {

     MyStates.SetAsEntry();
   else
     do {
-      const BasicBlock *Pred = *PI++;
+      unsigned OperandNo = PI.getOperandNo();
+      const Use &Us = PI.getUse();
+      ++PI;
+
+      // Skip invoke unwind edges on invoke instructions marked with
+      // clang.arc.no_objc_arc_exceptions.
+      if (const InvokeInst *II = dyn_cast<InvokeInst>(Us.getUser()))
+        if (OperandNo == II->getNumArgOperands() + 2 &&
+            II->getMetadata(NoObjCARCExceptionsMDKind))
+          continue;
+
+      const BasicBlock *Pred = cast<Instruction>(Us.getUser())->getParent();
       if (Pred == BB)
         continue;
       DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);

   OnStack.insert(EntryBB);
   do {
   dfs_next_succ:
-    succ_iterator End = succ_end(SuccStack.back().first);
+    TerminatorInst *TI = cast<TerminatorInst>(&SuccStack.back().first->back());
+    succ_iterator End = succ_iterator(TI, true);
     while (SuccStack.back().second != End) {
       BasicBlock *BB = *SuccStack.back().second++;
       if (Visited.insert(BB)) {

   SmallVector<BasicBlock *, 4> Exits;
   for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
     BasicBlock *BB = I;
-    if (BB->getTerminator()->getNumSuccessors() == 0)
+    if (cast<TerminatorInst>(&BB->back())->getNumSuccessors() == 0)
       Exits.push_back(BB);
   }

     M.getContext().getMDKindID("clang.imprecise_release");
   CopyOnEscapeMDKind =
     M.getContext().getMDKindID("clang.arc.copy_on_escape");
+  NoObjCARCExceptionsMDKind =
+    M.getContext().getMDKindID("clang.arc.no_objc_arc_exceptions");

   // Intuitively, objc_retain and others are nocapture, however in practice
   // they are not, because they return their argument value. And objc_release

; RUN: opt -S -objc-arc < %s | FileCheck %s

target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
%struct.__block_byref_x = type { i8*, %struct.__block_byref_x*, i32, i32, i32 }
%struct.__block_descriptor = type { i64, i64 }
@_NSConcreteStackBlock = external global i8*
@__block_descriptor_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }

; The optimizer should make use of the !clang.arc.no_objc_arc_exceptions
; metadata and eliminate the retainBlock+release pair here.
; rdar://10803830.

; CHECK: define void @test0(
; CHECK-NOT: @objc
; CHECK: }
define void @test0() {
entry:
  %x = alloca %struct.__block_byref_x, align 8
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>, align 8
  %byref.isa = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 0
  store i8* null, i8** %byref.isa, align 8
  %byref.forwarding = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 1
  store %struct.__block_byref_x* %x, %struct.__block_byref_x** %byref.forwarding, align 8
  %byref.flags = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 2
  store i32 0, i32* %byref.flags, align 8
  %byref.size = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 3
  store i32 32, i32* %byref.size, align 4
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__foo_block_invoke_0 to i8*), i8** %block.invoke, align 8
  %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
  %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 5
  %t1 = bitcast %struct.__block_byref_x* %x to i8*
  store i8* %t1, i8** %block.captured, align 8
  %t2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block to i8*
  %t3 = call i8* @objc_retainBlock(i8* %t2) nounwind, !clang.arc.copy_on_escape !4
  %t4 = getelementptr inbounds i8* %t3, i64 16
  %t5 = bitcast i8* %t4 to i8**
  %t6 = load i8** %t5, align 8
  %t7 = bitcast i8* %t6 to void (i8*)*
  invoke void %t7(i8* %t3)
          to label %invoke.cont unwind label %lpad, !clang.arc.no_objc_arc_exceptions !4

invoke.cont:                                      ; preds = %entry
  call void @objc_release(i8* %t3) nounwind, !clang.imprecise_release !4
  call void @_Block_object_dispose(i8* %t1, i32 8)
  ret void

lpad:                                             ; preds = %entry
  %t8 = landingpad { i8*, i32 } personality i8* bitcast (i32 (...)* @__objc_personality_v0 to i8*)
          cleanup
  call void @_Block_object_dispose(i8* %t1, i32 8)
  resume { i8*, i32 } %t8
}

; There is no !clang.arc.no_objc_arc_exceptions
; metadata here, so the optimizer shouldn't eliminate anything.

; CHECK: define void @test0_no_metadata(
; CHECK: call i8* @objc_retainBlock(
; CHECK: invoke
; CHECK: call void @objc_release(
; CHECK: }
define void @test0_no_metadata() {
entry:
  %x = alloca %struct.__block_byref_x, align 8
  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>, align 8
  %byref.isa = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 0
  store i8* null, i8** %byref.isa, align 8
  %byref.forwarding = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 1
  store %struct.__block_byref_x* %x, %struct.__block_byref_x** %byref.forwarding, align 8
  %byref.flags = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 2
  store i32 0, i32* %byref.flags, align 8
  %byref.size = getelementptr inbounds %struct.__block_byref_x* %x, i64 0, i32 3
  store i32 32, i32* %byref.size, align 4
  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 0
  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 1
  store i32 1107296256, i32* %block.flags, align 8
  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 2
  store i32 0, i32* %block.reserved, align 4
  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 3
  store i8* bitcast (void (i8*)* @__foo_block_invoke_0 to i8*), i8** %block.invoke, align 8
  %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 4
  store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
  %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block, i64 0, i32 5
  %t1 = bitcast %struct.__block_byref_x* %x to i8*
  store i8* %t1, i8** %block.captured, align 8
  %t2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, i8* }>* %block to i8*
  %t3 = call i8* @objc_retainBlock(i8* %t2) nounwind, !clang.arc.copy_on_escape !4
  %t4 = getelementptr inbounds i8* %t3, i64 16
  %t5 = bitcast i8* %t4 to i8**
  %t6 = load i8** %t5, align 8
  %t7 = bitcast i8* %t6 to void (i8*)*
  invoke void %t7(i8* %t3)
          to label %invoke.cont unwind label %lpad

invoke.cont:                                      ; preds = %entry
  call void @objc_release(i8* %t3) nounwind, !clang.imprecise_release !4
  call void @_Block_object_dispose(i8* %t1, i32 8)
  ret void

lpad:                                             ; preds = %entry
  %t8 = landingpad { i8*, i32 } personality i8* bitcast (i32 (...)* @__objc_personality_v0 to i8*)
          cleanup
  call void @_Block_object_dispose(i8* %t1, i32 8)
  resume { i8*, i32 } %t8
}

declare i8* @objc_retainBlock(i8*)
declare void @objc_release(i8*)
declare void @_Block_object_dispose(i8*, i32)
declare i32 @__objc_personality_v0(...)
declare void @__foo_block_invoke_0(i8* nocapture) uwtable ssp

!4 = metadata !{}