llvm.org GIT mirror llvm / dd9344f
Move FindAvailableLoadedValue and isSafeToLoadUnconditionally out of lib/Transforms/Utils and into lib/Analysis so that Analysis passes can use them. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@104949 91177308-0d34-0410-b5e6-96231b3b80d8 Dan Gohman 10 years ago
11 changed file(s) with 291 addition(s) and 252 deletion(s). Raw diff Collapse all Expand all
0 //===- Loads.h - Local load analysis --------------------------------------===//
1 //
2 // The LLVM Compiler Infrastructure
3 //
4 // This file is distributed under the University of Illinois Open Source
5 // License. See LICENSE.TXT for details.
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file declares simple local analyses for load instructions.
10 //
11 //===----------------------------------------------------------------------===//
12
13 #ifndef LLVM_ANALYSIS_LOADS_H
14 #define LLVM_ANALYSIS_LOADS_H
15
16 #include "llvm/BasicBlock.h"
17
18 namespace llvm {
19
20 class AliasAnalysis;
21 class TargetData;
22
23 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
24 /// from this value cannot trap. If it is not obviously safe to load from the
25 /// specified pointer, we do a quick local scan of the basic block containing
26 /// ScanFrom, to determine if the address is already accessed.
27 bool isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
28 unsigned Align, const TargetData *TD = 0);
29
30 /// FindAvailableLoadedValue - Scan the ScanBB block backwards (starting at
31 /// the instruction before ScanFrom) checking to see if we have the value at
32 /// the memory address *Ptr locally available within a small number of
33 /// instructions. If the value is available, return it.
34 ///
35 /// If not, return the iterator for the last validated instruction that the
36 /// value would be live through. If we scanned the entire block and didn't
37 /// find something that invalidates *Ptr or provides it, ScanFrom would be
38 /// left at begin() and this returns null. ScanFrom could also be left
39 ///
40 /// MaxInstsToScan specifies the maximum instructions to scan in the block.
41 /// If it is set to 0, it will scan the whole block. You can also optionally
42 /// specify an alias analysis implementation, which makes this more precise.
43 Value *FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB,
44 BasicBlock::iterator &ScanFrom,
45 unsigned MaxInstsToScan = 6,
46 AliasAnalysis *AA = 0);
47
48 }
49
50 #endif
6464 // instruction specified by To.
6565 //
6666 void ReplaceInstWithInst(Instruction *From, Instruction *To);
67
68 /// FindAvailableLoadedValue - Scan the ScanBB block backwards (starting at the
69 /// instruction before ScanFrom) checking to see if we have the value at the
70 /// memory address *Ptr locally available within a small number of instructions.
71 /// If the value is available, return it.
72 ///
73 /// If not, return the iterator for the last validated instruction that the
74 /// value would be live through. If we scanned the entire block and didn't find
75 /// something that invalidates *Ptr or provides it, ScanFrom would be left at
76 /// begin() and this returns null. ScanFrom could also be left
77 ///
78 /// MaxInstsToScan specifies the maximum instructions to scan in the block. If
79 /// it is set to 0, it will scan the whole block. You can also optionally
80 /// specify an alias analysis implementation, which makes this more precise.
81 Value *FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB,
82 BasicBlock::iterator &ScanFrom,
83 unsigned MaxInstsToScan = 6,
84 AliasAnalysis *AA = 0);
8567
8668 /// FindFunctionBackedges - Analyze the specified function to find all of the
8769 /// loop backedges in the function and return them. This is a relatively cheap
2929
3030 template class SmallVectorImpl;
3131
32 //===----------------------------------------------------------------------===//
33 // Local analysis.
34 //
35
36 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
37 /// from this value cannot trap. If it is not obviously safe to load from the
38 /// specified pointer, we do a quick local scan of the basic block containing
39 /// ScanFrom, to determine if the address is already accessed.
40 bool isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
41 unsigned Align, const TargetData *TD = 0);
42
4332 //===----------------------------------------------------------------------===//
4433 // Local constant propagation.
4534 //
2222 LibCallSemantics.cpp
2323 Lint.cpp
2424 LiveValues.cpp
25 Loads.cpp
2526 LoopDependenceAnalysis.cpp
2627 LoopInfo.cpp
2728 LoopPass.cpp
0 //===- Loads.cpp - Local load analysis ------------------------------------===//
1 //
2 // The LLVM Compiler Infrastructure
3 //
4 // This file is distributed under the University of Illinois Open Source
5 // License. See LICENSE.TXT for details.
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file defines simple local analyses for load instructions.
10 //
11 //===----------------------------------------------------------------------===//
12
13 #include "llvm/Analysis/Loads.h"
14 #include "llvm/Analysis/AliasAnalysis.h"
15 #include "llvm/Target/TargetData.h"
16 #include "llvm/GlobalAlias.h"
17 #include "llvm/GlobalVariable.h"
18 #include "llvm/IntrinsicInst.h"
19 using namespace llvm;
20
21 /// AreEquivalentAddressValues - Test if A and B will obviously have the same
22 /// value. This includes recognizing that %t0 and %t1 will have the same
23 /// value in code like this:
24 /// %t0 = getelementptr \@a, 0, 3
25 /// store i32 0, i32* %t0
26 /// %t1 = getelementptr \@a, 0, 3
27 /// %t2 = load i32* %t1
28 ///
29 static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
30 // Test if the values are trivially equivalent.
31 if (A == B) return true;
32
33 // Test if the values come from identical arithmetic instructions.
34 // Use isIdenticalToWhenDefined instead of isIdenticalTo because
35 // this function is only used when one address use dominates the
36 // other, which means that they'll always either have the same
37 // value or one of them will have an undefined value.
38 if (isa(A) || isa(A) ||
39 isa(A) || isa(A))
40 if (const Instruction *BI = dyn_cast(B))
41 if (cast(A)->isIdenticalToWhenDefined(BI))
42 return true;
43
44 // Otherwise they may not be equivalent.
45 return false;
46 }
47
48 /// getUnderlyingObjectWithOffset - Strip off up to MaxLookup GEPs and
49 /// bitcasts to get back to the underlying object being addressed, keeping
50 /// track of the offset in bytes from the GEPs relative to the result.
51 /// This is closely related to Value::getUnderlyingObject but is located
52 /// here to avoid making VMCore depend on TargetData.
53 static Value *getUnderlyingObjectWithOffset(Value *V, const TargetData *TD,
54 uint64_t &ByteOffset,
55 unsigned MaxLookup = 6) {
56 if (!V->getType()->isPointerTy())
57 return V;
58 for (unsigned Count = 0; MaxLookup == 0 || Count < MaxLookup; ++Count) {
59 if (GEPOperator *GEP = dyn_cast(V)) {
60 if (!GEP->hasAllConstantIndices())
61 return V;
62 SmallVector Indices(GEP->op_begin() + 1, GEP->op_end());
63 ByteOffset += TD->getIndexedOffset(GEP->getPointerOperandType(),
64 &Indices[0], Indices.size());
65 V = GEP->getPointerOperand();
66 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
67 V = cast(V)->getOperand(0);
68 } else if (GlobalAlias *GA = dyn_cast(V)) {
69 if (GA->mayBeOverridden())
70 return V;
71 V = GA->getAliasee();
72 } else {
73 return V;
74 }
75 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
76 }
77 return V;
78 }
79
80 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
81 /// from this value cannot trap. If it is not obviously safe to load from the
82 /// specified pointer, we do a quick local scan of the basic block containing
83 /// ScanFrom, to determine if the address is already accessed.
84 bool llvm::isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
85 unsigned Align, const TargetData *TD) {
86 uint64_t ByteOffset = 0;
87 Value *Base = V;
88 if (TD)
89 Base = getUnderlyingObjectWithOffset(V, TD, ByteOffset);
90
91 const Type *BaseType = 0;
92 unsigned BaseAlign = 0;
93 if (const AllocaInst *AI = dyn_cast(Base)) {
94 // An alloca is safe to load from as load as it is suitably aligned.
95 BaseType = AI->getAllocatedType();
96 BaseAlign = AI->getAlignment();
97 } else if (const GlobalValue *GV = dyn_cast(Base)) {
98 // Global variables are safe to load from but their size cannot be
99 // guaranteed if they are overridden.
100 if (!isa(GV) && !GV->mayBeOverridden()) {
101 BaseType = GV->getType()->getElementType();
102 BaseAlign = GV->getAlignment();
103 }
104 }
105
106 if (BaseType && BaseType->isSized()) {
107 if (TD && BaseAlign == 0)
108 BaseAlign = TD->getPrefTypeAlignment(BaseType);
109
110 if (Align <= BaseAlign) {
111 if (!TD)
112 return true; // Loading directly from an alloca or global is OK.
113
114 // Check if the load is within the bounds of the underlying object.
115 const PointerType *AddrTy = cast(V->getType());
116 uint64_t LoadSize = TD->getTypeStoreSize(AddrTy->getElementType());
117 if (ByteOffset + LoadSize <= TD->getTypeAllocSize(BaseType) &&
118 (Align == 0 || (ByteOffset % Align) == 0))
119 return true;
120 }
121 }
122
123 // Otherwise, be a little bit aggressive by scanning the local block where we
124 // want to check to see if the pointer is already being loaded or stored
125 // from/to. If so, the previous load or store would have already trapped,
126 // so there is no harm doing an extra load (also, CSE will later eliminate
127 // the load entirely).
128 BasicBlock::iterator BBI = ScanFrom, E = ScanFrom->getParent()->begin();
129
130 while (BBI != E) {
131 --BBI;
132
133 // If we see a free or a call which may write to memory (i.e. which might do
134 // a free) the pointer could be marked invalid.
135 if (isa(BBI) && BBI->mayWriteToMemory() &&
136 !isa(BBI))
137 return false;
138
139 if (LoadInst *LI = dyn_cast(BBI)) {
140 if (AreEquivalentAddressValues(LI->getOperand(0), V)) return true;
141 } else if (StoreInst *SI = dyn_cast(BBI)) {
142 if (AreEquivalentAddressValues(SI->getOperand(1), V)) return true;
143 }
144 }
145 return false;
146 }
147
148 /// FindAvailableLoadedValue - Scan the ScanBB block backwards (starting at the
149 /// instruction before ScanFrom) checking to see if we have the value at the
150 /// memory address *Ptr locally available within a small number of instructions.
151 /// If the value is available, return it.
152 ///
153 /// If not, return the iterator for the last validated instruction that the
154 /// value would be live through. If we scanned the entire block and didn't find
155 /// something that invalidates *Ptr or provides it, ScanFrom would be left at
156 /// begin() and this returns null. ScanFrom could also be left
157 ///
158 /// MaxInstsToScan specifies the maximum instructions to scan in the block. If
159 /// it is set to 0, it will scan the whole block. You can also optionally
160 /// specify an alias analysis implementation, which makes this more precise.
161 Value *llvm::FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB,
162 BasicBlock::iterator &ScanFrom,
163 unsigned MaxInstsToScan,
164 AliasAnalysis *AA) {
165 if (MaxInstsToScan == 0) MaxInstsToScan = ~0U;
166
167 // If we're using alias analysis to disambiguate get the size of *Ptr.
168 unsigned AccessSize = 0;
169 if (AA) {
170 const Type *AccessTy = cast(Ptr->getType())->getElementType();
171 AccessSize = AA->getTypeStoreSize(AccessTy);
172 }
173
174 while (ScanFrom != ScanBB->begin()) {
175 // We must ignore debug info directives when counting (otherwise they
176 // would affect codegen).
177 Instruction *Inst = --ScanFrom;
178 if (isa(Inst))
179 continue;
180
181 // Restore ScanFrom to expected value in case next test succeeds
182 ScanFrom++;
183
184 // Don't scan huge blocks.
185 if (MaxInstsToScan-- == 0) return 0;
186
187 --ScanFrom;
188 // If this is a load of Ptr, the loaded value is available.
189 if (LoadInst *LI = dyn_cast(Inst))
190 if (AreEquivalentAddressValues(LI->getOperand(0), Ptr))
191 return LI;
192
193 if (StoreInst *SI = dyn_cast(Inst)) {
194 // If this is a store through Ptr, the value is available!
195 if (AreEquivalentAddressValues(SI->getOperand(1), Ptr))
196 return SI->getOperand(0);
197
198 // If Ptr is an alloca and this is a store to a different alloca, ignore
199 // the store. This is a trivial form of alias analysis that is important
200 // for reg2mem'd code.
201 if ((isa(Ptr) || isa(Ptr)) &&
202 (isa(SI->getOperand(1)) ||
203 isa(SI->getOperand(1))))
204 continue;
205
206 // If we have alias analysis and it says the store won't modify the loaded
207 // value, ignore the store.
208 if (AA &&
209 (AA->getModRefInfo(SI, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
210 continue;
211
212 // Otherwise the store that may or may not alias the pointer, bail out.
213 ++ScanFrom;
214 return 0;
215 }
216
217 // If this is some other instruction that may clobber Ptr, bail out.
218 if (Inst->mayWriteToMemory()) {
219 // If alias analysis claims that it really won't modify the load,
220 // ignore it.
221 if (AA &&
222 (AA->getModRefInfo(Inst, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
223 continue;
224
225 // May modify the pointer, bail out.
226 ++ScanFrom;
227 return 0;
228 }
229 }
230
231 // Got to the start of the block, we didn't find it, but are done for this
232 // block.
233 return 0;
234 }
1212
1313 #include "InstCombine.h"
1414 #include "llvm/IntrinsicInst.h"
15 #include "llvm/Analysis/Loads.h"
1516 #include "llvm/Target/TargetData.h"
1617 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
1718 #include "llvm/Transforms/Utils/Local.h"
3434 #include "llvm/Analysis/AliasAnalysis.h"
3535 #include "llvm/Analysis/ConstantFolding.h"
3636 #include "llvm/Analysis/Dominators.h"
37 #include "llvm/Analysis/Loads.h"
3738 #include "llvm/Analysis/MemoryBuiltins.h"
3839 #include "llvm/Analysis/MemoryDependenceAnalysis.h"
3940 #include "llvm/Analysis/PHITransAddr.h"
1717 #include "llvm/Pass.h"
1818 #include "llvm/Analysis/InstructionSimplify.h"
1919 #include "llvm/Analysis/LazyValueInfo.h"
20 #include "llvm/Analysis/Loads.h"
2021 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
2122 #include "llvm/Transforms/Utils/Local.h"
2223 #include "llvm/Transforms/Utils/SSAUpdater.h"
5959 #include "llvm/Pass.h"
6060 #include "llvm/Analysis/CaptureTracking.h"
6161 #include "llvm/Analysis/InlineCost.h"
62 #include "llvm/Analysis/Loads.h"
6263 #include "llvm/Support/CallSite.h"
6364 #include "llvm/Support/CFG.h"
6465 #include "llvm/ADT/Statistic.h"
557557
558558
559559 }
560
561
562
563 /// AreEquivalentAddressValues - Test if A and B will obviously have the same
564 /// value. This includes recognizing that %t0 and %t1 will have the same
565 /// value in code like this:
566 /// %t0 = getelementptr \@a, 0, 3
567 /// store i32 0, i32* %t0
568 /// %t1 = getelementptr \@a, 0, 3
569 /// %t2 = load i32* %t1
570 ///
571 static bool AreEquivalentAddressValues(const Value *A, const Value *B) {
572 // Test if the values are trivially equivalent.
573 if (A == B) return true;
574
575 // Test if the values come from identical arithmetic instructions.
576 // Use isIdenticalToWhenDefined instead of isIdenticalTo because
577 // this function is only used when one address use dominates the
578 // other, which means that they'll always either have the same
579 // value or one of them will have an undefined value.
580 if (isa(A) || isa(A) ||
581 isa(A) || isa(A))
582 if (const Instruction *BI = dyn_cast(B))
583 if (cast(A)->isIdenticalToWhenDefined(BI))
584 return true;
585
586 // Otherwise they may not be equivalent.
587 return false;
588 }
589
590 /// FindAvailableLoadedValue - Scan the ScanBB block backwards (starting at the
591 /// instruction before ScanFrom) checking to see if we have the value at the
592 /// memory address *Ptr locally available within a small number of instructions.
593 /// If the value is available, return it.
594 ///
595 /// If not, return the iterator for the last validated instruction that the
596 /// value would be live through. If we scanned the entire block and didn't find
597 /// something that invalidates *Ptr or provides it, ScanFrom would be left at
598 /// begin() and this returns null. ScanFrom could also be left
599 ///
600 /// MaxInstsToScan specifies the maximum instructions to scan in the block. If
601 /// it is set to 0, it will scan the whole block. You can also optionally
602 /// specify an alias analysis implementation, which makes this more precise.
603 Value *llvm::FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB,
604 BasicBlock::iterator &ScanFrom,
605 unsigned MaxInstsToScan,
606 AliasAnalysis *AA) {
607 if (MaxInstsToScan == 0) MaxInstsToScan = ~0U;
608
609 // If we're using alias analysis to disambiguate get the size of *Ptr.
610 unsigned AccessSize = 0;
611 if (AA) {
612 const Type *AccessTy = cast(Ptr->getType())->getElementType();
613 AccessSize = AA->getTypeStoreSize(AccessTy);
614 }
615
616 while (ScanFrom != ScanBB->begin()) {
617 // We must ignore debug info directives when counting (otherwise they
618 // would affect codegen).
619 Instruction *Inst = --ScanFrom;
620 if (isa(Inst))
621 continue;
622
623 // Restore ScanFrom to expected value in case next test succeeds
624 ScanFrom++;
625
626 // Don't scan huge blocks.
627 if (MaxInstsToScan-- == 0) return 0;
628
629 --ScanFrom;
630 // If this is a load of Ptr, the loaded value is available.
631 if (LoadInst *LI = dyn_cast(Inst))
632 if (AreEquivalentAddressValues(LI->getOperand(0), Ptr))
633 return LI;
634
635 if (StoreInst *SI = dyn_cast(Inst)) {
636 // If this is a store through Ptr, the value is available!
637 if (AreEquivalentAddressValues(SI->getOperand(1), Ptr))
638 return SI->getOperand(0);
639
640 // If Ptr is an alloca and this is a store to a different alloca, ignore
641 // the store. This is a trivial form of alias analysis that is important
642 // for reg2mem'd code.
643 if ((isa(Ptr) || isa(Ptr)) &&
644 (isa(SI->getOperand(1)) ||
645 isa(SI->getOperand(1))))
646 continue;
647
648 // If we have alias analysis and it says the store won't modify the loaded
649 // value, ignore the store.
650 if (AA &&
651 (AA->getModRefInfo(SI, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
652 continue;
653
654 // Otherwise the store that may or may not alias the pointer, bail out.
655 ++ScanFrom;
656 return 0;
657 }
658
659 // If this is some other instruction that may clobber Ptr, bail out.
660 if (Inst->mayWriteToMemory()) {
661 // If alias analysis claims that it really won't modify the load,
662 // ignore it.
663 if (AA &&
664 (AA->getModRefInfo(Inst, Ptr, AccessSize) & AliasAnalysis::Mod) == 0)
665 continue;
666
667 // May modify the pointer, bail out.
668 ++ScanFrom;
669 return 0;
670 }
671 }
672
673 // Got to the start of the block, we didn't find it, but are done for this
674 // block.
675 return 0;
676 }
677
3232 #include "llvm/Support/ValueHandle.h"
3333 #include "llvm/Support/raw_ostream.h"
3434 using namespace llvm;
35
36 //===----------------------------------------------------------------------===//
37 // Local analysis.
38 //
39
40 /// getUnderlyingObjectWithOffset - Strip off up to MaxLookup GEPs and
41 /// bitcasts to get back to the underlying object being addressed, keeping
42 /// track of the offset in bytes from the GEPs relative to the result.
43 /// This is closely related to Value::getUnderlyingObject but is located
44 /// here to avoid making VMCore depend on TargetData.
45 static Value *getUnderlyingObjectWithOffset(Value *V, const TargetData *TD,
46 uint64_t &ByteOffset,
47 unsigned MaxLookup = 6) {
48 if (!V->getType()->isPointerTy())
49 return V;
50 for (unsigned Count = 0; MaxLookup == 0 || Count < MaxLookup; ++Count) {
51 if (GEPOperator *GEP = dyn_cast(V)) {
52 if (!GEP->hasAllConstantIndices())
53 return V;
54 SmallVector Indices(GEP->op_begin() + 1, GEP->op_end());
55 ByteOffset += TD->getIndexedOffset(GEP->getPointerOperandType(),
56 &Indices[0], Indices.size());
57 V = GEP->getPointerOperand();
58 } else if (Operator::getOpcode(V) == Instruction::BitCast) {
59 V = cast(V)->getOperand(0);
60 } else if (GlobalAlias *GA = dyn_cast(V)) {
61 if (GA->mayBeOverridden())
62 return V;
63 V = GA->getAliasee();
64 } else {
65 return V;
66 }
67 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
68 }
69 return V;
70 }
71
72 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
73 /// from this value cannot trap. If it is not obviously safe to load from the
74 /// specified pointer, we do a quick local scan of the basic block containing
75 /// ScanFrom, to determine if the address is already accessed.
76 bool llvm::isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
77 unsigned Align, const TargetData *TD) {
78 uint64_t ByteOffset = 0;
79 Value *Base = V;
80 if (TD)
81 Base = getUnderlyingObjectWithOffset(V, TD, ByteOffset);
82
83 const Type *BaseType = 0;
84 unsigned BaseAlign = 0;
85 if (const AllocaInst *AI = dyn_cast(Base)) {
86 // An alloca is safe to load from as load as it is suitably aligned.
87 BaseType = AI->getAllocatedType();
88 BaseAlign = AI->getAlignment();
89 } else if (const GlobalValue *GV = dyn_cast(Base)) {
90 // Global variables are safe to load from but their size cannot be
91 // guaranteed if they are overridden.
92 if (!isa(GV) && !GV->mayBeOverridden()) {
93 BaseType = GV->getType()->getElementType();
94 BaseAlign = GV->getAlignment();
95 }
96 }
97
98 if (BaseType && BaseType->isSized()) {
99 if (TD && BaseAlign == 0)
100 BaseAlign = TD->getPrefTypeAlignment(BaseType);
101
102 if (Align <= BaseAlign) {
103 if (!TD)
104 return true; // Loading directly from an alloca or global is OK.
105
106 // Check if the load is within the bounds of the underlying object.
107 const PointerType *AddrTy = cast(V->getType());
108 uint64_t LoadSize = TD->getTypeStoreSize(AddrTy->getElementType());
109 if (ByteOffset + LoadSize <= TD->getTypeAllocSize(BaseType) &&
110 (Align == 0 || (ByteOffset % Align) == 0))
111 return true;
112 }
113 }
114
115 // Otherwise, be a little bit aggressive by scanning the local block where we
116 // want to check to see if the pointer is already being loaded or stored
117 // from/to. If so, the previous load or store would have already trapped,
118 // so there is no harm doing an extra load (also, CSE will later eliminate
119 // the load entirely).
120 BasicBlock::iterator BBI = ScanFrom, E = ScanFrom->getParent()->begin();
121
122 while (BBI != E) {
123 --BBI;
124
125 // If we see a free or a call which may write to memory (i.e. which might do
126 // a free) the pointer could be marked invalid.
127 if (isa(BBI) && BBI->mayWriteToMemory() &&
128 !isa(BBI))
129 return false;
130
131 if (LoadInst *LI = dyn_cast(BBI)) {
132 if (LI->getOperand(0) == V) return true;
133 } else if (StoreInst *SI = dyn_cast(BBI)) {
134 if (SI->getOperand(1) == V) return true;
135 }
136 }
137 return false;
138 }
139
14035
14136 //===----------------------------------------------------------------------===//
14237 // Local constant propagation.