llvm.org GIT mirror llvm / a6c1ba4
Expose must/may alias info in MemorySSA.

Summary: Building MemorySSA gathers alias information for Defs/Uses.
Store and expose this information when optimizing uses (when building
MemorySSA), and when optimizing defs or updating uses
(getClobberingMemoryAccess). Current patch does not propagate alias
information through MemoryPhis.

Reviewers: gbiv, dberlin

Subscribers: Prazek, sanjoy, llvm-commits

Differential Revision: https://reviews.llvm.org/D38569

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@327035 91177308-0d34-0410-b5e6-96231b3b80d8

Author: Alina Sbirlea

3 changed files with 324 additions and 42 deletions.
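The patch touches three files: the MemorySSA header, the MemorySSA implementation, and the MemorySSA unit tests. The new client-visible piece is MemoryUseOrDef::getOptimizedAccessType(), which returns an Optional<AliasResult> describing how the optimized (clobbering) access aliases this access; None means the optimized access is liveOnEntry. A minimal usage sketch follows, assuming the usual MemorySSA analysis setup; the function name inspect and the dbgs() reporting are illustrative only, not part of the patch.

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/Support/Debug.h"
using namespace llvm;

static void inspect(MemorySSA &MSSA, MemorySSAWalker &Walker, Instruction &I) {
  auto *MUD = dyn_cast_or_null<MemoryUseOrDef>(MSSA.getMemoryAccess(&I));
  if (!MUD)
    return; // I does not access memory
  // Uses are optimized while MemorySSA is built; defs are optimized lazily,
  // so query the walker first to fill in the cached clobber and its alias info.
  Walker.getClobberingMemoryAccess(&I);
  Optional<AliasResult> AR = MUD->getOptimizedAccessType();
  if (AR == None)
    dbgs() << "optimized access is liveOnEntry\n";
  else if (*AR == MustAlias)
    dbgs() << "clobber must-aliases this access\n";
  else
    dbgs() << "only a may/partial alias relation is known\n";
}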
248248 DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess);
249249
250250 /// \brief Get the instruction that this MemoryUse represents.
251 Instruction *getMemoryInst() const { return MemoryInst; }
251 Instruction *getMemoryInst() const { return MemoryAliasPair.getPointer(); }
252252
253253 /// \brief Get the access that produces the memory state used by this Use.
254254 MemoryAccess *getDefiningAccess() const { return getOperand(0); }
263263 inline MemoryAccess *getOptimized() const;
264264 inline void setOptimized(MemoryAccess *);
265265
266 // Retrieve AliasResult type of the optimized access. Ideally this would be
267 // returned by the caching walker and may go away in the future.
268 Optional<AliasResult> getOptimizedAccessType() const {
269 return threeBitIntToOptionalAliasResult(MemoryAliasPair.getInt());
270 }
271
266272 /// \brief Reset the ID of what this MemoryUse was optimized to, causing it to
267273 /// be rewalked by the walker if necessary.
268274 /// This really should only be called by tests.
274280
275281 MemoryUseOrDef(LLVMContext &C, MemoryAccess *DMA, unsigned Vty,
276282 DeleteValueTy DeleteValue, Instruction *MI, BasicBlock *BB)
277 : MemoryAccess(C, Vty, DeleteValue, BB, 1), MemoryInst(MI) {
283 : MemoryAccess(C, Vty, DeleteValue, BB, 1),
284 MemoryAliasPair(MI, optionalAliasResultToThreeBitInt(MayAlias)) {
278285 setDefiningAccess(DMA);
279286 }
280287
281288 // Use deleteValue() to delete a generic MemoryUseOrDef.
282289 ~MemoryUseOrDef() = default;
283290
284 void setDefiningAccess(MemoryAccess *DMA, bool Optimized = false) {
291 void setOptimizedAccessType(Optional<AliasResult> AR) {
292 MemoryAliasPair.setInt(optionalAliasResultToThreeBitInt(AR));
293 }
294
295 void setDefiningAccess(MemoryAccess *DMA, bool Optimized = false,
296 Optional<AliasResult> AR = MayAlias) {
285297 if (!Optimized) {
286298 setOperand(0, DMA);
287299 return;
288300 }
289301 setOptimized(DMA);
302 setOptimizedAccessType(AR);
290303 }
291304
292305 private:
293 Instruction *MemoryInst;
306 // Pair of memory instruction and Optional<AliasResult> with optimized access.
307 PointerIntPair<Instruction *, 3, int> MemoryAliasPair;
308
309 static int optionalAliasResultToThreeBitInt(Optional<AliasResult> OAR) {
310 if (OAR == None)
311 return 4;
312 return (int)OAR.getValue();
313 }
314
315 static Optional<AliasResult> threeBitIntToOptionalAliasResult(int I) {
316 assert((I <= 4 && I >= 0) &&
317 "Invalid value for converting to an Optional");
318 if (I == 4)
319 return None;
320 return (AliasResult)I;
321 }
294322 };
295323
296324 template <>
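This closes out the MemorySSA.h changes; the remaining hunks are in MemorySSA.cpp and in the unit tests. The packing above stores the Optional<AliasResult> in the integer bits of a PointerIntPair alongside the instruction pointer: the AliasResult enumerators occupy the values 0 through 3, and the value 4 is the sentinel for None (alias relation not yet computed). Below is a free-standing sketch of the same encode/decode scheme, with hypothetical helpers mirroring the private MemoryUseOrDef ones; it assumes the in-tree enumerator values NoAlias=0, MayAlias=1, PartialAlias=2, MustAlias=3.

#include "llvm/ADT/Optional.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include <cassert>
using namespace llvm;

// Hypothetical stand-ins for the private helpers declared above.
static int encodeAR(Optional<AliasResult> OAR) {
  return OAR == None ? 4 : (int)OAR.getValue(); // 4 = "not computed"
}
static Optional<AliasResult> decodeAR(int I) {
  assert(I >= 0 && I <= 4 && "value does not fit the 3-bit encoding");
  return I == 4 ? Optional<AliasResult>() : Optional<AliasResult>((AliasResult)I);
}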
223223 return !(SeqCstUse || MayClobberIsAcquire);
224224 }
225225
226 static bool instructionClobbersQuery(MemoryDef *MD,
227 const MemoryLocation &UseLoc,
228 const Instruction *UseInst,
229 AliasAnalysis &AA) {
226 namespace {
227
228 struct ClobberAlias {
229 bool IsClobber;
230 Optional<AliasResult> AR;
231 };
232
233 } // end anonymous namespace
234
235 // Return a pair of {IsClobber (bool), AR (AliasResult)}. It relies on AR being
236 // ignored if IsClobber = false.
237 static ClobberAlias instructionClobbersQuery(MemoryDef *MD,
238 const MemoryLocation &UseLoc,
239 const Instruction *UseInst,
240 AliasAnalysis &AA) {
230241 Instruction *DefInst = MD->getMemoryInst();
231242 assert(DefInst && "Defining instruction not actually an instruction");
232243 ImmutableCallSite UseCS(UseInst);
244 Optional<AliasResult> AR;
233245
234246 if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(DefInst)) {
235247 // These intrinsics will show up as affecting memory, but they are just
236248 // markers.
237249 switch (II->getIntrinsicID()) {
238250 case Intrinsic::lifetime_start:
239251 if (UseCS)
240 return false;
241 return AA.isMustAlias(MemoryLocation(II->getArgOperand(1)), UseLoc);
252 return {false, NoAlias};
253 AR = AA.alias(MemoryLocation(II->getArgOperand(1)), UseLoc);
254 return {AR == MustAlias, AR};
242255 case Intrinsic::lifetime_end:
243256 case Intrinsic::invariant_start:
244257 case Intrinsic::invariant_end:
245258 case Intrinsic::assume:
246 return false;
259 return {false, NoAlias};
247260 default:
248261 break;
249262 }
250263 }
251264
252265 if (UseCS) {
253266 ModRefInfo I = AA.getModRefInfo(DefInst, UseCS);
254 return isModOrRefSet(I);
267 AR = isMustSet(I) ? MustAlias : MayAlias;
268 return {isModOrRefSet(I), AR};
255269 }
256270
257271 if (auto *DefLoad = dyn_cast<LoadInst>(DefInst))
258272 if (auto *UseLoad = dyn_cast<LoadInst>(UseInst))
259 return !areLoadsReorderable(UseLoad, DefLoad);
260
261 return isModSet(AA.getModRefInfo(DefInst, UseLoc));
262 }
263
264 static bool instructionClobbersQuery(MemoryDef *MD, const MemoryUseOrDef *MU,
265 const MemoryLocOrCall &UseMLOC,
266 AliasAnalysis &AA) {
273 return {!areLoadsReorderable(UseLoad, DefLoad), MayAlias};
274
275 ModRefInfo I = AA.getModRefInfo(DefInst, UseLoc);
276 AR = isMustSet(I) ? MustAlias : MayAlias;
277 return {isModSet(I), AR};
278 }
279
280 static ClobberAlias instructionClobbersQuery(MemoryDef *MD,
281 const MemoryUseOrDef *MU,
282 const MemoryLocOrCall &UseMLOC,
283 AliasAnalysis &AA) {
267284 // FIXME: This is a temporary hack to allow a single instructionClobbersQuery
268285 // to exist while MemoryLocOrCall is pushed through places.
269286 if (UseMLOC.IsCall)
276293 // Return true when MD may alias MU, return false otherwise.
277294 bool MemorySSAUtil::defClobbersUseOrDef(MemoryDef *MD, const MemoryUseOrDef *MU,
278295 AliasAnalysis &AA) {
279 return instructionClobbersQuery(MD, MU, MemoryLocOrCall(MU), AA);
296 return instructionClobbersQuery(MD, MU, MemoryLocOrCall(MU), AA).IsClobber;
280297 }
281298
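Both instructionClobbersQuery overloads now return the {IsClobber, AR} pair, and, per the comment above, AR only carries information when IsClobber is true. Every caller in this patch follows the pattern sketched below; findAndRecordClobber and recordClobber are hypothetical names used only to illustrate the contract.

// Hypothetical consumer, declared only to keep the sketch self-contained.
static void recordClobber(MemoryDef *MD, Optional<AliasResult> AR);

static void findAndRecordClobber(MemoryDef *MD, const MemoryLocation &UseLoc,
                                 const Instruction *UseInst, AliasAnalysis &AA) {
  ClobberAlias CA = instructionClobbersQuery(MD, UseLoc, UseInst, AA);
  if (CA.IsClobber)
    recordClobber(MD, CA.AR); // AR describes how MD aliases UseLoc
  // When IsClobber is false, CA.AR carries no information and must be ignored.
}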
282299 namespace {
291308 const Instruction *Inst = nullptr;
292309 // The MemoryAccess we actually got called with, used to test local domination
293310 const MemoryAccess *OriginalAccess = nullptr;
311 Optional<AliasResult> AR = MayAlias;
294312
295313 UpwardsMemoryQuery() = default;
296314
374392 //
375393 // Also, note that this can't be hoisted out of the `Worklist` loop,
376394 // since MD may only act as a clobber for 1 of N MemoryLocations.
377 FoundClobber =
378 FoundClobber || MSSA.isLiveOnEntryDef(MD) ||
379 instructionClobbersQuery(MD, MAP.second, Query.Inst, AA);
395 FoundClobber = FoundClobber || MSSA.isLiveOnEntryDef(MD);
396 if (!FoundClobber) {
397 ClobberAlias CA =
398 instructionClobbersQuery(MD, MAP.second, Query.Inst, AA);
399 if (CA.IsClobber) {
400 FoundClobber = true;
401 // Not used: CA.AR;
402 }
403 }
380404 }
381405 break;
382406 }
386410
387411 if (auto *MD = dyn_cast<MemoryDef>(MA)) {
388412 (void)MD;
389 assert(!instructionClobbersQuery(MD, MAP.second, Query.Inst, AA) &&
413 assert(!instructionClobbersQuery(MD, MAP.second, Query.Inst, AA)
414 .IsClobber &&
390415 "Found clobber before reaching ClobberAt!");
391416 continue;
392417 }
456481 /// Result of calling walkToPhiOrClobber.
457482 struct UpwardsWalkResult {
458483 /// The "Result" of the walk. Either a clobber, the last thing we walked, or
459 /// both.
484 /// both. Includes alias info when a clobber is found.
460485 MemoryAccess *Result;
461486 bool IsKnownClobber;
487 Optional<AliasResult> AR;
462488 };
463489
464490 /// Walk to the next Phi or Clobber in the def chain starting at Desc.Last.
474500 for (MemoryAccess *Current : def_chain(Desc.Last)) {
475501 Desc.Last = Current;
476502 if (Current == StopAt)
477 return {Current, false};
478
479 if (auto *MD = dyn_cast<MemoryDef>(Current))
480 if (MSSA.isLiveOnEntryDef(MD) ||
481 instructionClobbersQuery(MD, Desc.Loc, Query->Inst, AA))
482 return {MD, true};
503 return {Current, false, MayAlias};
504
505 if (auto *MD = dyn_cast<MemoryDef>(Current)) {
506 if (MSSA.isLiveOnEntryDef(MD))
507 return {MD, true, MustAlias};
508 ClobberAlias CA =
509 instructionClobbersQuery(MD, Desc.Loc, Query->Inst, AA);
510 if (CA.IsClobber)
511 return {MD, true, CA.AR};
512 }
483513 }
484514
485515 assert(isa<MemoryPhi>(Desc.Last) &&
486516 "Ended at a non-clobber that's not a phi?");
487 return {Desc.Last, false};
517 return {Desc.Last, false, MayAlias};
488518 }
489519
490520 void addSearches(MemoryPhi *Phi, SmallVectorImpl<ListIndex> &PausedSearches,
827857 MemoryAccess *Result;
828858 if (WalkResult.IsKnownClobber) {
829859 Result = WalkResult.Result;
860 Q.AR = WalkResult.AR;
830861 } else {
831862 OptznResult OptRes = tryOptimizePhi(cast<MemoryPhi>(FirstDesc.Last),
832863 Current, Q.StartingLoc);
10941125 // This is where the last walk for this memory location ended.
10951126 unsigned long LastKill;
10961127 bool LastKillValid;
1128 Optional<AliasResult> AR;
10971129 };
10981130
10991131 void optimizeUsesInBlock(const BasicBlock *, unsigned long &, unsigned long &,
11531185 }
11541186
11551187 if (isUseTriviallyOptimizableToLiveOnEntry(*AA, MU->getMemoryInst())) {
1156 MU->setDefiningAccess(MSSA->getLiveOnEntryDef(), true);
1188 MU->setDefiningAccess(MSSA->getLiveOnEntryDef(), true, None);
11571189 continue;
11581190 }
11591191
11951227 if (!LocInfo.LastKillValid) {
11961228 LocInfo.LastKill = VersionStack.size() - 1;
11971229 LocInfo.LastKillValid = true;
1230 LocInfo.AR = MayAlias;
11981231 }
11991232
12001233 // At this point, we should have corrected last kill and LowerBound to be
12381271 // Reset UpperBound to liveOnEntryDef's place in the stack
12391272 UpperBound = 0;
12401273 FoundClobberResult = true;
1274 LocInfo.AR = MustAlias;
12411275 break;
12421276 }
1243 if (instructionClobbersQuery(MD, MU, UseMLOC, *AA)) {
1277 ClobberAlias CA = instructionClobbersQuery(MD, MU, UseMLOC, *AA);
1278 if (CA.IsClobber) {
12441279 FoundClobberResult = true;
1280 LocInfo.AR = CA.AR;
12451281 break;
12461282 }
12471283 --UpperBound;
12481284 }
1285
1286 // Note: Phis always have AliasResult AR set to MayAlias ATM.
1287
12491288 // At the end of this loop, UpperBound is either a clobber, or lower bound
12501289 // PHI walking may cause it to be < LowerBound, and in fact, < LastKill.
12511290 if (FoundClobberResult || UpperBound < LocInfo.LastKill) {
1252 MU->setDefiningAccess(VersionStack[UpperBound], true);
12531291 // We were last killed now by where we got to
1292 if (MSSA->isLiveOnEntryDef(VersionStack[UpperBound]))
1293 LocInfo.AR = None;
1294 MU->setDefiningAccess(VersionStack[UpperBound], true, LocInfo.AR);
12541295 LocInfo.LastKill = UpperBound;
12551296 } else {
12561297 // Otherwise, we checked all the new ones, and now we know we can get to
12571298 // LastKill.
1258 MU->setDefiningAccess(VersionStack[LocInfo.LastKill], true);
1299 MU->setDefiningAccess(VersionStack[LocInfo.LastKill], true, LocInfo.AR);
12591300 }
12601301 LocInfo.LowerBound = VersionStack.size() - 1;
12611302 LocInfo.LowerBoundBlock = BB;
20252066 return MA;
20262067
20272068 // If this is an already optimized use or def, return the optimized result.
2028 // Note: Currently, we do not store the optimized def result because we'd need
2029 // a separate field, since we can't use it as the defining access.
2069 // Note: Currently, we store the optimized def result in a separate field,
2070 // since we can't store it as the defining access.
20302071 if (StartingAccess->isOptimized())
20312072 return StartingAccess->getOptimized();
20322073
20402081
20412082 if (isUseTriviallyOptimizableToLiveOnEntry(*MSSA->AA, I)) {
20422083 MemoryAccess *LiveOnEntry = MSSA->getLiveOnEntryDef();
2043 if (auto *MUD = dyn_cast<MemoryUseOrDef>(StartingAccess))
2084 if (auto *MUD = dyn_cast<MemoryUseOrDef>(StartingAccess)) {
20442085 MUD->setOptimized(LiveOnEntry);
2086 MUD->setOptimizedAccessType(None);
2087 }
20452088 return LiveOnEntry;
20462089 }
20472090
20502093
20512094 // At this point, DefiningAccess may be the live on entry def.
20522095 // If it is, we will not get a better result.
2053 if (MSSA->isLiveOnEntryDef(DefiningAccess))
2096 if (MSSA->isLiveOnEntryDef(DefiningAccess)) {
2097 if (auto *MUD = dyn_cast<MemoryUseOrDef>(StartingAccess)) {
2098 MUD->setOptimized(DefiningAccess);
2099 MUD->setOptimizedAccessType(None);
2100 }
20542101 return DefiningAccess;
2102 }
20552103
20562104 MemoryAccess *Result = getClobberingMemoryAccess(DefiningAccess, Q);
20572105 DEBUG(dbgs() << "Starting Memory SSA clobber for " << *I << " is ");
20582106 DEBUG(dbgs() << *DefiningAccess << "\n");
20592107 DEBUG(dbgs() << "Final Memory SSA clobber for " << *I << " is ");
20602108 DEBUG(dbgs() << *Result << "\n");
2061 if (auto *MUD = dyn_cast<MemoryUseOrDef>(StartingAccess))
2109
2110 if (auto *MUD = dyn_cast<MemoryUseOrDef>(StartingAccess)) {
20622111 MUD->setOptimized(Result);
2112 if (MSSA->isLiveOnEntryDef(Result))
2113 MUD->setOptimizedAccessType(None);
2114 else if (Q.AR == MustAlias)
2115 MUD->setOptimizedAccessType(MustAlias);
2116 }
20632117
20642118 return Result;
20652119 }
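That is the end of the MemorySSA.cpp changes; the hunks below are the new unit tests. They exercise exactly the caching behavior above: after the walker runs on a def, the stored access type is None when the optimized access is liveOnEntry, MustAlias when the walker proved a must-aliasing clobber, and the MayAlias default otherwise. A short sketch of that summary, using a hypothetical helper that is not part of the patch:

#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemorySSA.h"
using namespace llvm;

static const char *describeOptimizedDef(MemoryDef *MD) {
  if (!MD->isOptimized())
    return "not optimized yet (access type defaults to MayAlias)";
  Optional<AliasResult> AR = MD->getOptimizedAccessType();
  if (AR == None)
    return "optimized to liveOnEntry";
  return *AR == MustAlias ? "must-alias clobber" : "may-alias clobber";
}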
998998 MSSA.getLiveOnEntryDef())
999999 << "(DefX1 = " << DefX1 << ")";
10001000 }
1001
1002 // Test Must alias for optimized uses
1003 TEST_F(MemorySSATest, TestLoadMustAlias) {
1004 F = Function::Create(FunctionType::get(B.getVoidTy(), {}, false),
1005 GlobalValue::ExternalLinkage, "F", &M);
1006 B.SetInsertPoint(BasicBlock::Create(C, "", F));
1007 Type *Int8 = Type::getInt8Ty(C);
1008 Value *AllocaA = B.CreateAlloca(Int8, ConstantInt::get(Int8, 1), "A");
1009 Value *AllocaB = B.CreateAlloca(Int8, ConstantInt::get(Int8, 1), "B");
1010
1011 B.CreateStore(ConstantInt::get(Int8, 1), AllocaB);
1012 // Check load from LOE
1013 LoadInst *LA1 = B.CreateLoad(AllocaA, "");
1014 // Check load alias cached for second load
1015 LoadInst *LA2 = B.CreateLoad(AllocaA, "");
1016
1017 B.CreateStore(ConstantInt::get(Int8, 1), AllocaA);
1018 // Check load from store/def
1019 LoadInst *LA3 = B.CreateLoad(AllocaA, "");
1020 // Check load alias cached for second load
1021 LoadInst *LA4 = B.CreateLoad(AllocaA, "");
1022
1023 setupAnalyses();
1024 MemorySSA &MSSA = *Analyses->MSSA;
1025
1026 unsigned I = 0;
1027 for (LoadInst *V : {LA1, LA2}) {
1028 MemoryUse *MemUse = dyn_cast_or_null<MemoryUse>(MSSA.getMemoryAccess(V));
1029 EXPECT_EQ(MemUse->getOptimizedAccessType(), None)
1030 << "Load " << I << " doesn't have the correct alias information";
1031 // EXPECT_EQ expands such that if we increment I above, it won't get
1032 // incremented except when we try to print the error message.
1033 ++I;
1034 }
1035 for (LoadInst *V : {LA3, LA4}) {
1036 MemoryUse *MemUse = dyn_cast_or_null<MemoryUse>(MSSA.getMemoryAccess(V));
1037 EXPECT_EQ(MemUse->getOptimizedAccessType(), MustAlias)
1038 << "Load " << I << " doesn't have the correct alias information";
1039 // EXPECT_EQ expands such that if we increment I above, it won't get
1040 // incremented except when we try to print the error message.
1041 ++I;
1042 }
1043 }
1044
1045 // Test Must alias for optimized defs.
1046 TEST_F(MemorySSATest, TestStoreMustAlias) {
1047 F = Function::Create(FunctionType::get(B.getVoidTy(), {}, false),
1048 GlobalValue::ExternalLinkage, "F", &M);
1049 B.SetInsertPoint(BasicBlock::Create(C, "", F));
1050 Type *Int8 = Type::getInt8Ty(C);
1051 Value *AllocaA = B.CreateAlloca(Int8, ConstantInt::get(Int8, 1), "A");
1052 Value *AllocaB = B.CreateAlloca(Int8, ConstantInt::get(Int8, 1), "B");
1053 StoreInst *SA1 = B.CreateStore(ConstantInt::get(Int8, 1), AllocaA);
1054 StoreInst *SB1 = B.CreateStore(ConstantInt::get(Int8, 1), AllocaB);
1055 StoreInst *SA2 = B.CreateStore(ConstantInt::get(Int8, 2), AllocaA);
1056 StoreInst *SB2 = B.CreateStore(ConstantInt::get(Int8, 2), AllocaB);
1057 StoreInst *SA3 = B.CreateStore(ConstantInt::get(Int8, 3), AllocaA);
1058 StoreInst *SB3 = B.CreateStore(ConstantInt::get(Int8, 3), AllocaB);
1059
1060 setupAnalyses();
1061 MemorySSA &MSSA = *Analyses->MSSA;
1062 MemorySSAWalker *Walker = Analyses->Walker;
1063
1064 unsigned I = 0;
1065 for (StoreInst *V : {SA1, SB1, SA2, SB2, SA3, SB3}) {
1066 MemoryDef *MemDef = dyn_cast_or_null<MemoryDef>(MSSA.getMemoryAccess(V));
1067 EXPECT_EQ(MemDef->isOptimized(), false)
1068 << "Store " << I << " is optimized from the start?";
1069 EXPECT_EQ(MemDef->getOptimizedAccessType(), MayAlias)
1070 << "Store " << I
1071 << " has correct alias information before being optimized?";
1072 if (V == SA1)
1073 Walker->getClobberingMemoryAccess(V);
1074 else {
1075 MemoryAccess *Def = MemDef->getDefiningAccess();
1076 MemoryAccess *Clob = Walker->getClobberingMemoryAccess(V);
1077 EXPECT_NE(Def, Clob) << "Store " << I
1078 << " has Defining Access equal to Clobbering Access";
1079 }
1080 EXPECT_EQ(MemDef->isOptimized(), true)
1081 << "Store " << I << " was not optimized";
1082 if (I == 0 || I == 1)
1083 EXPECT_EQ(MemDef->getOptimizedAccessType(), None)
1084 << "Store " << I << " doesn't have the correct alias information";
1085 else
1086 EXPECT_EQ(MemDef->getOptimizedAccessType(), MustAlias)
1087 << "Store " << I << " doesn't have the correct alias information";
1088 // EXPECT_EQ expands such that if we increment I above, it won't get
1089 // incremented except when we try to print the error message.
1090 ++I;
1091 }
1092 }
1093
1094 // Test May alias for optimized uses.
1095 TEST_F(MemorySSATest, TestLoadMayAlias) {
1096 F = Function::Create(FunctionType::get(B.getVoidTy(),
1097 {B.getInt8PtrTy(), B.getInt8PtrTy()},
1098 false),
1099 GlobalValue::ExternalLinkage, "F", &M);
1100 B.SetInsertPoint(BasicBlock::Create(C, "", F));
1101 Type *Int8 = Type::getInt8Ty(C);
1102 auto *ArgIt = F->arg_begin();
1103 Argument *PointerA = &*ArgIt;
1104 Argument *PointerB = &*(++ArgIt);
1105 B.CreateStore(ConstantInt::get(Int8, 1), PointerB);
1106 LoadInst *LA1 = B.CreateLoad(PointerA, "");
1107 B.CreateStore(ConstantInt::get(Int8, 0), PointerA);
1108 LoadInst *LB1 = B.CreateLoad(PointerB, "");
1109 B.CreateStore(ConstantInt::get(Int8, 0), PointerA);
1110 LoadInst *LA2 = B.CreateLoad(PointerA, "");
1111 B.CreateStore(ConstantInt::get(Int8, 0), PointerB);
1112 LoadInst *LB2 = B.CreateLoad(PointerB, "");
1113
1114 setupAnalyses();
1115 MemorySSA &MSSA = *Analyses->MSSA;
1116
1117 unsigned I = 0;
1118 for (LoadInst *V : {LA1, LB1}) {
1119 MemoryUse *MemUse = dyn_cast_or_null<MemoryUse>(MSSA.getMemoryAccess(V));
1120 EXPECT_EQ(MemUse->getOptimizedAccessType(), MayAlias)
1121 << "Load " << I << " doesn't have the correct alias information";
1122 // EXPECT_EQ expands such that if we increment I above, it won't get
1123 // incremented except when we try to print the error message.
1124 ++I;
1125 }
1126 for (LoadInst *V : {LA2, LB2}) {
1127 MemoryUse *MemUse = dyn_cast_or_null<MemoryUse>(MSSA.getMemoryAccess(V));
1128 EXPECT_EQ(MemUse->getOptimizedAccessType(), MustAlias)
1129 << "Load " << I << " doesn't have the correct alias information";
1130 // EXPECT_EQ expands such that if we increment I above, it won't get
1131 // incremented except when we try to print the error message.
1132 ++I;
1133 }
1134 }
1135
1136 // Test May alias for optimized defs.
1137 TEST_F(MemorySSATest, TestStoreMayAlias) {
1138 F = Function::Create(FunctionType::get(B.getVoidTy(),
1139 {B.getInt8PtrTy(), B.getInt8PtrTy()},
1140 false),
1141 GlobalValue::ExternalLinkage, "F", &M);
1142 B.SetInsertPoint(BasicBlock::Create(C, "", F));
1143 Type *Int8 = Type::getInt8Ty(C);
1144 auto *ArgIt = F->arg_begin();
1145 Argument *PointerA = &*ArgIt;
1146 Argument *PointerB = &*(++ArgIt);
1147 Value *AllocaC = B.CreateAlloca(Int8, ConstantInt::get(Int8, 1), "C");
1148 // Store into arg1; no clobber above it, so it is optimized to LOE => None
1149 StoreInst *SA1 = B.CreateStore(ConstantInt::get(Int8, 0), PointerA);
1150 // Store into arg2, may alias store to arg1 => may
1151 StoreInst *SB1 = B.CreateStore(ConstantInt::get(Int8, 1), PointerB);
1152 // Store into alloca, no alias with args and no clobber above => LOE => None
1153 StoreInst *SC1 = B.CreateStore(ConstantInt::get(Int8, 2), AllocaC);
1154 // Store into arg1, may alias store to arg2 => may
1155 StoreInst *SA2 = B.CreateStore(ConstantInt::get(Int8, 3), PointerA);
1156 // Store into arg2, may alias store to arg1 => may
1157 StoreInst *SB2 = B.CreateStore(ConstantInt::get(Int8, 4), PointerB);
1158 // Store into alloca, no alias with args, so must alias SC1 => must
1159 StoreInst *SC2 = B.CreateStore(ConstantInt::get(Int8, 5), AllocaC);
1160 // Store into arg2, must alias store to arg2 => must
1161 StoreInst *SB3 = B.CreateStore(ConstantInt::get(Int8, 6), PointerB);
1162 std::initializer_list<StoreInst *> Sts = {SA1, SB1, SC1, SA2, SB2, SC2, SB3};
1163
1164 setupAnalyses();
1165 MemorySSA &MSSA = *Analyses->MSSA;
1166 MemorySSAWalker *Walker = Analyses->Walker;
1167
1168 unsigned I = 0;
1169 for (StoreInst *V : Sts) {
1170 MemoryDef *MemDef = dyn_cast_or_null<MemoryDef>(MSSA.getMemoryAccess(V));
1171 EXPECT_EQ(MemDef->isOptimized(), false)
1172 << "Store " << I << " is optimized from the start?";
1173 EXPECT_EQ(MemDef->getOptimizedAccessType(), MayAlias)
1174 << "Store " << I
1175 << " has correct alias information before being optimized?";
1176 ++I;
1177 }
1178
1179 for (StoreInst *V : Sts)
1180 Walker->getClobberingMemoryAccess(V);
1181
1182 I = 0;
1183 for (StoreInst *V : Sts) {
1184 MemoryDef *MemDef = dyn_cast_or_null<MemoryDef>(MSSA.getMemoryAccess(V));
1185 EXPECT_EQ(MemDef->isOptimized(), true)
1186 << "Store " << I << " was not optimized";
1187 if (I == 1 || I == 3 || I == 4)
1188 EXPECT_EQ(MemDef->getOptimizedAccessType(), MayAlias)
1189 << "Store " << I << " doesn't have the correct alias information";
1190 else if (I == 0 || I == 2)
1191 EXPECT_EQ(MemDef->getOptimizedAccessType(), None)
1192 << "Store " << I << " doesn't have the correct alias information";
1193 else
1194 EXPECT_EQ(MemDef->getOptimizedAccessType(), MustAlias)
1195 << "Store " << I << " doesn't have the correct alias information";
1196 // EXPECT_EQ expands such that if we increment I above, it won't get
1197 // incremented except when we try to print the error message.
1198 ++I;
1199 }
1200 }