llvm.org GIT mirror llvm / 5401ba7
Update SetVector to rely on the underlying set's insert to return a pair<iterator, bool>. This is to be consistent with StringSet and ultimately with the standard library's associative container insert function. This led to updating SmallSet::insert to return pair<iterator, bool>, and then to update SmallPtrSet::insert to return pair<iterator, bool>, and then to update all the existing users of those functions... git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@222334 91177308-0d34-0410-b5e6-96231b3b80d8 David Blaikie 5 years ago
107 changed file(s) with 255 addition(s) and 228 deletion(s).
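To make the mechanical change in the hunks below easier to follow, here is an illustrative sketch (not taken from the patch; the helper name and the use of a Value set are assumptions): insert() now returns std::pair<iterator, bool>, so callers that only care about "was this element new?" read the .second member instead of the old bool return.

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Value.h"
    using namespace llvm;

    // Hypothetical helper showing the new call-site pattern.
    static bool markVisited(SmallPtrSet<const Value *, 8> &Visited, const Value *V) {
      // Before r222334: SmallPtrSet::insert returned bool directly:
      //   return Visited.insert(V);
      // After: it returns std::pair<iterator, bool>, like std::set::insert:
      return Visited.insert(V).second;
    }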
5656 // Return true if edge destination should be visited.
5757 template <typename NodeType>
5858 bool insertEdge(NodeType *From, NodeType *To) {
59 return Visited.insert(To);
59 return Visited.insert(To).second;
6060 }
6161
6262 // Called after all children of BB have been visited.
9999 /// \brief Insert a new element into the SetVector.
100100 /// \returns true iff the element was inserted into the SetVector.
101101 bool insert(const value_type &X) {
102 bool result = set_.insert(X);
102 bool result = set_.insert(X).second;
103103 if (result)
104104 vector_.push_back(X);
105105 return result;
109109 template <typename It>
110110 void insert(It Start, It End) {
111111 for (; Start != End; ++Start)
112 if (set_.insert(*Start))
112 if (set_.insert(*Start).second)
113113 vector_.push_back(*Start);
114114 }
115115
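For context, a minimal usage sketch (assumed example, not from the patch): SetVector::insert keeps returning a plain bool; only its body changes, because the underlying set's insert now yields a pair and the bool has to be read from .second.

    #include "llvm/ADT/SetVector.h"
    using namespace llvm;

    void setVectorDemo() { // hypothetical demo function
      SetVector<unsigned> SV;
      SV.insert(1);
      SV.insert(2);
      bool Inserted = SV.insert(1); // false: 1 is already present, vector_ untouched
      (void)Inserted;
      // Iteration visits elements in insertion order: 1, 2.
    }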
9999 /// insert_imp - This returns true if the pointer was new to the set, false if
100100 /// it was already in the set. This is hidden from the client so that the
101101 /// derived class can check that the right type of pointer is passed in.
102 bool insert_imp(const void * Ptr);
102 std::pair<const void *const *, bool> insert_imp(const void *Ptr);
103103
104104 /// erase_imp - If the set contains the specified pointer, remove it and
105105 /// return true, otherwise return false. This is hidden from the client so
252252 : SmallPtrSetImplBase(SmallStorage, SmallSize) {}
253253
254254 public:
255 typedef SmallPtrSetIterator<PtrType> iterator;
256 typedef SmallPtrSetIterator<PtrType> const_iterator;
257
255258 /// insert - This returns true if the pointer was new to the set, false if it
256259 /// was already in the set.
257 bool insert(PtrType Ptr) {
258 return insert_imp(PtrTraits::getAsVoidPointer(Ptr));
260 std::pair<iterator, bool> insert(PtrType Ptr) {
261 auto p = insert_imp(PtrTraits::getAsVoidPointer(Ptr));
262 return std::make_pair(iterator(p.first, CurArray + CurArraySize), p.second);
259263 }
260264
261265 /// erase - If the set contains the specified pointer, remove it and return
275279 insert(*I);
276280 }
277281
278 typedef SmallPtrSetIterator<PtrType> iterator;
279 typedef SmallPtrSetIterator<PtrType> const_iterator;
280282 inline iterator begin() const {
281283 return iterator(CurArray, CurArray+CurArraySize);
282284 }
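A hedged sketch of what the widened return type allows at call sites (illustrative only; the demo function and chosen types are assumptions): both halves of the returned pair are usable, with .first an iterator positioned at the stored pointer and .second the familiar "newly inserted" flag.

    #include "llvm/ADT/SmallPtrSet.h"
    #include "llvm/IR/Instruction.h"
    using namespace llvm;

    void smallPtrSetDemo(Instruction *I) { // hypothetical
      SmallPtrSet<Instruction *, 16> Seen;
      auto Res = Seen.insert(I); // std::pair<iterator, bool>
      if (Res.second) {
        // I was newly inserted; *Res.first is the stored pointer.
      }
    }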
1313 #ifndef LLVM_ADT_SMALLSET_H
1414 #define LLVM_ADT_SMALLSET_H
1515
16 #include "llvm/ADT/None.h"
1617 #include "llvm/ADT/SmallPtrSet.h"
1718 #include "llvm/ADT/SmallVector.h"
1819 #include
5960
6061 /// insert - Insert an element into the set if it isn't already there.
6162 /// Returns true if the element is inserted (it was not in the set before).
62 bool insert(const T &V) {
63 /// The first value of the returned pair is unused and provided for
64 /// partial compatibility with the standard library self-associative container
65 /// concept.
66 // FIXME: Add iterators that abstract over the small and large form, and then
67 // return those here.
68 std::pair<NoneType, bool> insert(const T &V) {
6369 if (!isSmall())
64 return Set.insert(V).second;
70 return std::make_pair(None, Set.insert(V).second);
6571
6672 VIterator I = vfind(V);
6773 if (I != Vector.end()) // Don't reinsert if it already exists.
68 return false;
74 return std::make_pair(None, false);
6975 if (Vector.size() < N) {
7076 Vector.push_back(V);
71 return true;
77 return std::make_pair(None, true);
7278 }
7379
7480 // Otherwise, grow from vector to set.
7783 Vector.pop_back();
7884 }
7985 Set.insert(V);
80 return true;
86 return std::make_pair(None, true);
8187 }
8288
8389 template
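Illustrative sketch (assumed example, not from the patch): SmallSet has no iterator that abstracts over its small (vector) and large (set) forms yet, so the first member of the returned pair is a NoneType placeholder and only .second carries information, as the FIXME above notes.

    #include "llvm/ADT/SmallSet.h"
    using namespace llvm;

    void smallSetDemo() { // hypothetical
      SmallSet<unsigned, 4> Regs;
      // Only .second is meaningful; .first is the unused None placeholder.
      if (Regs.insert(42).second) {
        // 42 was not in the set before.
      }
    }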
171171 DomSetType &S = this->Frontiers[currentBB];
172172
173173 // Visit each block only once.
174 if (visited.insert(currentBB)) {
174 if (visited.insert(currentBB).second) {
175175 // Loop over CFG successors to calculate DFlocal[currentNode]
176176 for (auto SI = BlockTraits::child_begin(currentBB),
177177 SE = BlockTraits::child_end(currentBB);
576576 SmallPtrSet Visited;
577577
578578 void push(const SCEV *S) {
579 if (Visited.insert(S) && Visitor.follow(S))
579 if (Visited.insert(S).second && Visitor.follow(S))
580580 Worklist.push_back(S);
581581 }
582582 public:
302302 void recordSplitCriticalEdge(MachineBasicBlock *FromBB,
303303 MachineBasicBlock *ToBB,
304304 MachineBasicBlock *NewBB) {
305 bool Inserted = NewBBs.insert(NewBB);
305 bool Inserted = NewBBs.insert(NewBB).second;
306306 (void)Inserted;
307307 assert(Inserted &&
308308 "A basic block inserted via edge splitting cannot appear twice");
124124 typename GraphT::NodeType* VAncestor = DT.Vertex[VInfo.Parent];
125125
126126 // Process Ancestor first
127 if (Visited.insert(VAncestor) && VInfo.Parent >= LastLinked) {
127 if (Visited.insert(VAncestor).second && VInfo.Parent >= LastLinked) {
128128 Work.push_back(VAncestor);
129129 continue;
130130 }
613613 Worklist.push_back(Loc.Ptr);
614614 do {
615615 const Value *V = GetUnderlyingObject(Worklist.pop_back_val(), DL);
616 if (!Visited.insert(V)) {
616 if (!Visited.insert(V).second) {
617617 Visited.clear();
618618 return AliasAnalysis::pointsToConstantMemory(Loc, OrLocal);
619619 }
12341234 // sides are PHI nodes. In which case, this is O(m x n) time where 'm'
12351235 // and 'n' are the number of PHI sources.
12361236 return MayAlias;
1237 if (UniqueSrc.insert(PV1))
1237 if (UniqueSrc.insert(PV1).second)
12381238 V1Srcs.push_back(PV1);
12391239 }
12401240
4444 bool FoundNew = false;
4545 while (I != succ_end(ParentBB)) {
4646 BB = *I++;
47 if (Visited.insert(BB)) {
47 if (Visited.insert(BB).second) {
4848 FoundNew = true;
4949 break;
5050 }
140140 SmallSet Visited;
141141 do {
142142 BasicBlock *BB = Worklist.pop_back_val();
143 if (!Visited.insert(BB))
143 if (!Visited.insert(BB).second)
144144 continue;
145145 if (BB == StopBB)
146146 return true;
238238 if (Count++ >= Threshold)
239239 return Tracker->tooManyUses();
240240
241 if (Visited.insert(&UU))
241 if (Visited.insert(&UU).second)
242242 if (Tracker->shouldExplore(&UU))
243243 Worklist.push_back(&UU);
244244 }
3939 const Value *V = WorkSet.front();
4040 WorkSet.erase(WorkSet.begin());
4141
42 if (!Visited.insert(V))
42 if (!Visited.insert(V).second)
4343 continue;
4444
4545 // If all uses of this value are ephemeral, then so is this value.
980980 // Recursively fold the ConstantExpr's operands. If we have already folded
981981 // a ConstantExpr, we don't have to process it again.
982982 if (ConstantExpr *NewCE = dyn_cast<ConstantExpr>(NewC)) {
983 if (FoldedOps.insert(NewCE))
983 if (FoldedOps.insert(NewCE).second)
984984 NewC = ConstantFoldConstantExpressionImpl(NewCE, TD, TLI, FoldedOps);
985985 }
986986 Ops.push_back(NewC);
976976 break;
977977 }
978978 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
979 } while (Visited.insert(V));
979 } while (Visited.insert(V).second);
980980
981981 Type *IntPtrTy = DL->getIntPtrType(V->getContext());
982982 return cast(ConstantInt::get(IntPtrTy, Offset));
114114 SmallPtrSetImpl &SimpleLoopNests) {
115115 // Add this IV user to the Processed set before returning false to ensure that
116116 // all IV users are members of the set. See IVUsers::isIVUserOrOperand.
117 if (!Processed.insert(I))
117 if (!Processed.insert(I).second)
118118 return true; // Instruction already handled.
119119
120120 if (!SE->isSCEVable(I->getType()))
144144 SmallPtrSet UniqueUsers;
145145 for (Use &U : I->uses()) {
146146 Instruction *User = cast(U.getUser());
147 if (!UniqueUsers.insert(User))
147 if (!UniqueUsers.insert(User).second)
148148 continue;
149149
150150 // Do not infinitely recurse on PHI nodes.
630630 }
631631 assert(V->getType()->getScalarType()->isPointerTy() &&
632632 "Unexpected operand type!");
633 } while (Visited.insert(V));
633 } while (Visited.insert(V).second);
634634
635635 Constant *OffsetIntPtr = ConstantInt::get(IntPtrTy, Offset);
636636 if (V->getType()->isVectorTy())
4747 }
4848
4949 for (Value *Op : C->operand_values())
50 if (Visited.insert(cast<Constant>(Op)))
50 if (Visited.insert(cast<Constant>(Op)).second)
5151 Worklist.push_back(cast<Constant>(Op));
5252 }
5353 }
6565 for (Instruction &I : BB)
6666 for (Value *Op : I.operand_values())
6767 if (Constant *C = dyn_cast<Constant>(Op))
68 if (Visited.insert(C))
68 if (Visited.insert(C).second)
6969 Worklist.push_back(C);
7070
7171 // We've collected all the constant (and thus potentially function or
112112 SmallPtrSet Visited;
113113 for (GlobalVariable &GV : M.globals())
114114 if (GV.hasInitializer())
115 if (Visited.insert(GV.getInitializer()))
115 if (Visited.insert(GV.getInitializer()).second)
116116 Worklist.push_back(GV.getInitializer());
117117
118118 DEBUG(dbgs() << " Adding functions referenced by global initializers to the "
687687 SmallPtrSetImpl &Printed) {
688688 // Recurse depth first through the nodes.
689689 for (LazyCallGraph::Node &ChildN : N)
690 if (Printed.insert(&ChildN))
690 if (Printed.insert(&ChildN).second)
691691 printNodes(OS, ChildN, Printed);
692692
693693 OS << " Call edges in function: " << N.getFunction().getName() << "\n";
716716
717717 SmallPtrSet Printed;
718718 for (LazyCallGraph::Node &N : G)
719 if (Printed.insert(&N))
719 if (Printed.insert(&N).second)
720720 printNodes(OS, N, Printed);
721721
722722 for (LazyCallGraph::SCC &SCC : G.postorder_sccs())
630630 Value *Lint::findValueImpl(Value *V, bool OffsetOk,
631631 SmallPtrSetImpl &Visited) const {
632632 // Detect self-referential values.
633 if (!Visited.insert(V))
633 if (!Visited.insert(V).second)
634634 return UndefValue::get(V->getType());
635635
636636 // TODO: Look through sext or zext cast, when the result is known to
644644 BasicBlock *BB = L->getParent();
645645 SmallPtrSet VisitedBlocks;
646646 for (;;) {
647 if (!VisitedBlocks.insert(BB)) break;
647 if (!VisitedBlocks.insert(BB).second)
648 break;
648649 if (Value *U = FindAvailableLoadedValue(L->getPointerOperand(),
649650 BB, BBI, 6, AA))
650651 return findValueImpl(U, OffsetOk, Visited);
415415 if (Instruction *I = dyn_cast<Instruction>(V)) {
416416 // If we have already seen this instruction, bail out. Cycles can happen in
417417 // unreachable code after constant propagation.
418 if (!SeenInsts.insert(I))
418 if (!SeenInsts.insert(I).second)
419419 return unknown();
420420
421421 if (GEPOperator *GEP = dyn_cast<GEPOperator>(V))
651651 // Record the pointers that were handled in this run, so that they can be
652652 // cleaned later if something fails. We also use this set to break cycles that
653653 // can occur in dead code.
654 if (!SeenVals.insert(V)) {
654 if (!SeenVals.insert(V).second) {
655655 Result = unknown();
656656 } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
657657 Result = visitGEPOperator(*GEP);
775775 DirtyBlocks.pop_back();
776776
777777 // Already processed this block?
778 if (!Visited.insert(DirtyBB))
778 if (!Visited.insert(DirtyBB).second)
779779 continue;
780780
781781 // Do a binary search to see if we already have an entry for this block in
1616
1717 void detail::PtrUseVisitorBase::enqueueUsers(Instruction &I) {
1818 for (Use &U : I.uses()) {
19 if (VisitedUses.insert(&U)) {
19 if (VisitedUses.insert(&U).second) {
2020 UseToVisit NewU = {
2121 UseToVisit::UseAndIsOffsetKnownPair(&U, IsOffsetKnown),
2222 Offset
33943394 Visited.insert(PN);
33953395 while (!Worklist.empty()) {
33963396 Instruction *I = Worklist.pop_back_val();
3397 if (!Visited.insert(I)) continue;
3397 if (!Visited.insert(I).second)
3398 continue;
33983399
33993400 ValueExprMapType::iterator It =
34003401 ValueExprMap.find_as(static_cast<Value *>(I));
45924593 SmallPtrSet Visited;
45934594 while (!Worklist.empty()) {
45944595 Instruction *I = Worklist.pop_back_val();
4595 if (!Visited.insert(I)) continue;
4596 if (!Visited.insert(I).second)
4597 continue;
45964598
45974599 ValueExprMapType::iterator It =
45984600 ValueExprMap.find_as(static_cast<Value *>(I));
46444646 SmallPtrSet Visited;
46454647 while (!Worklist.empty()) {
46464648 Instruction *I = Worklist.pop_back_val();
4647 if (!Visited.insert(I)) continue;
4649 if (!Visited.insert(I).second)
4650 continue;
46484651
46494652 ValueExprMapType::iterator It =
46504653 ValueExprMap.find_as(static_cast<Value *>(I));
46784681 SmallPtrSet Visited;
46794682 while (!Worklist.empty()) {
46804683 I = Worklist.pop_back_val();
4681 if (!Visited.insert(I)) continue;
4684 if (!Visited.insert(I).second)
4685 continue;
46824686
46834687 ValueExprMapType::iterator It =
46844688 ValueExprMap.find_as(static_cast<Value *>(I));
77917795 // that until everything else is done.
77927796 if (U == Old)
77937797 continue;
7794 if (!Visited.insert(U))
7798 if (!Visited.insert(U).second)
77957799 continue;
77967800 if (PHINode *PN = dyn_cast<PHINode>(U))
77977801 SE->ConstantEvolutionLoopExitValue.erase(PN);
14421442 Constant *One = ConstantInt::get(Ty, 1);
14431443 for (pred_iterator HPI = HPB; HPI != HPE; ++HPI) {
14441444 BasicBlock *HP = *HPI;
1445 if (!PredSeen.insert(HP)) {
1445 if (!PredSeen.insert(HP).second) {
14461446 // There must be an incoming value for each predecessor, even the
14471447 // duplicates!
14481448 CanonicalIV->addIncoming(CanonicalIV->getIncomingValueForBlock(HP), HP);
362362 SmallSet Visited;
363363 for (unsigned I = 0, E = Links.size(); I < E; ++I) {
364364 auto CurrentIndex = getHighestParentAbove(I);
365 if (!Visited.insert(CurrentIndex)) {
365 if (!Visited.insert(CurrentIndex).second) {
366366 continue;
367367 }
368368
330330
331331 while (!WorkSet.empty()) {
332332 const Value *V = WorkSet.pop_back_val();
333 if (!Visited.insert(V))
333 if (!Visited.insert(V).second)
334334 continue;
335335
336336 // If all uses of this value are ephemeral, then so is this value.
24042404 // If this is a PHI node, there are two cases: either we have already seen it
24052405 // or we haven't.
24062406 if (PHINode *PN = dyn_cast<PHINode>(V)) {
2407 if (!PHIs.insert(PN))
2407 if (!PHIs.insert(PN).second)
24082408 return ~0ULL; // already in the set.
24092409
24102410 // If it was new, see if all the input strings are the same length.
24982498 Value *P = Worklist.pop_back_val();
24992499 P = GetUnderlyingObject(P, TD, MaxLookup);
25002500
2501 if (!Visited.insert(P))
2501 if (!Visited.insert(P).second)
25022502 continue;
25032503
25042504 if (SelectInst *SI = dyn_cast<SelectInst>(P)) {
37343734 ParseTypeAndBasicBlock(DestBB, PFS))
37353735 return true;
37363736
3737 if (!SeenCases.insert(Constant))
3737 if (!SeenCases.insert(Constant).second)
37383738 return Error(CondLoc, "duplicate case value in switch");
37393739 if (!isa<ConstantInt>(Constant))
37403740 return Error(CondLoc, "case value is not a constant integer");
255255 for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
256256 P != PE; ++P) {
257257 if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
258 if (RegSet.insert(P->getReg()))
258 if (RegSet.insert(P->getReg()).second)
259259 Edges.push_back(&*P);
260260 }
261261 }
11661166 const MCExpr *Base = TLI->getPICJumpTableRelocBaseExpr(MF,JTI,OutContext);
11671167 for (unsigned ii = 0, ee = JTBBs.size(); ii != ee; ++ii) {
11681168 const MachineBasicBlock *MBB = JTBBs[ii];
1169 if (!EmittedSets.insert(MBB)) continue;
1169 if (!EmittedSets.insert(MBB).second)
1170 continue;
11701171
11711172 // .set LJTSet, LBB32-base
11721173 const MCExpr *LHS =
989989 for (unsigned i = 0, e = Variables.getNumElements(); i != e; ++i) {
990990 DIVariable DV(Variables.getElement(i));
991991 assert(DV.isVariable());
992 if (!Processed.insert(DV))
992 if (!Processed.insert(DV).second)
993993 continue;
994994 if (LexicalScope *Scope = LScopes.findLexicalScope(DV.getContext())) {
995995 ensureAbstractVariableIsCreatedIfScoped(DV, Scope->getScopeNode());
12861286 for (unsigned i = 0, e = Variables.getNumElements(); i != e; ++i) {
12871287 DIVariable DV(Variables.getElement(i));
12881288 assert(DV && DV.isVariable());
1289 if (!ProcessedVars.insert(DV))
1289 if (!ProcessedVars.insert(DV).second)
12901290 continue;
12911291 ensureAbstractVariableIsCreated(DV, DV.getContext());
12921292 assert(LScopes.getAbstractScopesList().size() == NumAbstractScopes
915915 continue;
916916
917917 // Visit each predecessor only once.
918 if (!UniquePreds.insert(PBB))
918 if (!UniquePreds.insert(PBB).second)
919919 continue;
920920
921921 // Skip blocks which may jump to a landing pad. Can't tail merge these.
119119 numInstr++;
120120 if (mi->isIdentityCopy() || mi->isImplicitDef() || mi->isDebugValue())
121121 continue;
122 if (!visited.insert(mi))
122 if (!visited.insert(mi).second)
123123 continue;
124124
125125 float weight = 1.0f;
10001000 } else {
10011001 SmallPtrSet VisitedBBs;
10021002 for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB); PI != PE; ++PI) {
1003 if (!VisitedBBs.insert(*PI))
1003 if (!VisitedBBs.insert(*PI).second)
10041004 continue;
10051005
10061006 BasicBlock::InstListType &InstList = (*PI)->getInstList();
24002400 SmallPtrSetImpl &ConsideredInsts,
24012401 const TargetLowering &TLI) {
24022402 // If we already considered this instruction, we're done.
2403 if (!ConsideredInsts.insert(I))
2403 if (!ConsideredInsts.insert(I).second)
24042404 return false;
24052405
24062406 // If this is an obviously unfoldable instruction, bail out.
26142614 worklist.pop_back();
26152615
26162616 // Break use-def graph loops.
2617 if (!Visited.insert(V)) {
2617 if (!Visited.insert(V).second) {
26182618 Consensus = nullptr;
26192619 break;
26202620 }
244244 MachineInstr *DefMI = MRI->getVRegDef(Reg);
245245 if (!DefMI || DefMI->getParent() != Head)
246246 continue;
247 if (InsertAfter.insert(DefMI))
247 if (InsertAfter.insert(DefMI).second)
248248 DEBUG(dbgs() << "BB#" << MBB->getNumber() << " depends on " << *DefMI);
249249 if (DefMI->isTerminator()) {
250250 DEBUG(dbgs() << "Can't insert instructions below terminator.\n");
822822 WorkList.push_back(std::make_pair(LI, VNI));
823823 do {
824824 std::tie(LI, VNI) = WorkList.pop_back_val();
825 if (!UsedValues.insert(VNI))
825 if (!UsedValues.insert(VNI).second)
826826 continue;
827827
828828 if (VNI->isPHIDef()) {
205205 valnos.clear();
206206 for (const_iterator I = begin(), E = end(); I != E; ++I) {
207207 VNInfo *VNI = I->valno;
208 if (!Seen.insert(VNI))
208 if (!Seen.insert(VNI).second)
209209 continue;
210210 assert(!VNI->isUnused() && "Unused valno used by live segment");
211211 VNI->id = (unsigned)valnos.size();
378378 (void)ExtVNI;
379379 assert(ExtVNI == VNI && "Unexpected existing value number");
380380 // Is this a PHIDef we haven't seen before?
381 if (!VNI->isPHIDef() || VNI->def != BlockStart || !UsedPHIs.insert(VNI))
381 if (!VNI->isPHIDef() || VNI->def != BlockStart ||
382 !UsedPHIs.insert(VNI).second)
382383 continue;
383384 // The PHI is live, make sure the predecessors are live-out.
384385 for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
385386 PE = MBB->pred_end(); PI != PE; ++PI) {
386 if (!LiveOut.insert(*PI))
387 if (!LiveOut.insert(*PI).second)
387388 continue;
388389 SlotIndex Stop = getMBBEndIdx(*PI);
389390 // A predecessor is not required to have a live-out value for a PHI.
400401 // Make sure VNI is live-out from the predecessors.
401402 for (MachineBasicBlock::const_pred_iterator PI = MBB->pred_begin(),
402403 PE = MBB->pred_end(); PI != PE; ++PI) {
403 if (!LiveOut.insert(*PI))
404 if (!LiveOut.insert(*PI).second)
404405 continue;
405406 SlotIndex Stop = getMBBEndIdx(*PI);
406407 assert(li->getVNInfoBefore(Stop) == VNI &&
783784 /// Update a single live range, assuming an instruction has been moved from
784785 /// OldIdx to NewIdx.
785786 void updateRange(LiveRange &LR, unsigned Reg) {
786 if (!Updated.insert(&LR))
787 if (!Updated.insert(&LR).second)
787788 return;
788789 DEBUG({
789790 dbgs() << " ";
10651065 MachineBasicBlock::succ_iterator SI = succ_begin();
10661066 while (SI != succ_end()) {
10671067 const MachineBasicBlock *MBB = *SI;
1068 if (!SeenMBBs.insert(MBB) ||
1068 if (!SeenMBBs.insert(MBB).second ||
10691069 (MBB != DestA && MBB != DestB && !MBB->isLandingPad())) {
10701070 // This is a superfluous edge, remove it.
10711071 SI = removeSuccessor(SI);
812812 BE = L.block_end();
813813 BI != BE; ++BI) {
814814 BlockChain &Chain = *BlockToChain[*BI];
815 if (!UpdatedPreds.insert(&Chain))
815 if (!UpdatedPreds.insert(&Chain).second)
816816 continue;
817817
818818 assert(Chain.LoopPredecessors == 0);
913913 for (MachineFunction::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) {
914914 MachineBasicBlock *BB = &*FI;
915915 BlockChain &Chain = *BlockToChain[BB];
916 if (!UpdatedPreds.insert(&Chain))
916 if (!UpdatedPreds.insert(&Chain).second)
917917 continue;
918918
919919 assert(Chain.LoopPredecessors == 0);
140140 // Internal def is now killed.
141141 KilledDefSet.insert(Reg);
142142 } else {
143 if (ExternUseSet.insert(Reg)) {
143 if (ExternUseSet.insert(Reg).second) {
144144 ExternUses.push_back(Reg);
145145 if (MO.isUndef())
146146 UndefUseSet.insert(Reg);
157157 if (!Reg)
158158 continue;
159159
160 if (LocalDefSet.insert(Reg)) {
160 if (LocalDefSet.insert(Reg).second) {
161161 LocalDefs.push_back(Reg);
162162 if (MO.isDead()) {
163163 DeadDefSet.insert(Reg);
173173 if (!MO.isDead()) {
174174 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
175175 unsigned SubReg = *SubRegs;
176 if (LocalDefSet.insert(SubReg))
176 if (LocalDefSet.insert(SubReg).second)
177177 LocalDefs.push_back(SubReg);
178178 }
179179 }
185185 SmallSet Added;
186186 for (unsigned i = 0, e = LocalDefs.size(); i != e; ++i) {
187187 unsigned Reg = LocalDefs[i];
188 if (Added.insert(Reg)) {
188 if (Added.insert(Reg).second) {
189189 // If it's not live beyond end of the bundle, mark it dead.
190190 bool isDead = DeadDefSet.count(Reg) || KilledDefSet.count(Reg);
191191 MIB.addReg(Reg, getDefRegState(true) | getDeadRegState(isDead) |
817817 if (!TargetRegisterInfo::isVirtualRegister(Reg))
818818 continue;
819819
820 bool isNew = RegSeen.insert(Reg);
820 bool isNew = RegSeen.insert(Reg).second;
821821 unsigned RCId, RCCost;
822822 getRegisterClassIDAndCost(MI, Reg, i, RCId, RCCost);
823823 if (MO.isDef())
849849 if (!TargetRegisterInfo::isVirtualRegister(Reg))
850850 continue;
851851
852 bool isNew = RegSeen.insert(Reg);
852 bool isNew = RegSeen.insert(Reg).second;
853853 if (MO.isDef())
854854 Defs.push_back(Reg);
855855 else if (!isNew && isOperandKill(MO, MRI)) {
339339 // If the pass has already considered breaking this edge (during this pass
340340 // through the function), then let's go ahead and break it. This means
341341 // sinking multiple "cheap" instructions into the same block.
342 if (!CEBCandidates.insert(std::make_pair(From, To)))
342 if (!CEBCandidates.insert(std::make_pair(From, To)).second)
343343 return true;
344344
345345 if (!MI->isCopy() && !TII->isAsCheapAsAMove(MI))
448448 }
449449 // To is a new block. Mark the block as visited in case the CFG has cycles
450450 // that MachineLoopInfo didn't recognize as a natural loop.
451 return LB.Visited.insert(To);
451 return LB.Visited.insert(To).second;
452452 }
453453 };
454454 }
9191 unsigned DstReg = MI->getOperand(0).getReg();
9292
9393 // See if we already saw this register.
94 if (!PHIsInCycle.insert(MI))
94 if (!PHIsInCycle.insert(MI).second)
9595 return true;
9696
9797 // Don't scan crazily complex things.
136136 "PHI destination is not a virtual register");
137137
138138 // See if we already saw this register.
139 if (!PHIsInCycle.insert(MI))
139 if (!PHIsInCycle.insert(MI).second)
140140 return true;
141141
142142 // Don't scan crazily complex things.
366366 // Check to make sure we haven't already emitted the copy for this block.
367367 // This can happen because PHI nodes may have multiple entries for the same
368368 // basic block.
369 if (!MBBsInsertedInto.insert(&opBlock))
369 if (!MBBsInsertedInto.insert(&opBlock).second)
370370 continue; // If the copy has already been emitted, we're done.
371371
372372 // Find a safe location to insert the copy, this may be the first terminator
707707 continue;
708708 if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
709709 (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
710 if (ThroughRegs.insert(Reg))
710 if (ThroughRegs.insert(Reg).second)
711711 DEBUG(dbgs() << ' ' << PrintReg(Reg));
712712 }
713713 }
964964 // the UseMI operands removes them from the SrcReg use-def chain, but when
965965 // SrcReg is DstReg we could encounter UseMI twice if it has multiple
966966 // operands mentioning the virtual register.
967 if (SrcReg == DstReg && !Visited.insert(UseMI))
967 if (SrcReg == DstReg && !Visited.insert(UseMI).second)
968968 continue;
969969
970970 SmallVector Ops;
108108 for (SmallVectorImpl::iterator I = Objs.begin(), IE = Objs.end();
109109 I != IE; ++I) {
110110 V = *I;
111 if (!Visited.insert(V))
111 if (!Visited.insert(V).second)
112112 continue;
113113 if (Operator::getOpcode(V) == Instruction::IntToPtr) {
114114 const Value *O =
587587 return *Depth;
588588
589589 // Remember visited nodes.
590 if (!Visited.insert(SUb))
590 if (!Visited.insert(SUb).second)
591591 return *Depth;
592592 // If there is _some_ dependency already in place, do not
593593 // descend any further.
14921492
14931493 default:
14941494 // Only add if it isn't already in the list.
1495 if (SeenOps.insert(Op.getNode()))
1495 if (SeenOps.insert(Op.getNode()).second)
14961496 Ops.push_back(Op);
14971497 else
14981498 Changed = true;
1226612266 }
1226712267
1226812268 // Don't bother if we've been before.
12269 if (!Visited.insert(Chain.getNode()))
12269 if (!Visited.insert(Chain.getNode()).second)
1227012270 continue;
1227112271
1227212272 switch (Chain.getOpcode()) {
1235412354
1235512355 for (SDNode::use_iterator UI = M->use_begin(),
1235612356 UIE = M->use_end(); UI != UIE; ++UI)
12357 if (UI.getUse().getValueType() == MVT::Other && Visited.insert(*UI)) {
12357 if (UI.getUse().getValueType() == MVT::Other &&
12358 Visited.insert(*UI).second) {
1235812359 if (isa(*UI) || isa(*UI)) {
1235912360 // We've not visited this use, and we care about it (it could have an
1236012361 // ordering dependency with the original node).
19751975
19761976 // If this terminator has multiple identical successors (common for
19771977 // switches), only handle each succ once.
1978 if (!SuccsHandled.insert(SuccMBB))
1978 if (!SuccsHandled.insert(SuccMBB).second)
19791979 continue;
19801980
19811981 MachineBasicBlock::iterator MBBI = SuccMBB->begin();
43424342 continue;
43434343 }
43444344
4345 if (LegalizedNodes.insert(N)) {
4345 if (LegalizedNodes.insert(N).second) {
43464346 AnyLegalized = true;
43474347 Legalizer.LegalizeOp(N);
43484348
459459 bool Added = false;
460460 for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
461461 if (LiveRegDefs[*AI] && LiveRegDefs[*AI] != SU) {
462 if (RegAdded.insert(*AI)) {
462 if (RegAdded.insert(*AI).second) {
463463 LRegs.push_back(*AI);
464464 Added = true;
465465 }
12221222 if (LiveRegDefs[*AliasI] == SU) continue;
12231223
12241224 // Add Reg to the set of interfering live regs.
1225 if (RegAdded.insert(*AliasI)) {
1225 if (RegAdded.insert(*AliasI).second) {
12261226 LRegs.push_back(*AliasI);
12271227 }
12281228 }
12391239 if (!LiveRegDefs[i]) continue;
12401240 if (LiveRegDefs[i] == SU) continue;
12411241 if (!MachineOperand::clobbersPhysReg(RegMask, i)) continue;
1242 if (RegAdded.insert(i))
1242 if (RegAdded.insert(i).second)
12431243 LRegs.push_back(i);
12441244 }
12451245 }
13141314 SDNode *Gen = LiveRegGens[CallResource]->getNode();
13151315 while (SDNode *Glued = Gen->getGluedNode())
13161316 Gen = Glued;
1317 if (!IsChainDependent(Gen, Node, 0, TII) && RegAdded.insert(CallResource))
1317 if (!IsChainDependent(Gen, Node, 0, TII) &&
1318 RegAdded.insert(CallResource).second)
13181319 LRegs.push_back(CallResource);
13191320 }
13201321 }
229229 for (SDNode::use_iterator I = Chain->use_begin(), E = Chain->use_end();
230230 I != E && UseCount < 100; ++I, ++UseCount) {
231231 SDNode *User = *I;
232 if (User == Node || !Visited.insert(User))
232 if (User == Node || !Visited.insert(User).second)
233233 continue;
234234 int64_t Offset1, Offset2;
235235 if (!TII->areLoadsFromSameBasePtr(Base, User, Offset1, Offset2) ||
342342
343343 // Add all operands to the worklist unless they've already been added.
344344 for (unsigned i = 0, e = NI->getNumOperands(); i != e; ++i)
345 if (Visited.insert(NI->getOperand(i).getNode()))
345 if (Visited.insert(NI->getOperand(i).getNode()).second)
346346 Worklist.push_back(NI->getOperand(i).getNode());
347347
348348 if (isPassiveNode(NI)) // Leaf node, e.g. a TargetImmediate.
736736 SmallVectorImpl > &Orders,
737737 SmallSet &Seen) {
738738 unsigned Order = N->getIROrder();
739 if (!Order || !Seen.insert(Order)) {
739 if (!Order || !Seen.insert(Order).second) {
740740 // Process any valid SDDbgValues even if node does not have any order
741741 // assigned.
742742 ProcessSDDbgValues(N, DAG, Emitter, Orders, VRBaseMap, 0);
63846384 const SDNode *M = Worklist.pop_back_val();
63856385 for (unsigned i = 0, e = M->getNumOperands(); i != e; ++i) {
63866386 SDNode *Op = M->getOperand(i).getNode();
6387 if (Visited.insert(Op))
6387 if (Visited.insert(Op).second)
63886388 Worklist.push_back(Op);
63896389 if (Op == N)
63906390 return true;
67526752
67536753 // If a node has already been visited on this depth-first walk, reject it as
67546754 // a cycle.
6755 if (!Visited.insert(N)) {
6755 if (!Visited.insert(N).second) {
67566756 errs() << "Detected cycle in SelectionDAG\n";
67576757 dbgs() << "Offending node:\n";
67586758 N->dumprFull(DAG); dbgs() << "\n";
27592759 SmallSet Done;
27602760 for (unsigned i = 0, e = I.getNumSuccessors(); i != e; ++i) {
27612761 BasicBlock *BB = I.getSuccessor(i);
2762 bool Inserted = Done.insert(BB);
2762 bool Inserted = Done.insert(BB).second;
27632763 if (!Inserted)
27642764 continue;
27652765
76967696
76977697 // If this terminator has multiple identical successors (common for
76987698 // switches), only handle each succ once.
7699 if (!SuccsHandled.insert(SuccMBB)) continue;
7699 if (!SuccsHandled.insert(SuccMBB).second)
7700 continue;
77007701
77017702 MachineBasicBlock::iterator MBBI = SuccMBB->begin();
77027703
568568 typedef SmallPtrSet VisitedSDNodeSet;
569569 static void DumpNodesr(raw_ostream &OS, const SDNode *N, unsigned indent,
570570 const SelectionDAG *G, VisitedSDNodeSet &once) {
571 if (!once.insert(N)) // If we've been here before, return now.
571 if (!once.insert(N).second) // If we've been here before, return now.
572572 return;
573573
574574 // Dump the current SDNode, but don't end the line yet.
614614 SDNode *N = Worklist.pop_back_val();
615615
616616 // If we've already seen this node, ignore it.
617 if (!VisitedNodes.insert(N))
617 if (!VisitedNodes.insert(N).second)
618618 continue;
619619
620620 // Otherwise, add all chain operands to the worklist.
17401740
17411741 // Don't revisit nodes if we already scanned it and didn't fail, we know we
17421742 // won't fail if we scan it again.
1743 if (!Visited.insert(Use))
1743 if (!Visited.insert(Use).second)
17441744 return false;
17451745
17461746 for (unsigned i = 0, e = Use->getNumOperands(); i != e; ++i) {
139139 /// we reach blocks we've already seen.
140140 static void MarkBlocksLiveIn(BasicBlock *BB,
141141 SmallPtrSetImpl &LiveBBs) {
142 if (!LiveBBs.insert(BB))
142 if (!LiveBBs.insert(BB).second)
143143 return; // already been here.
144144
145145 for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
168168 } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
169169 // Keep track of what PHI nodes we have already visited to ensure
170170 // they are only visited once.
171 if (VisitedPHIs.insert(PN))
171 if (VisitedPHIs.insert(PN).second)
172172 if (HasAddressTaken(PN))
173173 return true;
174174 } else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
677677 unsigned Reg = DstReg;
678678 while (MachineInstr *UseMI = findOnlyInterestingUse(Reg, MBB, MRI, TII,IsCopy,
679679 NewReg, IsDstPhys)) {
680 if (IsCopy && !Processed.insert(UseMI))
680 if (IsCopy && !Processed.insert(UseMI).second)
681681 break;
682682
683683 DenseMap::iterator DI = DistanceMap.find(UseMI);
315315 // ConstantExpr traps if any operands can trap.
316316 for (unsigned i = 0, e = C->getNumOperands(); i != e; ++i) {
317317 if (ConstantExpr *Op = dyn_cast<ConstantExpr>(CE->getOperand(i))) {
318 if (NonTrappingOps.insert(Op) && canTrapImpl(Op, NonTrappingOps))
318 if (NonTrappingOps.insert(Op).second && canTrapImpl(Op, NonTrappingOps))
319319 return true;
320320 }
321321 }
362362 const Constant *ConstOp = dyn_cast<Constant>(Op);
363363 if (!ConstOp)
364364 continue;
365 if (Visited.insert(ConstOp))
365 if (Visited.insert(ConstOp).second)
366366 WorkList.push_back(ConstOp);
367367 }
368368 }
6363 // TrackingVHs back into Values.
6464 SmallPtrSet RetainSet;
6565 for (unsigned I = 0, E = AllRetainTypes.size(); I < E; I++)
66 if (RetainSet.insert(AllRetainTypes[I]))
66 if (RetainSet.insert(AllRetainTypes[I]).second)
6767 RetainValues.push_back(AllRetainTypes[I]);
6868 DIArray RetainTypes = getOrCreateArray(RetainValues);
6969 DIType(TempRetainTypes).replaceAllUsesWith(RetainTypes);
11261126 if (!DV.isVariable())
11271127 return;
11281128
1129 if (!NodesSeen.insert(DV))
1129 if (!NodesSeen.insert(DV).second)
11301130 return;
11311131 processScope(DIVariable(N).getContext());
11321132 processType(DIVariable(N).getType().resolve(TypeIdentifierMap));
11421142 if (!DV.isVariable())
11431143 return;
11441144
1145 if (!NodesSeen.insert(DV))
1145 if (!NodesSeen.insert(DV).second)
11461146 return;
11471147 processScope(DIVariable(N).getContext());
11481148 processType(DIVariable(N).getType().resolve(TypeIdentifierMap));
11521152 if (!DT)
11531153 return false;
11541154
1155 if (!NodesSeen.insert(DT))
1155 if (!NodesSeen.insert(DT).second)
11561156 return false;
11571157
11581158 TYs.push_back(DT);
11621162 bool DebugInfoFinder::addCompileUnit(DICompileUnit CU) {
11631163 if (!CU)
11641164 return false;
1165 if (!NodesSeen.insert(CU))
1165 if (!NodesSeen.insert(CU).second)
11661166 return false;
11671167
11681168 CUs.push_back(CU);
11731173 if (!DIG)
11741174 return false;
11751175
1176 if (!NodesSeen.insert(DIG))
1176 if (!NodesSeen.insert(DIG).second)
11771177 return false;
11781178
11791179 GVs.push_back(DIG);
11841184 if (!SP)
11851185 return false;
11861186
1187 if (!NodesSeen.insert(SP))
1187 if (!NodesSeen.insert(SP).second)
11881188 return false;
11891189
11901190 SPs.push_back(SP);
11981198 // as null for now.
11991199 if (Scope->getNumOperands() == 0)
12001200 return false;
1201 if (!NodesSeen.insert(Scope))
1201 if (!NodesSeen.insert(Scope).second)
12021202 return false;
12031203 Scopes.push_back(Scope);
12041204 return true;
561561 if (isOpaque())
562562 return false;
563563
564 if (Visited && !Visited->insert(this))
564 if (Visited && !Visited->insert(this).second)
565565 return false;
566566
567567 // Okay, our struct is sized if all of the elements are, but if one of the
291291 #ifndef NDEBUG
292292 static bool contains(SmallPtrSetImpl &Cache, ConstantExpr *Expr,
293293 Constant *C) {
294 if (!Cache.insert(Expr))
294 if (!Cache.insert(Expr).second)
295295 return false;
296296
297297 for (auto &O : Expr->operands()) {
400400 return V;
401401 }
402402 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
403 } while (Visited.insert(V));
403 } while (Visited.insert(V).second);
404404
405405 return V;
406406 }
450450 return V;
451451 }
452452 assert(V->getType()->isPointerTy() && "Unexpected operand type!");
453 } while (Visited.insert(V));
453 } while (Visited.insert(V).second);
454454
455455 return V;
456456 }
521521 // For GEPs, determine if the indexing lands within the allocated object.
522522 if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
523523 // Conservatively require that the base pointer be fully dereferenceable.
524 if (!Visited.insert(GEP->getOperand(0)))
524 if (!Visited.insert(GEP->getOperand(0)).second)
525525 return false;
526526 if (!isDereferenceablePointer(GEP->getOperand(0), DL, Visited))
527527 return false;
479479
480480 while (!WorkStack.empty()) {
481481 const Value *V = WorkStack.pop_back_val();
482 if (!Visited.insert(V))
482 if (!Visited.insert(V).second)
483483 continue;
484484
485485 if (const User *U = dyn_cast<User>(V)) {
509509 Assert1(!GV->isDeclaration(), "Alias must point to a definition", &GA);
510510
511511 if (const auto *GA2 = dyn_cast<GlobalAlias>(GV)) {
512 Assert1(Visited.insert(GA2), "Aliases cannot form a cycle", &GA);
512 Assert1(Visited.insert(GA2).second, "Aliases cannot form a cycle", &GA);
513513
514514 Assert1(!GA2->mayBeOverridden(), "Alias cannot point to a weak alias",
515515 &GA);
567567 void Verifier::visitMDNode(MDNode &MD, Function *F) {
568568 // Only visit each node once. Metadata can be mutually recursive, so this
569569 // avoids infinite recursion here, as well as being an optimization.
570 if (!MDNodes.insert(&MD))
570 if (!MDNodes.insert(&MD).second)
571571 return;
572572
573573 for (unsigned i = 0, e = MD.getNumOperands(); i != e; ++i) {
12171217 for (SwitchInst::CaseIt i = SI.case_begin(), e = SI.case_end(); i != e; ++i) {
12181218 Assert1(i.getCaseValue()->getType() == SwitchTy,
12191219 "Switch constants must all be same type as switch value!", &SI);
1220 Assert2(Constants.insert(i.getCaseValue()),
1220 Assert2(Constants.insert(i.getCaseValue()).second,
12211221 "Duplicate integer as switch case", &SI, i.getCaseValue());
12221222 }
12231223
22522252
22532253 while (!Stack.empty()) {
22542254 const ConstantExpr *V = Stack.pop_back_val();
2255 if (!Visited.insert(V))
2255 if (!Visited.insert(V).second)
22562256 continue;
22572257
22582258 VerifyConstantExprBitcastType(V);
151151 // same opaque type then we fail.
152152 if (cast<StructType>(DstTy)->isOpaque()) {
153153 // We can only map one source type onto the opaque destination type.
154 if (!DstResolvedOpaqueTypes.insert(cast<StructType>(DstTy)))
154 if (!DstResolvedOpaqueTypes.insert(cast<StructType>(DstTy)).second)
155155 return false;
156156 SrcDefinitionsToResolve.push_back(SSTy);
157157 Entry = DstTy;
14541454 continue;
14551455
14561456 // If we've already seen this option, don't add it to the list again.
1457 if (!OptionSet.insert(I->second))
1457 if (!OptionSet.insert(I->second).second)
14581458 continue;
14591459
14601460 Opts.push_back(std::pair(I->getKey().data(),
3333 memset(CurArray, -1, CurArraySize*sizeof(void*));
3434 }
3535
36 bool SmallPtrSetImplBase::insert_imp(const void * Ptr) {
36 std::pair<const void *const *, bool>
37 SmallPtrSetImplBase::insert_imp(const void *Ptr) {
3738 if (isSmall()) {
3839 // Check to see if it is already in the set.
3940 for (const void **APtr = SmallArray, **E = SmallArray+NumElements;
4041 APtr != E; ++APtr)
4142 if (*APtr == Ptr)
42 return false;
43
43 return std::make_pair(APtr, false);
44
4445 // Nope, there isn't. If we stay small, just 'pushback' now.
4546 if (NumElements < CurArraySize) {
4647 SmallArray[NumElements++] = Ptr;
47 return true;
48 return std::make_pair(SmallArray + (NumElements - 1), true);
4849 }
4950 // Otherwise, hit the big set case, which will call grow.
5051 }
6061
6162 // Okay, we know we have space. Find a hash bucket.
6263 const void **Bucket = const_cast(FindBucketFor(Ptr));
63 if (*Bucket == Ptr) return false; // Already inserted, good.
64
64 if (*Bucket == Ptr)
65 return std::make_pair(Bucket, false); // Already inserted, good.
66
6567 // Otherwise, insert it!
6668 if (*Bucket == getTombstoneMarker())
6769 --NumTombstones;
6870 *Bucket = Ptr;
6971 ++NumElements; // Track density.
70 return true;
72 return std::make_pair(Bucket, true);
7173 }
7274
7375 bool SmallPtrSetImplBase::erase_imp(const void * Ptr) {
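A small behavioural sketch (assumptions: the demo function and the chosen small size of 2): insert_imp now reports the slot it used in either representation, so the typed wrapper can hand back an iterator whether the element landed in the inline SmallArray or, after growth, in a hash bucket.

    #include "llvm/ADT/SmallPtrSet.h"
    using namespace llvm;

    void growthDemo() { // hypothetical
      int A = 0, B = 0, C = 0;
      SmallPtrSet<int *, 2> S;   // small inline storage, size picked for the sketch
      S.insert(&A);              // small path: appended to SmallArray
      S.insert(&B);
      auto R = S.insert(&C);     // exceeds the small capacity, takes the grown path
      // In both representations R.first identifies the slot holding &C,
      // and R.second is true because a new element was inserted.
      (void)R;
    }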
568568 // global. Do not promote constant expressions either, as they may
569569 // require some code expansion.
570570 if (Cst && !isa(Cst) && !isa(Cst) &&
571 AlreadyChecked.insert(Cst))
571 AlreadyChecked.insert(Cst).second)
572572 LocalChange |= promoteConstant(Cst);
573573 }
574574 }
69426942 for (std::vector::iterator
69436943 I = LPadList.begin(), E = LPadList.end(); I != E; ++I) {
69446944 MachineBasicBlock *CurMBB = *I;
6945 if (SeenMBBs.insert(CurMBB))
6945 if (SeenMBBs.insert(CurMBB).second)
69466946 DispContBB->addSuccessor(CurMBB);
69476947 }
69486948
454454
455455 bool MemDefsUses::updateDefsUses(ValueType V, bool MayStore) {
456456 if (MayStore)
457 return !Defs.insert(V) || Uses.count(V) || SeenNoObjStore || SeenNoObjLoad;
457 return !Defs.insert(V).second || Uses.count(V) || SeenNoObjStore ||
458 SeenNoObjLoad;
458459
459460 Uses.insert(V);
460461 return Defs.count(V) || SeenNoObjStore;
76487648 // nodes just above the top-level loads and token factors.
76497649 while (!Queue.empty()) {
76507650 SDNode *ChainNext = Queue.pop_back_val();
7651 if (!Visited.insert(ChainNext))
7651 if (!Visited.insert(ChainNext).second)
76527652 continue;
76537653
76547654 if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(ChainNext)) {
76797679
76807680 while (!Queue.empty()) {
76817681 SDNode *LoadRoot = Queue.pop_back_val();
7682 if (!Visited.insert(LoadRoot))
7682 if (!Visited.insert(LoadRoot).second)
76837683 continue;
76847684
76857685 if (MemSDNode *ChainLD = dyn_cast<MemSDNode>(LoadRoot))
78097809 SDValue BinOp = BinOps.back();
78107810 BinOps.pop_back();
78117811
7812 if (!Visited.insert(BinOp.getNode()))
7812 if (!Visited.insert(BinOp.getNode()).second)
78137813 continue;
78147814
78157815 PromOps.push_back(BinOp);
80238023 SDValue BinOp = BinOps.back();
80248024 BinOps.pop_back();
80258025
8026 if (!Visited.insert(BinOp.getNode()))
8026 if (!Visited.insert(BinOp.getNode()).second)
80278027 continue;
80288028
80298029 PromOps.push_back(BinOp);
329329 // Process any unreachable blocks in arbitrary order now.
330330 if (MF.size() != Processed.size())
331331 for (MachineFunction::iterator BB = MF.begin(), E = MF.end(); BB != E; ++BB)
332 if (Processed.insert(BB))
332 if (Processed.insert(BB).second)
333333 Changed |= processBasicBlock(MF, *BB);
334334
335335 LiveBundles.clear();
181181 Value *V = WorkList.back();
182182 WorkList.pop_back();
183183 if (isa(V) || isa(V)) {
184 if (PtrValues.insert(V))
184 if (PtrValues.insert(V).second)
185185 WorkList.insert(WorkList.end(), V->user_begin(), V->user_end());
186186 } else if (StoreInst *Store = dyn_cast<StoreInst>(V)) {
187187 Stores.push_back(Store);
445445 case Instruction::AddrSpaceCast:
446446 // The original value is not read/written via this if the new value isn't.
447447 for (Use &UU : I->uses())
448 if (Visited.insert(&UU))
448 if (Visited.insert(&UU).second)
449449 Worklist.push_back(&UU);
450450 break;
451451
459459 auto AddUsersToWorklistIfCapturing = [&] {
460460 if (Captures)
461461 for (Use &UU : I->uses())
462 if (Visited.insert(&UU))
462 if (Visited.insert(&UU).second)
463463 Worklist.push_back(&UU);
464464 };
465465
184184 /// recursively mark anything that it uses as also needed.
185185 void GlobalDCE::GlobalIsNeeded(GlobalValue *G) {
186186 // If the global is already in the set, no need to reprocess it.
187 if (!AliveGlobals.insert(G))
187 if (!AliveGlobals.insert(G).second)
188188 return;
189189
190190 Module *M = G->getParent();
237237 for (User::op_iterator I = C->op_begin(), E = C->op_end(); I != E; ++I) {
238238 // If we've already processed this constant there's no need to do it again.
239239 Constant *Op = dyn_cast(*I);
240 if (Op && SeenConstants.insert(Op))
240 if (Op && SeenConstants.insert(Op).second)
241241 MarkUsedGlobalsAsNeeded(Op);
242242 }
243243 }
638638 } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
639639 // If we've already seen this phi node, ignore it, it has already been
640640 // checked.
641 if (PHIs.insert(PN) && !AllUsesOfValueWillTrapIfNull(PN, PHIs))
641 if (PHIs.insert(PN).second && !AllUsesOfValueWillTrapIfNull(PN, PHIs))
642642 return false;
643643 } else if (isa(U) &&
644644 isa(U->getOperand(1))) {
981981 if (const PHINode *PN = dyn_cast<PHINode>(Inst)) {
982982 // PHIs are ok if all uses are ok. Don't infinitely recurse through PHI
983983 // cycles.
984 if (PHIs.insert(PN))
984 if (PHIs.insert(PN).second)
985985 if (!ValueIsOnlyUsedLocallyOrStoredToOneGlobal(PN, GV, PHIs))
986986 return false;
987987 continue;
10721072 }
10731073
10741074 if (const PHINode *PN = dyn_cast<PHINode>(UI)) {
1075 if (!LoadUsingPHIsPerLoad.insert(PN))
1075 if (!LoadUsingPHIsPerLoad.insert(PN).second)
10761076 // This means some phi nodes are dependent on each other.
10771077 // Avoid infinite looping!
10781078 return false;
1079 if (!LoadUsingPHIs.insert(PN))
1079 if (!LoadUsingPHIs.insert(PN).second)
10801080 // If we have already analyzed this PHI, then it is safe.
10811081 continue;
10821082
20442044 SmallPtrSetImpl &SimpleConstants,
20452045 const DataLayout *DL) {
20462046 // If we already checked this constant, we win.
2047 if (!SimpleConstants.insert(C)) return true;
2047 if (!SimpleConstants.insert(C).second)
2048 return true;
20482049 // Check the constant.
20492050 return isSimpleEnoughValueToCommitHelper(C, SimpleConstants, DL);
20502051 }
26692670 // Okay, we succeeded in evaluating this control flow. See if we have
26702671 // executed the new block before. If so, we have a looping function,
26712672 // which we cannot evaluate in reasonable time.
2672 if (!ExecutedBlocks.insert(NextBB))
2673 if (!ExecutedBlocks.insert(NextBB).second)
26732674 return false; // looped!
26742675
26752676 // Okay, we have never been in this block before. Check to see if there
27782779 }
27792780 bool usedErase(GlobalValue *GV) { return Used.erase(GV); }
27802781 bool compilerUsedErase(GlobalValue *GV) { return CompilerUsed.erase(GV); }
2781 bool usedInsert(GlobalValue *GV) { return Used.insert(GV); }
2782 bool compilerUsedInsert(GlobalValue *GV) { return CompilerUsed.insert(GV); }
2782 bool usedInsert(GlobalValue *GV) { return Used.insert(GV).second; }
2783 bool compilerUsedInsert(GlobalValue *GV) {
2784 return CompilerUsed.insert(GV).second;
2785 }
27832786
27842787 void syncVariablesAndSets() {
27852788 if (UsedV)
29722975 SmallPtrSet NewCalledFunctions(CalledFunctions);
29732976
29742977 // Don't treat recursive functions as empty.
2975 if (!NewCalledFunctions.insert(CalledFn))
2978 if (!NewCalledFunctions.insert(CalledFn).second)
29762979 return false;
29772980
29782981 if (!cxxDtorIsEmpty(*CalledFn, NewCalledFunctions))
218218
219219 // If the inlined function already uses this alloca then we can't reuse
220220 // it.
221 if (!UsedAllocas.insert(AvailableAlloca))
221 if (!UsedAllocas.insert(AvailableAlloca).second)
222222 continue;
223223
224224 // Otherwise, we *can* reuse it, RAUW AI into AvailableAlloca and declare
10481048
10491049 assert(TermL->getNumSuccessors() == TermR->getNumSuccessors());
10501050 for (unsigned i = 0, e = TermL->getNumSuccessors(); i != e; ++i) {
1051 if (!VisitedBBs.insert(TermL->getSuccessor(i)))
1051 if (!VisitedBBs.insert(TermL->getSuccessor(i)).second)
10521052 continue;
10531053
10541054 FnLBBs.push_back(TermL->getSuccessor(i));
510510 if (!PN->hasOneUse()) return false;
511511
512512 // Remember this node, and if we find the cycle, return.
513 if (!PotentiallyDeadPHIs.insert(PN))
513 if (!PotentiallyDeadPHIs.insert(PN).second)
514514 return true;
515515
516516 // Don't scan crazily complex things.
529529 static bool PHIsEqualValue(PHINode *PN, Value *NonPhiInVal,
530530 SmallPtrSetImpl &ValueEqualPHIs) {
531531 // See if we already saw this PHI node.
532 if (!ValueEqualPHIs.insert(PN))
532 if (!ValueEqualPHIs.insert(PN).second)
533533 return true;
534534
535535 // Don't scan crazily complex things.
653653
654654 // If the user is a PHI, inspect its uses recursively.
655655 if (PHINode *UserPN = dyn_cast<PHINode>(UserI)) {
656 if (PHIsInspected.insert(UserPN))
656 if (PHIsInspected.insert(UserPN).second)
657657 PHIsToSlice.push_back(UserPN);
658658 continue;
659659 }
23402340
23412341 // If we already saw this clause, there is no point in having a second
23422342 // copy of it.
2343 if (AlreadyCaught.insert(TypeInfo)) {
2343 if (AlreadyCaught.insert(TypeInfo).second) {
23442344 // This catch clause was not already seen.
23452345 NewClauses.push_back(CatchClause);
23462346 } else {
24222422 continue;
24232423 // There is no point in having multiple copies of the same typeinfo in
24242424 // a filter, so only add it if we didn't already.
2425 if (SeenInFilter.insert(TypeInfo))
2425 if (SeenInFilter.insert(TypeInfo).second)
24262426 NewFilterElts.push_back(cast(Elt));
24272427 }
24282428 // A filter containing a catch-all cannot match anything by definition.
26742674 BB = Worklist.pop_back_val();
26752675
26762676 // We have now visited this block! If we've already been here, ignore it.
2677 if (!Visited.insert(BB)) continue;
2677 if (!Visited.insert(BB).second)
2678 continue;
26782679
26792680 for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E; ) {
26802681 Instruction *Inst = BBI++;
13161316 if (Value *Addr =
13171317 isInterestingMemoryAccess(&Inst, &IsWrite, &Alignment)) {
13181318 if (ClOpt && ClOptSameTemp) {
1319 if (!TempsToInstrument.insert(Addr))
1319 if (!TempsToInstrument.insert(Addr).second)
13201320 continue; // We've seen this temp in the current BB.
13211321 }
13221322 } else if (ClInvalidPointerPairs &&
228228 // Add the predecessors to the worklist.
229229 do {
230230 BasicBlock *PredBB = *PI;
231 if (Visited.insert(PredBB))
231 if (Visited.insert(PredBB).second)
232232 Worklist.push_back(std::make_pair(PredBB, PredBB->end()));
233233 } while (++PI != PE);
234234 break;
187187 if (isa(P))
188188 return true;
189189
190 if (!Visited.insert(P))
190 if (!Visited.insert(P).second)
191191 continue;
192192
193193 if (const SelectInst *SI = dyn_cast<SelectInst>(P)) {
411411 // that makes this a partial merge.
412412 bool Partial = ReverseInsertPts.size() != Other.ReverseInsertPts.size();
413413 for (Instruction *Inst : Other.ReverseInsertPts)
414 Partial |= ReverseInsertPts.insert(Inst);
414 Partial |= ReverseInsertPts.insert(Inst).second;
415415 return Partial;
416416 }
417417
21932193
21942194 while (SuccStack.back().second != SE) {
21952195 BasicBlock *SuccBB = *SuccStack.back().second++;
2196 if (Visited.insert(SuccBB)) {
2196 if (Visited.insert(SuccBB).second) {
21972197 TerminatorInst *TI = cast<TerminatorInst>(&SuccBB->back());
21982198 SuccStack.push_back(std::make_pair(SuccBB, succ_iterator(TI)));
21992199 BBStates[CurrBB].addSucc(SuccBB);
22342234 BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
22352235 while (PredStack.back().second != PE) {
22362236 BasicBlock *BB = *PredStack.back().second++;
2237 if (Visited.insert(BB)) {
2237 if (Visited.insert(BB).second) {
22382238 PredStack.push_back(std::make_pair(BB, BBStates[BB].pred_begin()));
22392239 goto reverse_dfs_next_succ;
22402240 }
23892389 if (!NewRetainReleaseRRI.Calls.count(NewRetain))
23902390 return false;
23912391
2392 if (ReleasesToMove.Calls.insert(NewRetainRelease)) {
2392 if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
23932393
23942394 // If we overflow when we compute the path count, don't remove/move
23952395 // anything.
24212421 // Collect the optimal insertion points.
24222422 if (!KnownSafe)
24232423 for (Instruction *RIP : NewRetainReleaseRRI.ReverseInsertPts) {
2424 if (ReleasesToMove.ReverseInsertPts.insert(RIP)) {
2424 if (ReleasesToMove.ReverseInsertPts.insert(RIP).second) {
24252425 // If we overflow when we compute the path count, don't
24262426 // remove/move anything.
24272427 const BBState &RIPBBState = BBStates[RIP->getParent()];
24662466 if (!NewReleaseRetainRRI.Calls.count(NewRelease))
24672467 return false;
24682468
2469 if (RetainsToMove.Calls.insert(NewReleaseRetain)) {
2469 if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
24702470 // If we overflow when we compute the path count, don't remove/move
24712471 // anything.
24722472 const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
24822482 // Collect the optimal insertion points.
24832483 if (!KnownSafe)
24842484 for (Instruction *RIP : NewReleaseRetainRRI.ReverseInsertPts) {
2485 if (RetainsToMove.ReverseInsertPts.insert(RIP)) {
2485 if (RetainsToMove.ReverseInsertPts.insert(RIP).second) {
24862486 // If we overflow when we compute the path count, don't
24872487 // remove/move anything.
24882488 const BBState &RIPBBState = BBStates[RIP->getParent()];
6161 SmallPtrSet UniqueSrc;
6262 for (unsigned i = 0, e = A->getNumIncomingValues(); i != e; ++i) {
6363 const Value *PV1 = A->getIncomingValue(i);
64 if (UniqueSrc.insert(PV1) && related(PV1, B))
64 if (UniqueSrc.insert(PV1).second && related(PV1, B))
6565 return true;
6666 }
6767
9393 if (isa(P))
9494 // Assume the worst.
9595 return true;
96 if (Visited.insert(Ur))
96 if (Visited.insert(Ur).second)
9797 Worklist.push_back(Ur);
9898 }
9999 } while (!Worklist.empty());
7272 for (Instruction::op_iterator OI = curr->op_begin(), OE = curr->op_end();
7373 OI != OE; ++OI)
7474 if (Instruction* Inst = dyn_cast<Instruction>(OI))
75 if (alive.insert(Inst))
75 if (alive.insert(Inst).second)
7676 worklist.push_back(Inst);
7777 }
7878
11011101 Instruction *NarrowUser = cast<Instruction>(U);
11021102
11031103 // Handle data flow merges and bizarre phi cycles.
1104 if (!Widened.insert(NarrowUser))
1104 if (!Widened.insert(NarrowUser).second)
11051105 continue;
11061106
11071107 NarrowIVUsers.push_back(NarrowIVDefUse(NarrowDef, NarrowUser, WideDef));
12831283 static bool isHighCostExpansion(const SCEV *S, BranchInst *BI,
12841284 SmallPtrSetImpl &Processed,
12851285 ScalarEvolution *SE) {
1286 if (!Processed.insert(S))
1286 if (!Processed.insert(S).second)
12871287 return false;
12881288
12891289 // If the backedge-taken count is a UDiv, it's very likely a UDiv that
14741474
14751475 // Optimistically handle other instructions.
14761476 for (User::op_iterator OI = I->op_begin(), E = I->op_end(); OI != E; ++OI) {
1477 if (!Visited.insert(*OI))
1477 if (!Visited.insert(*OI).second)
14781478 continue;
14791479 if (!hasConcreteDefImpl(*OI, Visited, Depth+1))
14801480 return false;
931931 BasicBlock *PredBB = *PI;
932932
933933 // If we already scanned this predecessor, skip it.
934 if (!PredsScanned.insert(PredBB))
934 if (!PredsScanned.insert(PredBB).second)
935935 continue;
936936
937937 // Scan the predecessor to see if the value is available in the pred.
11501150
11511151 for (unsigned i = 0, e = PredValues.size(); i != e; ++i) {
11521152 BasicBlock *Pred = PredValues[i].second;
1153 if (!SeenPreds.insert(Pred))
1153 if (!SeenPreds.insert(Pred).second)
11541154 continue; // Duplicate predecessor entry.
11551155
11561156 // If the predecessor ends with an indirect goto, we can't change its
151151 for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE;
152152 ++SI) {
153153 BasicBlock *SuccBB = *SI;
154 if (!Visited.insert(SuccBB))
154 if (!Visited.insert(SuccBB).second)
155155 continue;
156156
157157 const Loop *SuccLoop = LI->getLoopFor(SuccBB);
164164
165165 for (unsigned i = 0; i < SubLoopExitBlocks.size(); ++i) {
166166 BasicBlock *ExitBB = SubLoopExitBlocks[i];
167 if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB))
167 if (LI->getLoopFor(ExitBB) == L && Visited.insert(ExitBB).second)
168168 VisitStack.push_back(WorklistItem(ExitBB, false));
169169 }
170170
761761 Processed, SE);
762762 }
763763
764 if (!Processed.insert(S))
764 if (!Processed.insert(S).second)
765765 return false;
766766
767767 if (const SCEVAddExpr *Add = dyn_cast<SCEVAddExpr>(S)) {
974974 Lose();
975975 return;
976976 }
977 if (Regs.insert(Reg)) {
977 if (Regs.insert(Reg).second) {
978978 RateRegister(Reg, Regs, L, SE, DT);
979979 if (LoserRegs && isLoser())
980980 LoserRegs->insert(Reg);
28012801 User::op_iterator IVOpIter = findIVOperand(I->op_begin(), IVOpEnd, L, SE);
28022802 while (IVOpIter != IVOpEnd) {
28032803 Instruction *IVOpInst = cast<Instruction>(*IVOpIter);
2804 if (UniqueOperands.insert(IVOpInst))
2804 if (UniqueOperands.insert(IVOpInst).second)
28052805 ChainInstruction(I, IVOpInst, ChainUsersVec);
28062806 IVOpIter = findIVOperand(std::next(IVOpIter), IVOpEnd, L, SE);
28072807 }
31213121 const SCEV *S = Worklist.pop_back_val();
31223122
31233123 // Don't process the same SCEV twice
3124 if (!Visited.insert(S))
3124 if (!Visited.insert(S).second)
31253125 continue;
31263126
31273127 if (const SCEVNAryExpr *N = dyn_cast<SCEVNAryExpr>(S))
37733773 for (int LUIdx = UsedByIndices.find_first(); LUIdx != -1;
37743774 LUIdx = UsedByIndices.find_next(LUIdx))
37753775 // Make a memo of this use, offset, and register tuple.
3776 if (UniqueItems.insert(std::make_pair(LUIdx, Imm)))
3776 if (UniqueItems.insert(std::make_pair(LUIdx, Imm)).second)
37773777 WorkItems.push_back(WorkItem(LUIdx, Imm, OrigReg));
37783778 }
37793779 }
622622 // If this is a binary operation of the right kind with only one use then
623623 // add its operands to the expression.
624624 if (BinaryOperator *BO = isReassociableOp(Op, Opcode)) {
625 assert(Visited.insert(Op) && "Not first visit!");
625 assert(Visited.insert(Op).second && "Not first visit!");
626626 DEBUG(dbgs() << "DIRECT ADD: " << *Op << " (" << Weight << ")\n");
627627 Worklist.push_back(std::make_pair(BO, Weight));
628628 continue;
632632 LeafMap::iterator It = Leaves.find(Op);
633633 if (It == Leaves.end()) {
634634 // Not in the leaf map. Must be the first time we saw this operand.
635 assert(Visited.insert(Op) && "Not first visit!");
635 assert(Visited.insert(Op).second && "Not first visit!");
636636 if (!Op->hasOneUse()) {
637637 // This value has uses not accounted for by the expression, so it is
638638 // not safe to modify. Mark it as being a leaf.
16081608 SmallPtrSet&lt;Value*, 8&gt; Duplicates;
16091609 for (unsigned i = 0, e = Factors.size(); i != e; ++i) {
16101610 Value *Factor = Factors[i];
1611 if (!Duplicates.insert(Factor))
1611 if (!Duplicates.insert(Factor).second)
16121612 continue;
16131613
16141614 unsigned Occ = ++FactorOccurrences[Factor];
19591959 // and add that since that's where optimization actually happens.
19601960 unsigned Opcode = Op->getOpcode();
19611961 while (Op->hasOneUse() && Op->user_back()->getOpcode() == Opcode &&
1962 Visited.insert(Op))
1962 Visited.insert(Op).second)
19631963 Op = Op->user_back();
19641964 RedoInsts.insert(Op);
19651965 }
213213 ///
214214 /// This returns true if the block was not considered live before.
215215 bool MarkBlockExecutable(BasicBlock *BB) {
216 if (!BBExecutable.insert(BB)) return false;
216 if (!BBExecutable.insert(BB).second)
217 return false;
217218 DEBUG(dbgs() << "Marking Block Executable: " << BB->getName() << '\n');
218219 BBWorkList.push_back(BB); // Add the block to the work list!
219220 return true;
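The hunk above keeps the contract spelled out in the comment: the call marks a block at most once and only then queues it. A sketch of that mark-once-then-queue shape on a toy container (names are illustrative, not the real solver members):

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/ADT/SmallVector.h"

  // Hypothetical marker: returns true only the first time Item is marked,
  // and queues it for processing exactly once.
  template <typename T> struct OnceQueue {
    llvm::SmallPtrSet<T *, 32> Marked;
    llvm::SmallVector<T *, 32> WorkList;

    bool mark(T *Item) {
      if (!Marked.insert(Item).second)
        return false;            // already marked; nothing to re-queue
      WorkList.push_back(Item);
      return true;
    }
  };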
348348
349349 private:
350350 void markAsDead(Instruction &I) {
351 if (VisitedDeadInsts.insert(&I))
351 if (VisitedDeadInsts.insert(&I).second)
352352 AS.DeadUsers.push_back(&I);
353353 }
354354
638638 }
639639
640640 for (User *U : I->users())
641 if (Visited.insert(cast&lt;Instruction&gt;(U)))
641 if (Visited.insert(cast&lt;Instruction&gt;(U)).second)
642642 Uses.push_back(std::make_pair(I, cast&lt;Instruction&gt;(U)));
643643 } while (!Uses.empty());
644644
847847 else
848848 return false;
849849
850 } while (Visited.insert(Ptr));
850 } while (Visited.insert(Ptr).second);
851851
852852 return false;
853853 }
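The do/while loop above now spells its exit test as Visited.insert(Ptr).second, so the walk still stops the first time a pointer value repeats. A sketch of the same termination idiom on a stripped-down pointer chase (hypothetical helper, not the SROA routine):

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/IR/Operator.h"
  #include "llvm/IR/Value.h"
  #include "llvm/Support/Casting.h"

  // Hypothetical chase: follow GEP pointer operands until the chain ends or
  // a value repeats; insert(Ptr).second turns false on the repeat.
  static llvm::Value *chasePointer(llvm::Value *Ptr) {
    llvm::SmallPtrSet<llvm::Value *, 8> Visited;
    do {
      auto *GEP = llvm::dyn_cast<llvm::GEPOperator>(Ptr);
      if (!GEP)
        break;
      Ptr = GEP->getPointerOperand();
    } while (Visited.insert(Ptr).second);
    return Ptr;
  }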
14601460 break;
14611461 Offset += GEPOffset;
14621462 Ptr = GEP->getPointerOperand();
1463 if (!Visited.insert(Ptr))
1463 if (!Visited.insert(Ptr).second)
14641464 break;
14651465 }
14661466
14971497 break;
14981498 }
14991499 assert(Ptr->getType()->isPointerTy() && "Unexpected operand type!");
1500 } while (Visited.insert(Ptr));
1500 } while (Visited.insert(Ptr).second);
15011501
15021502 if (!OffsetPtr) {
15031503 if (!Int8Ptr) {
28602860 /// This uses a set to de-duplicate users.
28612861 void enqueueUsers(Instruction &I) {
28622862 for (Use &U : I.uses())
2863 if (Visited.insert(U.getUser()))
2863 if (Visited.insert(U.getUser()).second)
28642864 Queue.push_back(&U);
28652865 }
28662866
35873587 SmallVectorImpl&lt;Instruction *&gt; &Worklist,
35883588 SmallPtrSetImpl&lt;Instruction *&gt; &Visited) {
35893589 for (User *U : I.users())
3590 if (Visited.insert(cast&lt;Instruction&gt;(U)))
3590 if (Visited.insert(cast&lt;Instruction&gt;(U)).second)
35913591 Worklist.push_back(cast&lt;Instruction&gt;(U));
35923592 }
35933593
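The last two SROA hunks feed a queue through the visited set: enqueueUsers walks I.uses() and this helper walks I.users(), and in both the .second result keeps a user from being queued twice. A sketch of that shape, assuming instruction-typed users (illustrative only):

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/ADT/SmallVector.h"
  #include "llvm/IR/Instruction.h"
  #include "llvm/IR/User.h"

  // Hypothetical enqueue: push each instruction user of I at most once.
  static void enqueueUsersOnce(llvm::Instruction &I,
                               llvm::SmallPtrSetImpl<llvm::Instruction *> &Visited,
                               llvm::SmallVectorImpl<llvm::Instruction *> &Worklist) {
    for (llvm::User *U : I.users())
      if (auto *UI = llvm::dyn_cast<llvm::Instruction>(U))
        if (Visited.insert(UI).second) // newly seen user
          Worklist.push_back(UI);
  }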
304304 for (auto *BB2 : Descendants) {
305305 bool IsDomParent = DomTree->dominates(BB2, BB1);
306306 bool IsInSameLoop = LI->getLoopFor(BB1) == LI->getLoopFor(BB2);
307 if (BB1 != BB2 && VisitedBlocks.insert(BB2) && IsDomParent &&
307 if (BB1 != BB2 && VisitedBlocks.insert(BB2).second && IsDomParent &&
308308 IsInSameLoop) {
309309 EquivalenceClass[BB2] = BB1;
310310
493493 << " known. Set weight for block: ";
494494 printBlockWeight(dbgs(), BB););
495495 }
496 if (VisitedBlocks.insert(BB))
496 if (VisitedBlocks.insert(BB).second)
497497 Changed = true;
498498 } else if (NumUnknownEdges == 1 && VisitedBlocks.count(BB)) {
499499 // If there is a single unknown edge and the block has been
539539 llvm_unreachable("Found a stale predecessors list in a basic block.");
540540 for (pred_iterator PI = pred_begin(B1), PE = pred_end(B1); PI != PE; ++PI) {
541541 BasicBlock *B2 = *PI;
542 if (Visited.insert(B2))
542 if (Visited.insert(B2).second)
543543 Predecessors[B1].push_back(B2);
544544 }
545545
549549 llvm_unreachable("Found a stale successors list in a basic block.");
550550 for (succ_iterator SI = succ_begin(B1), SE = succ_end(B1); SI != SE; ++SI) {
551551 BasicBlock *B2 = *SI;
552 if (Visited.insert(B2))
552 if (Visited.insert(B2).second)
553553 Successors[B1].push_back(B2);
554554 }
555555 }
16681668 AllocaInfo &Info) {
16691669 // If we've already checked this PHI, don't do it again.
16701670 if (PHINode *PN = dyn_cast&lt;PHINode&gt;(I))
1671 if (!Info.CheckedPHIs.insert(PN))
1671 if (!Info.CheckedPHIs.insert(PN).second)
16721672 return;
16731673
16741674 for (User *U : I->users()) {
178178
179179 auto AddUsesToWorklist = [&](Value *V) {
180180 for (auto &U : V->uses()) {
181 if (!Visited.insert(&U))
181 if (!Visited.insert(&U).second)
182182 continue;
183183 Worklist.push_back(&U);
184184 }
132132 } else if (const PHINode *PN = dyn_cast&lt;PHINode&gt;(I)) {
133133 // PHI nodes we can check just like select or GEP instructions, but we
134134 // have to be careful about infinite recursion.
135 if (PhiUsers.insert(PN)) // Not already visited.
135 if (PhiUsers.insert(PN).second) // Not already visited.
136136 if (analyzeGlobalAux(I, GS, PhiUsers))
137137 return true;
138138 } else if (isa(I)) {
391391
392392 // If we find an instruction more than once, we're on a cycle that
393393 // won't prove fruitful.
394 if (!Visited.insert(I)) {
394 if (!Visited.insert(I).second) {
395395 // Break the cycle and delete the instruction and its operands.
396396 I->replaceAllUsesWith(UndefValue::get(I->getType()));
397397 (void)RecursivelyDeleteTriviallyDeadInstructions(I, TLI);
12651265
12661266 Changed |= ConstantFoldTerminator(BB, true);
12671267 for (succ_iterator SI = succ_begin(BB), SE = succ_end(BB); SI != SE; ++SI)
1268 if (Reachable.insert(*SI))
1268 if (Reachable.insert(*SI).second)
12691269 Worklist.push_back(*SI);
12701270 } while (!Worklist.empty());
12711271 return Changed;
111111 if (LPM) {
112112 if (ScalarEvolution *SE = LPM->getAnalysisIfAvailable&lt;ScalarEvolution&gt;()) {
113113 if (Loop *L = LI->getLoopFor(BB)) {
114 if (ForgottenLoops.insert(L))
114 if (ForgottenLoops.insert(L).second)
115115 SE->forgetLoop(L);
116116 }
117117 }
818818
819819 // The block really is live in here, insert it into the set. If already in
820820 // the set, then it has already been processed.
821 if (!LiveInBlocks.insert(BB))
821 if (!LiveInBlocks.insert(BB).second)
822822 continue;
823823
824824 // Since the value is live into BB, it is either defined in a predecessor or
898898 if (SuccLevel > RootLevel)
899899 continue;
900900
901 if (!Visited.insert(SuccNode))
901 if (!Visited.insert(SuccNode).second)
902902 continue;
903903
904904 BasicBlock *SuccBB = SuccNode->getBlock();
10031003 }
10041004
10051005 // Don't revisit blocks.
1006 if (!Visited.insert(BB))
1006 if (!Visited.insert(BB).second)
10071007 return;
10081008
10091009 for (BasicBlock::iterator II = BB->begin(); !isa&lt;TerminatorInst&gt;(II);) {
10601060 ++I;
10611061
10621062 for (; I != E; ++I)
1063 if (VisitedSuccs.insert(*I))
1063 if (VisitedSuccs.insert(*I).second)
10641064 Worklist.push_back(RenamePassData(*I, Pred, IncomingVals));
10651065
10661066 goto NextIteration;
42014201 SmallPtrSet&lt;Value *, 8&gt; Succs;
42024202 for (unsigned i = 0, e = IBI->getNumDestinations(); i != e; ++i) {
42034203 BasicBlock *Dest = IBI->getDestination(i);
4204 if (!Dest->hasAddressTaken() || !Succs.insert(Dest)) {
4204 if (!Dest->hasAddressTaken() || !Succs.insert(Dest).second) {
42054205 Dest->removePredecessor(BB);
42064206 IBI->removeDestination(i);
42074207 --i; --e;
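The condition above drops a destination either because its block no longer has its address taken or because Succs.insert(Dest).second reports that the same block is already listed. A sketch that only counts such repeats, assuming an IndirectBrInst terminator; unlike the real code it does not rewrite the terminator:

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/IR/BasicBlock.h"
  #include "llvm/IR/Instructions.h"

  // Hypothetical count of duplicate indirectbr destinations; .second is
  // false for every destination already recorded in Succs.
  static unsigned countDuplicateDests(llvm::IndirectBrInst *IBI) {
    llvm::SmallPtrSet<llvm::BasicBlock *, 8> Succs;
    unsigned Dups = 0;
    for (unsigned i = 0, e = IBI->getNumDestinations(); i != e; ++i)
      if (!Succs.insert(IBI->getDestination(i)).second)
        ++Dups;
    return Dups;
  }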
346346 // Also ensure unique worklist users.
347347 // If Def is a LoopPhi, it may not be in the Simplified set, so check for
348348 // self edges first.
349 if (UI != Def && Simplified.insert(UI))
349 if (UI != Def && Simplified.insert(UI).second)
350350 SimpleIVUsers.push_back(std::make_pair(UI, Def));
351351 }
352352 }
48004800
48014801 // If we did *not* see this pointer before, insert it to the read-write
48024802 // list. At this phase it is only a 'write' list.
4803 if (Seen.insert(Ptr)) {
4803 if (Seen.insert(Ptr).second) {
48044804 ++NumReadWrites;
48054805
48064806 AliasAnalysis::Location Loc = AA->getLocation(ST);
48334833 // read a few words, modify, and write a few words, and some of the
48344834 // words may be written to the same address.
48354835 bool IsReadOnlyPtr = false;
4836 if (Seen.insert(Ptr) || !isStridedPtr(SE, DL, Ptr, TheLoop, Strides)) {
4836 if (Seen.insert(Ptr).second ||
4837 !isStridedPtr(SE, DL, Ptr, TheLoop, Strides)) {
48374838 ++NumReads;
48384839 IsReadOnlyPtr = true;
48394840 }
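In these two hunks Seen.insert(Ptr).second marks the first sighting of a pointer: a store makes it a read-write pointer, while a load pointer is counted as a read the first time it is seen (or when it is not strided). A toy version of that first-sighting bookkeeping with hypothetical counters, omitting the striding check:

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/IR/Value.h"

  // Hypothetical classification: count each pointer once, as read-write if a
  // store sees it first, otherwise as a read.
  struct PtrCounts { unsigned NumReadWrites = 0; unsigned NumReads = 0; };

  static void notePointer(llvm::Value *Ptr, bool IsStore,
                          llvm::SmallPtrSetImpl<llvm::Value *> &Seen,
                          PtrCounts &Counts) {
    if (IsStore) {
      if (Seen.insert(Ptr).second)
        ++Counts.NumReadWrites;  // first sighting, via a store
    } else if (Seen.insert(Ptr).second) {
      ++Counts.NumReads;         // first sighting, via a load
    }
  }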
50965097 // value must only be used once, except by phi nodes and min/max
50975098 // reductions which are represented as a cmp followed by a select.
50985099 ReductionInstDesc IgnoredVal(false, nullptr);
5099 if (VisitedInsts.insert(UI)) {
5100 if (VisitedInsts.insert(UI).second) {
51005101 if (isa&lt;PHINode&gt;(UI))
51015102 PHIs.push_back(UI);
51025103 else
17231723 for (UserList::iterator I = ExternalUses.begin(), E = ExternalUses.end();
17241724 I != E; ++I) {
17251725 // We only add extract cost once for the same scalar.
1726 if (!ExtractCostCalculated.insert(I->Scalar))
1726 if (!ExtractCostCalculated.insert(I->Scalar).second)
17271727 continue;
17281728
17291729 // Uses by ephemeral values are free (because the ephemeral value will be
19211921 ValueList Operands;
19221922 BasicBlock *IBB = PH->getIncomingBlock(i);
19231923
1924 if (!VisitedBBs.insert(IBB)) {
1924 if (!VisitedBBs.insert(IBB).second) {
19251925 NewPhi->addIncoming(NewPhi->getIncomingValueForBlock(IBB), IBB);
19261926 continue;
19271927 }
36313631
36323632 for (BasicBlock::iterator it = BB->begin(), e = BB->end(); it != e; it++) {
36333633 // We may go through BB multiple times so skip the one we have checked.
3634 if (!VisitedInstrs.insert(it))
3634 if (!VisitedInstrs.insert(it).second)
36353635 continue;
36363636
36373637 if (isa&lt;DbgInfoIntrinsic&gt;(it))
181181 // Recursively find all reachable SchedReadWrite records.
182182 static void scanSchedRW(Record *RWDef, RecVec &RWDefs,
183183 SmallPtrSet&lt;Record*, 16&gt; &RWSet) {
184 if (!RWSet.insert(RWDef))
184 if (!RWSet.insert(RWDef).second)
185185 return;
186186 RWDefs.push_back(RWDef);
187187 // Reads don't currently have sequence records, but they can be added later.
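scanSchedRW guards its recursion with the same insert test, so each record lands in RWDefs at most once however it is reached. A sketch of that recursive guard over a generic node type (names are hypothetical, not TableGen's Record):

  #include "llvm/ADT/SmallPtrSet.h"
  #include "llvm/ADT/SmallVector.h"

  // Hypothetical recursive collection: append each reachable node once.
  struct RecNode { llvm::SmallVector<RecNode *, 4> Aliases; };

  static void collectReachable(RecNode *N, llvm::SmallVectorImpl<RecNode *> &Out,
                               llvm::SmallPtrSet<RecNode *, 16> &Seen) {
    if (!Seen.insert(N).second)
      return;                     // already collected via an earlier path
    Out.push_back(N);
    for (RecNode *A : N->Aliases)
      collectReachable(A, Out, Seen);
  }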
750750 for (ArrayRef&lt;Record*&gt;::const_iterator
751751 II = InstDefs.begin(), IE = InstDefs.end(); II != IE; ++II) {
752752 unsigned OldSCIdx = InstrClassMap[*II];
753 if (OldSCIdx && RemappedClassIDs.insert(OldSCIdx)) {
753 if (OldSCIdx && RemappedClassIDs.insert(OldSCIdx).second) {
754754 for (RecIter RI = SchedClasses[OldSCIdx].InstRWs.begin(),
755755 RE = SchedClasses[OldSCIdx].InstRWs.end(); RI != RE; ++RI) {
756756 if ((*RI)->getValueAsDef("SchedModel") == RWModelDef) {
385385 for (CodeGenSchedModels::ProcIter PI = SchedModels.procModelBegin(),
386386 PE = SchedModels.procModelEnd(); PI != PE; ++PI) {
387387
388 if (!ItinsDefSet.insert(PI->ItinsDef))
388 if (!ItinsDefSet.insert(PI->ItinsDef).second)
389389 continue;
390390
391391 std::vector&lt;Record*&gt; FUs = PI->ItinsDef->getValueAsListOfDefs("FU");
564564 PE = SchedModels.procModelEnd(); PI != PE; ++PI, ++ProcItinListsIter) {
565565
566566 Record *ItinsDef = PI->ItinsDef;
567 if (!ItinsDefSet.insert(ItinsDef))
567 if (!ItinsDefSet.insert(ItinsDef).second)
568568 continue;
569569
570570 // Get processor itinerary name