llvm.org GIT mirror llvm / 440079e
[PBQP] Move register-allocation specific PBQP code into RegAllocPBQP.h. Just clean-up - no functional change. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@220145 91177308-0d34-0410-b5e6-96231b3b80d8 Lang Hames 5 years ago
2 changed file(s) with 386 addition(s) and 414 deletion(s). Raw diff Collapse all Expand all
+0
-409
include/llvm/CodeGen/PBQP/RegAllocSolver.h less more
None //===-- RegAllocSolver.h - Heuristic PBQP Solver for reg alloc --*- C++ -*-===//
1 //
2 // The LLVM Compiler Infrastructure
3 //
4 // This file is distributed under the University of Illinois Open Source
5 // License. See LICENSE.TXT for details.
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // Heuristic PBQP solver for register allocation problems. This solver uses a
10 // graph reduction approach. Nodes of degree 0, 1 and 2 are eliminated with
11 // optimality-preserving rules (see ReductionRules.h). When no low-degree (<3)
12 // nodes are present, a heuristic derived from Brigg's graph coloring approach
13 // is used.
14 //
15 //===----------------------------------------------------------------------===//
16
17 #ifndef LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H
18 #define LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H
19
20 #include "CostAllocator.h"
21 #include "Graph.h"
22 #include "ReductionRules.h"
23 #include "Solution.h"
24 #include "llvm/Support/ErrorHandling.h"
25 #include <limits>
26 #include <vector>
27
28 namespace llvm{
29 namespace PBQP {
30 namespace RegAlloc {
31
32 /// @brief Spill option index.
33 inline unsigned getSpillOptionIdx() { return 0; }
34
35 /// \brief Metadata to speed allocatability test.
36 ///
37 /// Keeps track of the number of infinities in each row and column.
38 class MatrixMetadata {
39 private:
40 MatrixMetadata(const MatrixMetadata&);
41 void operator=(const MatrixMetadata&);
42 public:
43 MatrixMetadata(const PBQP::Matrix& M)
44 : WorstRow(0), WorstCol(0),
45 UnsafeRows(new bool[M.getRows() - 1]()),
46 UnsafeCols(new bool[M.getCols() - 1]()) {
47
48 unsigned* ColCounts = new unsigned[M.getCols() - 1]();
49
50 for (unsigned i = 1; i < M.getRows(); ++i) {
51 unsigned RowCount = 0;
52 for (unsigned j = 1; j < M.getCols(); ++j) {
53 if (M[i][j] == std::numeric_limits<PBQPNum>::infinity()) {
54 ++RowCount;
55 ++ColCounts[j - 1];
56 UnsafeRows[i - 1] = true;
57 UnsafeCols[j - 1] = true;
58 }
59 }
60 WorstRow = std::max(WorstRow, RowCount);
61 }
62 unsigned WorstColCountForCurRow =
63 *std::max_element(ColCounts, ColCounts + M.getCols() - 1);
64 WorstCol = std::max(WorstCol, WorstColCountForCurRow);
65 delete[] ColCounts;
66 }
67
68 ~MatrixMetadata() {
69 delete[] UnsafeRows;
70 delete[] UnsafeCols;
71 }
72
73 unsigned getWorstRow() const { return WorstRow; }
74 unsigned getWorstCol() const { return WorstCol; }
75 const bool* getUnsafeRows() const { return UnsafeRows; }
76 const bool* getUnsafeCols() const { return UnsafeCols; }
77
78 private:
79 unsigned WorstRow, WorstCol;
80 bool* UnsafeRows;
81 bool* UnsafeCols;
82 };
83
84 class NodeMetadata {
85 public:
86 typedef std::vector<unsigned> OptionToRegMap;
87
88 typedef enum { Unprocessed,
89 OptimallyReducible,
90 ConservativelyAllocatable,
91 NotProvablyAllocatable } ReductionState;
92
93 NodeMetadata() : RS(Unprocessed), DeniedOpts(0), OptUnsafeEdges(nullptr){}
94 ~NodeMetadata() { delete[] OptUnsafeEdges; }
95
96 void setVReg(unsigned VReg) { this->VReg = VReg; }
97 unsigned getVReg() const { return VReg; }
98
99 void setOptionRegs(OptionToRegMap OptionRegs) {
100 this->OptionRegs = std::move(OptionRegs);
101 }
102 const OptionToRegMap& getOptionRegs() const { return OptionRegs; }
103
104 void setup(const Vector& Costs) {
105 NumOpts = Costs.getLength() - 1;
106 OptUnsafeEdges = new unsigned[NumOpts]();
107 }
108
109 ReductionState getReductionState() const { return RS; }
110 void setReductionState(ReductionState RS) { this->RS = RS; }
111
112 void handleAddEdge(const MatrixMetadata& MD, bool Transpose) {
113 DeniedOpts += Transpose ? MD.getWorstCol() : MD.getWorstRow();
114 const bool* UnsafeOpts =
115 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
116 for (unsigned i = 0; i < NumOpts; ++i)
117 OptUnsafeEdges[i] += UnsafeOpts[i];
118 }
119
120 void handleRemoveEdge(const MatrixMetadata& MD, bool Transpose) {
121 DeniedOpts -= Transpose ? MD.getWorstCol() : MD.getWorstRow();
122 const bool* UnsafeOpts =
123 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
124 for (unsigned i = 0; i < NumOpts; ++i)
125 OptUnsafeEdges[i] -= UnsafeOpts[i];
126 }
127
128 bool isConservativelyAllocatable() const {
129 return (DeniedOpts < NumOpts) ||
130 (std::find(OptUnsafeEdges, OptUnsafeEdges + NumOpts, 0) !=
131 OptUnsafeEdges + NumOpts);
132 }
133
134 private:
135 ReductionState RS;
136 unsigned NumOpts;
137 unsigned DeniedOpts;
138 unsigned* OptUnsafeEdges;
139 unsigned VReg;
140 OptionToRegMap OptionRegs;
141 };
142
143 class RegAllocSolverImpl {
144 private:
145 typedef PBQP::MDMatrix<MatrixMetadata> RAMatrix;
146 public:
147 typedef PBQP::Vector RawVector;
148 typedef PBQP::Matrix RawMatrix;
149 typedef PBQP::Vector Vector;
150 typedef RAMatrix Matrix;
151 typedef PBQP::PoolCostAllocator<
152 Vector, PBQP::VectorComparator,
153 Matrix, PBQP::MatrixComparator> CostAllocator;
154
155 typedef PBQP::GraphBase::NodeId NodeId;
156 typedef PBQP::GraphBase::EdgeId EdgeId;
157
158 typedef RegAlloc::NodeMetadata NodeMetadata;
159
160 struct EdgeMetadata { };
161
162 class GraphMetadata {
163 public:
164 GraphMetadata(MachineFunction &MF,
165 LiveIntervals &LIS,
166 MachineBlockFrequencyInfo &MBFI)
167 : MF(MF), LIS(LIS), MBFI(MBFI) {}
168
169 MachineFunction &MF;
170 LiveIntervals &LIS;
171 MachineBlockFrequencyInfo &MBFI;
172
173 void setNodeIdForVReg(unsigned VReg, GraphBase::NodeId NId) {
174 VRegToNodeId[VReg] = NId;
175 }
176
177 GraphBase::NodeId getNodeIdForVReg(unsigned VReg) const {
178 auto VRegItr = VRegToNodeId.find(VReg);
179 if (VRegItr == VRegToNodeId.end())
180 return GraphBase::invalidNodeId();
181 return VRegItr->second;
182 }
183
184 void eraseNodeIdForVReg(unsigned VReg) {
185 VRegToNodeId.erase(VReg);
186 }
187
188 private:
189 DenseMap<unsigned, GraphBase::NodeId> VRegToNodeId;
190 };
191
192 typedef PBQP::Graph<RegAllocSolverImpl> Graph;
193
194 RegAllocSolverImpl(Graph &G) : G(G) {}
195
196 Solution solve() {
197 G.setSolver(*this);
198 Solution S;
199 setup();
200 S = backpropagate(G, reduce());
201 G.unsetSolver();
202 return S;
203 }
204
205 void handleAddNode(NodeId NId) {
206 G.getNodeMetadata(NId).setup(G.getNodeCosts(NId));
207 }
208 void handleRemoveNode(NodeId NId) {}
209 void handleSetNodeCosts(NodeId NId, const Vector& newCosts) {}
210
211 void handleAddEdge(EdgeId EId) {
212 handleReconnectEdge(EId, G.getEdgeNode1Id(EId));
213 handleReconnectEdge(EId, G.getEdgeNode2Id(EId));
214 }
215
216 void handleRemoveEdge(EdgeId EId) {
217 handleDisconnectEdge(EId, G.getEdgeNode1Id(EId));
218 handleDisconnectEdge(EId, G.getEdgeNode2Id(EId));
219 }
220
221 void handleDisconnectEdge(EdgeId EId, NodeId NId) {
222 NodeMetadata& NMd = G.getNodeMetadata(NId);
223 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
224 NMd.handleRemoveEdge(MMd, NId == G.getEdgeNode2Id(EId));
225 if (G.getNodeDegree(NId) == 3) {
226 // This node is becoming optimally reducible.
227 moveToOptimallyReducibleNodes(NId);
228 } else if (NMd.getReductionState() ==
229 NodeMetadata::NotProvablyAllocatable &&
230 NMd.isConservativelyAllocatable()) {
231 // This node just became conservatively allocatable.
232 moveToConservativelyAllocatableNodes(NId);
233 }
234 }
235
236 void handleReconnectEdge(EdgeId EId, NodeId NId) {
237 NodeMetadata& NMd = G.getNodeMetadata(NId);
238 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
239 NMd.handleAddEdge(MMd, NId == G.getEdgeNode2Id(EId));
240 }
241
242 void handleSetEdgeCosts(EdgeId EId, const Matrix& NewCosts) {
243 handleRemoveEdge(EId);
244
245 NodeId N1Id = G.getEdgeNode1Id(EId);
246 NodeId N2Id = G.getEdgeNode2Id(EId);
247 NodeMetadata& N1Md = G.getNodeMetadata(N1Id);
248 NodeMetadata& N2Md = G.getNodeMetadata(N2Id);
249 const MatrixMetadata& MMd = NewCosts.getMetadata();
250 N1Md.handleAddEdge(MMd, N1Id != G.getEdgeNode1Id(EId));
251 N2Md.handleAddEdge(MMd, N2Id != G.getEdgeNode1Id(EId));
252 }
253
254 private:
255
256 void removeFromCurrentSet(NodeId NId) {
257 switch (G.getNodeMetadata(NId).getReductionState()) {
258 case NodeMetadata::Unprocessed: break;
259 case NodeMetadata::OptimallyReducible:
260 assert(OptimallyReducibleNodes.find(NId) !=
261 OptimallyReducibleNodes.end() &&
262 "Node not in optimally reducible set.");
263 OptimallyReducibleNodes.erase(NId);
264 break;
265 case NodeMetadata::ConservativelyAllocatable:
266 assert(ConservativelyAllocatableNodes.find(NId) !=
267 ConservativelyAllocatableNodes.end() &&
268 "Node not in conservatively allocatable set.");
269 ConservativelyAllocatableNodes.erase(NId);
270 break;
271 case NodeMetadata::NotProvablyAllocatable:
272 assert(NotProvablyAllocatableNodes.find(NId) !=
273 NotProvablyAllocatableNodes.end() &&
274 "Node not in not-provably-allocatable set.");
275 NotProvablyAllocatableNodes.erase(NId);
276 break;
277 }
278 }
279
280 void moveToOptimallyReducibleNodes(NodeId NId) {
281 removeFromCurrentSet(NId);
282 OptimallyReducibleNodes.insert(NId);
283 G.getNodeMetadata(NId).setReductionState(
284 NodeMetadata::OptimallyReducible);
285 }
286
287 void moveToConservativelyAllocatableNodes(NodeId NId) {
288 removeFromCurrentSet(NId);
289 ConservativelyAllocatableNodes.insert(NId);
290 G.getNodeMetadata(NId).setReductionState(
291 NodeMetadata::ConservativelyAllocatable);
292 }
293
294 void moveToNotProvablyAllocatableNodes(NodeId NId) {
295 removeFromCurrentSet(NId);
296 NotProvablyAllocatableNodes.insert(NId);
297 G.getNodeMetadata(NId).setReductionState(
298 NodeMetadata::NotProvablyAllocatable);
299 }
300
301 void setup() {
302 // Set up worklists.
303 for (auto NId : G.nodeIds()) {
304 if (G.getNodeDegree(NId) < 3)
305 moveToOptimallyReducibleNodes(NId);
306 else if (G.getNodeMetadata(NId).isConservativelyAllocatable())
307 moveToConservativelyAllocatableNodes(NId);
308 else
309 moveToNotProvablyAllocatableNodes(NId);
310 }
311 }
312
313 // Compute a reduction order for the graph by iteratively applying PBQP
314 // reduction rules. Locally optimal rules are applied whenever possible (R0,
315 // R1, R2). If no locally-optimal rules apply then any conservatively
316 // allocatable node is reduced. Finally, if no conservatively allocatable
317 // node exists then the node with the lowest spill-cost:degree ratio is
318 // selected.
319 std::vector<GraphBase::NodeId> reduce() {
320 assert(!G.empty() && "Cannot reduce empty graph.");
321
322 typedef GraphBase::NodeId NodeId;
323 std::vector<NodeId> NodeStack;
324
325 // Consume worklists.
326 while (true) {
327 if (!OptimallyReducibleNodes.empty()) {
328 NodeSet::iterator NItr = OptimallyReducibleNodes.begin();
329 NodeId NId = *NItr;
330 OptimallyReducibleNodes.erase(NItr);
331 NodeStack.push_back(NId);
332 switch (G.getNodeDegree(NId)) {
333 case 0:
334 break;
335 case 1:
336 applyR1(G, NId);
337 break;
338 case 2:
339 applyR2(G, NId);
340 break;
341 default: llvm_unreachable("Not an optimally reducible node.");
342 }
343 } else if (!ConservativelyAllocatableNodes.empty()) {
344 // Conservatively allocatable nodes will never spill. For now just
345 // take the first node in the set and push it on the stack. When we
346 // start optimizing more heavily for register preferencing, it may
347 // be better to push nodes with lower 'expected' or worst-case
348 // register costs first (since early nodes are the most
349 // constrained).
350 NodeSet::iterator NItr = ConservativelyAllocatableNodes.begin();
351 NodeId NId = *NItr;
352 ConservativelyAllocatableNodes.erase(NItr);
353 NodeStack.push_back(NId);
354 G.disconnectAllNeighborsFromNode(NId);
355
356 } else if (!NotProvablyAllocatableNodes.empty()) {
357 NodeSet::iterator NItr =
358 std::min_element(NotProvablyAllocatableNodes.begin(),
359 NotProvablyAllocatableNodes.end(),
360 SpillCostComparator(G));
361 NodeId NId = *NItr;
362 NotProvablyAllocatableNodes.erase(NItr);
363 NodeStack.push_back(NId);
364 G.disconnectAllNeighborsFromNode(NId);
365 } else
366 break;
367 }
368
369 return NodeStack;
370 }
371
372 class SpillCostComparator {
373 public:
374 SpillCostComparator(const Graph& G) : G(G) {}
375 bool operator()(NodeId N1Id, NodeId N2Id) {
376 PBQPNum N1SC = G.getNodeCosts(N1Id)[0] / G.getNodeDegree(N1Id);
377 PBQPNum N2SC = G.getNodeCosts(N2Id)[0] / G.getNodeDegree(N2Id);
378 return N1SC < N2SC;
379 }
380 private:
381 const Graph& G;
382 };
383
384 Graph& G;
385 typedef std::set<NodeId> NodeSet;
386 NodeSet OptimallyReducibleNodes;
387 NodeSet ConservativelyAllocatableNodes;
388 NodeSet NotProvablyAllocatableNodes;
389 };
390
391 class PBQPRAGraph : public PBQP::Graph<RegAllocSolverImpl> {
392 private:
393 typedef PBQP::Graph<RegAllocSolverImpl> BaseT;
394 public:
395 PBQPRAGraph(GraphMetadata Metadata) : BaseT(Metadata) {}
396 };
397
398 inline Solution solve(PBQPRAGraph& G) {
399 if (G.empty())
400 return Solution();
401 RegAllocSolverImpl RegAllocSolver(G);
402 return RegAllocSolver.solve();
403 }
404 } // namespace RegAlloc
405 } // namespace PBQP
406 } // namespace llvm
407
408 #endif // LLVM_CODEGEN_PBQP_REGALLOCSOLVER_H
1717
1818 #include "llvm/CodeGen/MachineFunctionPass.h"
1919 #include "llvm/CodeGen/PBQPRAConstraint.h"
20 #include "llvm/CodeGen/PBQP/RegAllocSolver.h"
20 #include "llvm/CodeGen/PBQP/CostAllocator.h"
21 #include "llvm/CodeGen/PBQP/ReductionRules.h"
22 #include "llvm/Support/ErrorHandling.h"
2123
2224 namespace llvm {
23
24 /// @brief Create a PBQP register allocator instance.
25 FunctionPass *
26 createPBQPRegisterAllocator(char *customPassID = nullptr);
25 namespace PBQP {
26 namespace RegAlloc {
27
28 /// @brief Spill option index.
29 inline unsigned getSpillOptionIdx() { return 0; }
30
31 /// \brief Metadata to speed allocatability test.
32 ///
33 /// Keeps track of the number of infinities in each row and column.
34 class MatrixMetadata {
35 private:
36 MatrixMetadata(const MatrixMetadata&);
37 void operator=(const MatrixMetadata&);
38 public:
39 MatrixMetadata(const Matrix& M)
40 : WorstRow(0), WorstCol(0),
41 UnsafeRows(new bool[M.getRows() - 1]()),
42 UnsafeCols(new bool[M.getCols() - 1]()) {
43
44 unsigned* ColCounts = new unsigned[M.getCols() - 1]();
45
46 for (unsigned i = 1; i < M.getRows(); ++i) {
47 unsigned RowCount = 0;
48 for (unsigned j = 1; j < M.getCols(); ++j) {
49 if (M[i][j] == std::numeric_limits<PBQPNum>::infinity()) {
50 ++RowCount;
51 ++ColCounts[j - 1];
52 UnsafeRows[i - 1] = true;
53 UnsafeCols[j - 1] = true;
54 }
55 }
56 WorstRow = std::max(WorstRow, RowCount);
57 }
58 unsigned WorstColCountForCurRow =
59 *std::max_element(ColCounts, ColCounts + M.getCols() - 1);
60 WorstCol = std::max(WorstCol, WorstColCountForCurRow);
61 delete[] ColCounts;
62 }
63
64 ~MatrixMetadata() {
65 delete[] UnsafeRows;
66 delete[] UnsafeCols;
67 }
68
69 unsigned getWorstRow() const { return WorstRow; }
70 unsigned getWorstCol() const { return WorstCol; }
71 const bool* getUnsafeRows() const { return UnsafeRows; }
72 const bool* getUnsafeCols() const { return UnsafeCols; }
73
74 private:
75 unsigned WorstRow, WorstCol;
76 bool* UnsafeRows;
77 bool* UnsafeCols;
78 };
79
80 class NodeMetadata {
81 public:
82 typedef std::vector<unsigned> OptionToRegMap;
83
84 typedef enum { Unprocessed,
85 OptimallyReducible,
86 ConservativelyAllocatable,
87 NotProvablyAllocatable } ReductionState;
88
89 NodeMetadata() : RS(Unprocessed), DeniedOpts(0), OptUnsafeEdges(nullptr){}
90 ~NodeMetadata() { delete[] OptUnsafeEdges; }
91
92 void setVReg(unsigned VReg) { this->VReg = VReg; }
93 unsigned getVReg() const { return VReg; }
94
95 void setOptionRegs(OptionToRegMap OptionRegs) {
96 this->OptionRegs = std::move(OptionRegs);
97 }
98 const OptionToRegMap& getOptionRegs() const { return OptionRegs; }
99
100 void setup(const Vector& Costs) {
101 NumOpts = Costs.getLength() - 1;
102 OptUnsafeEdges = new unsigned[NumOpts]();
103 }
104
105 ReductionState getReductionState() const { return RS; }
106 void setReductionState(ReductionState RS) { this->RS = RS; }
107
108 void handleAddEdge(const MatrixMetadata& MD, bool Transpose) {
109 DeniedOpts += Transpose ? MD.getWorstCol() : MD.getWorstRow();
110 const bool* UnsafeOpts =
111 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
112 for (unsigned i = 0; i < NumOpts; ++i)
113 OptUnsafeEdges[i] += UnsafeOpts[i];
114 }
115
116 void handleRemoveEdge(const MatrixMetadata& MD, bool Transpose) {
117 DeniedOpts -= Transpose ? MD.getWorstCol() : MD.getWorstRow();
118 const bool* UnsafeOpts =
119 Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
120 for (unsigned i = 0; i < NumOpts; ++i)
121 OptUnsafeEdges[i] -= UnsafeOpts[i];
122 }
123
124 bool isConservativelyAllocatable() const {
125 return (DeniedOpts < NumOpts) ||
126 (std::find(OptUnsafeEdges, OptUnsafeEdges + NumOpts, 0) !=
127 OptUnsafeEdges + NumOpts);
128 }
129
130 private:
131 ReductionState RS;
132 unsigned NumOpts;
133 unsigned DeniedOpts;
134 unsigned* OptUnsafeEdges;
135 unsigned VReg;
136 OptionToRegMap OptionRegs;
137 };
138
139 class RegAllocSolverImpl {
140 private:
141 typedef MDMatrix<MatrixMetadata> RAMatrix;
142 public:
143 typedef PBQP::Vector RawVector;
144 typedef PBQP::Matrix RawMatrix;
145 typedef PBQP::Vector Vector;
146 typedef RAMatrix Matrix;
147 typedef PBQP::PoolCostAllocator<
148 Vector, PBQP::VectorComparator,
149 Matrix, PBQP::MatrixComparator> CostAllocator;
150
151 typedef GraphBase::NodeId NodeId;
152 typedef GraphBase::EdgeId EdgeId;
153
154 typedef RegAlloc::NodeMetadata NodeMetadata;
155
156 struct EdgeMetadata { };
157
158 class GraphMetadata {
159 public:
160 GraphMetadata(MachineFunction &MF,
161 LiveIntervals &LIS,
162 MachineBlockFrequencyInfo &MBFI)
163 : MF(MF), LIS(LIS), MBFI(MBFI) {}
164
165 MachineFunction &MF;
166 LiveIntervals &LIS;
167 MachineBlockFrequencyInfo &MBFI;
168
169 void setNodeIdForVReg(unsigned VReg, GraphBase::NodeId NId) {
170 VRegToNodeId[VReg] = NId;
171 }
172
173 GraphBase::NodeId getNodeIdForVReg(unsigned VReg) const {
174 auto VRegItr = VRegToNodeId.find(VReg);
175 if (VRegItr == VRegToNodeId.end())
176 return GraphBase::invalidNodeId();
177 return VRegItr->second;
178 }
179
180 void eraseNodeIdForVReg(unsigned VReg) {
181 VRegToNodeId.erase(VReg);
182 }
183
184 private:
185 DenseMap<unsigned, GraphBase::NodeId> VRegToNodeId;
186 };
187
188 typedef PBQP::Graph<RegAllocSolverImpl> Graph;
189
190 RegAllocSolverImpl(Graph &G) : G(G) {}
191
192 Solution solve() {
193 G.setSolver(*this);
194 Solution S;
195 setup();
196 S = backpropagate(G, reduce());
197 G.unsetSolver();
198 return S;
199 }
200
201 void handleAddNode(NodeId NId) {
202 G.getNodeMetadata(NId).setup(G.getNodeCosts(NId));
203 }
204 void handleRemoveNode(NodeId NId) {}
205 void handleSetNodeCosts(NodeId NId, const Vector& newCosts) {}
206
207 void handleAddEdge(EdgeId EId) {
208 handleReconnectEdge(EId, G.getEdgeNode1Id(EId));
209 handleReconnectEdge(EId, G.getEdgeNode2Id(EId));
210 }
211
212 void handleRemoveEdge(EdgeId EId) {
213 handleDisconnectEdge(EId, G.getEdgeNode1Id(EId));
214 handleDisconnectEdge(EId, G.getEdgeNode2Id(EId));
215 }
216
217 void handleDisconnectEdge(EdgeId EId, NodeId NId) {
218 NodeMetadata& NMd = G.getNodeMetadata(NId);
219 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
220 NMd.handleRemoveEdge(MMd, NId == G.getEdgeNode2Id(EId));
221 if (G.getNodeDegree(NId) == 3) {
222 // This node is becoming optimally reducible.
223 moveToOptimallyReducibleNodes(NId);
224 } else if (NMd.getReductionState() ==
225 NodeMetadata::NotProvablyAllocatable &&
226 NMd.isConservativelyAllocatable()) {
227 // This node just became conservatively allocatable.
228 moveToConservativelyAllocatableNodes(NId);
229 }
230 }
231
232 void handleReconnectEdge(EdgeId EId, NodeId NId) {
233 NodeMetadata& NMd = G.getNodeMetadata(NId);
234 const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
235 NMd.handleAddEdge(MMd, NId == G.getEdgeNode2Id(EId));
236 }
237
238 void handleSetEdgeCosts(EdgeId EId, const Matrix& NewCosts) {
239 handleRemoveEdge(EId);
240
241 NodeId N1Id = G.getEdgeNode1Id(EId);
242 NodeId N2Id = G.getEdgeNode2Id(EId);
243 NodeMetadata& N1Md = G.getNodeMetadata(N1Id);
244 NodeMetadata& N2Md = G.getNodeMetadata(N2Id);
245 const MatrixMetadata& MMd = NewCosts.getMetadata();
246 N1Md.handleAddEdge(MMd, N1Id != G.getEdgeNode1Id(EId));
247 N2Md.handleAddEdge(MMd, N2Id != G.getEdgeNode1Id(EId));
248 }
249
250 private:
251
252 void removeFromCurrentSet(NodeId NId) {
253 switch (G.getNodeMetadata(NId).getReductionState()) {
254 case NodeMetadata::Unprocessed: break;
255 case NodeMetadata::OptimallyReducible:
256 assert(OptimallyReducibleNodes.find(NId) !=
257 OptimallyReducibleNodes.end() &&
258 "Node not in optimally reducible set.");
259 OptimallyReducibleNodes.erase(NId);
260 break;
261 case NodeMetadata::ConservativelyAllocatable:
262 assert(ConservativelyAllocatableNodes.find(NId) !=
263 ConservativelyAllocatableNodes.end() &&
264 "Node not in conservatively allocatable set.");
265 ConservativelyAllocatableNodes.erase(NId);
266 break;
267 case NodeMetadata::NotProvablyAllocatable:
268 assert(NotProvablyAllocatableNodes.find(NId) !=
269 NotProvablyAllocatableNodes.end() &&
270 "Node not in not-provably-allocatable set.");
271 NotProvablyAllocatableNodes.erase(NId);
272 break;
273 }
274 }
275
276 void moveToOptimallyReducibleNodes(NodeId NId) {
277 removeFromCurrentSet(NId);
278 OptimallyReducibleNodes.insert(NId);
279 G.getNodeMetadata(NId).setReductionState(
280 NodeMetadata::OptimallyReducible);
281 }
282
283 void moveToConservativelyAllocatableNodes(NodeId NId) {
284 removeFromCurrentSet(NId);
285 ConservativelyAllocatableNodes.insert(NId);
286 G.getNodeMetadata(NId).setReductionState(
287 NodeMetadata::ConservativelyAllocatable);
288 }
289
290 void moveToNotProvablyAllocatableNodes(NodeId NId) {
291 removeFromCurrentSet(NId);
292 NotProvablyAllocatableNodes.insert(NId);
293 G.getNodeMetadata(NId).setReductionState(
294 NodeMetadata::NotProvablyAllocatable);
295 }
296
297 void setup() {
298 // Set up worklists.
299 for (auto NId : G.nodeIds()) {
300 if (G.getNodeDegree(NId) < 3)
301 moveToOptimallyReducibleNodes(NId);
302 else if (G.getNodeMetadata(NId).isConservativelyAllocatable())
303 moveToConservativelyAllocatableNodes(NId);
304 else
305 moveToNotProvablyAllocatableNodes(NId);
306 }
307 }
308
309 // Compute a reduction order for the graph by iteratively applying PBQP
310 // reduction rules. Locally optimal rules are applied whenever possible (R0,
311 // R1, R2). If no locally-optimal rules apply then any conservatively
312 // allocatable node is reduced. Finally, if no conservatively allocatable
313 // node exists then the node with the lowest spill-cost:degree ratio is
314 // selected.
315 std::vector<GraphBase::NodeId> reduce() {
316 assert(!G.empty() && "Cannot reduce empty graph.");
317
318 typedef GraphBase::NodeId NodeId;
319 std::vector<NodeId> NodeStack;
320
321 // Consume worklists.
322 while (true) {
323 if (!OptimallyReducibleNodes.empty()) {
324 NodeSet::iterator NItr = OptimallyReducibleNodes.begin();
325 NodeId NId = *NItr;
326 OptimallyReducibleNodes.erase(NItr);
327 NodeStack.push_back(NId);
328 switch (G.getNodeDegree(NId)) {
329 case 0:
330 break;
331 case 1:
332 applyR1(G, NId);
333 break;
334 case 2:
335 applyR2(G, NId);
336 break;
337 default: llvm_unreachable("Not an optimally reducible node.");
338 }
339 } else if (!ConservativelyAllocatableNodes.empty()) {
340 // Conservatively allocatable nodes will never spill. For now just
341 // take the first node in the set and push it on the stack. When we
342 // start optimizing more heavily for register preferencing, it may
343 // be better to push nodes with lower 'expected' or worst-case
344 // register costs first (since early nodes are the most
345 // constrained).
346 NodeSet::iterator NItr = ConservativelyAllocatableNodes.begin();
347 NodeId NId = *NItr;
348 ConservativelyAllocatableNodes.erase(NItr);
349 NodeStack.push_back(NId);
350 G.disconnectAllNeighborsFromNode(NId);
351
352 } else if (!NotProvablyAllocatableNodes.empty()) {
353 NodeSet::iterator NItr =
354 std::min_element(NotProvablyAllocatableNodes.begin(),
355 NotProvablyAllocatableNodes.end(),
356 SpillCostComparator(G));
357 NodeId NId = *NItr;
358 NotProvablyAllocatableNodes.erase(NItr);
359 NodeStack.push_back(NId);
360 G.disconnectAllNeighborsFromNode(NId);
361 } else
362 break;
363 }
364
365 return NodeStack;
366 }
367
368 class SpillCostComparator {
369 public:
370 SpillCostComparator(const Graph& G) : G(G) {}
371 bool operator()(NodeId N1Id, NodeId N2Id) {
372 PBQPNum N1SC = G.getNodeCosts(N1Id)[0] / G.getNodeDegree(N1Id);
373 PBQPNum N2SC = G.getNodeCosts(N2Id)[0] / G.getNodeDegree(N2Id);
374 return N1SC < N2SC;
375 }
376 private:
377 const Graph& G;
378 };
379
380 Graph& G;
381 typedef std::set<NodeId> NodeSet;
382 NodeSet OptimallyReducibleNodes;
383 NodeSet ConservativelyAllocatableNodes;
384 NodeSet NotProvablyAllocatableNodes;
385 };
386
387 class PBQPRAGraph : public PBQP::Graph<RegAllocSolverImpl> {
388 private:
389 typedef PBQP::Graph<RegAllocSolverImpl> BaseT;
390 public:
391 PBQPRAGraph(GraphMetadata Metadata) : BaseT(Metadata) {}
392 };
393
394 inline Solution solve(PBQPRAGraph& G) {
395 if (G.empty())
396 return Solution();
397 RegAllocSolverImpl RegAllocSolver(G);
398 return RegAllocSolver.solve();
27399 }
28400
401 } // namespace RegAlloc
402 } // namespace PBQP
403
404 /// @brief Create a PBQP register allocator instance.
405 FunctionPass *
406 createPBQPRegisterAllocator(char *customPassID = nullptr);
407
408 } // namespace llvm
409
29410 #endif /* LLVM_CODEGEN_REGALLOCPBQP_H */