llvm.org GIT mirror · llvm / 59282d3
GlobalISel: move type information to MachineRegisterInfo.

We want each register to have a canonical type, which means the best place to store this is in MachineRegisterInfo rather than on every MachineInstr that happens to use or define that register.

Most changes following from this are pretty simple (you need an MRI anyway if you're going to be doing any transformations, so just check the type there). But legalization doesn't really want to check redundant operands (when, for example, a G_ADD only ever has one type) so I've made use of MCInstrDesc's operand type field to encode these constraints and limit legalization's work. As an added bonus, more validation is possible, both in MachineVerifier and MachineIRBuilder (coming soon).

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@281035 91177308-0d34-0410-b5e6-96231b3b80d8

Tim Northover · 3 years ago
50 changed files with 1268 additions and 1358 deletions.
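Before the diff, a minimal sketch of the new lookup this commit enables. It is not code from the commit; the helper name collectGenericTypes and its SmallVector output parameter are illustrative assumptions. The idea it demonstrates is taken from the patch itself: the canonical LLT now lives on each virtual register in MachineRegisterInfo, and the MCOperandInfo changes below tag generic operands with a type index so operands constrained to the same type (e.g. all three G_ADD operands) map to a single slot. This is roughly the query that the new getAction(const MachineInstr &, const MachineRegisterInfo &) overload has to perform.

#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/MC/MCInstrDesc.h"

// Hypothetical helper (not part of r281035): gather the distinct LLTs of a
// generic MachineInstr from the register types stored in MRI, keyed by the
// OPERAND_GENERIC_* index recorded in each MCOperandInfo.
static void collectGenericTypes(const llvm::MachineInstr &MI,
                                const llvm::MachineRegisterInfo &MRI,
                                llvm::SmallVectorImpl<llvm::LLT> &Types) {
  const llvm::MCInstrDesc &MCID = MI.getDesc();
  for (unsigned i = 0, e = MCID.getNumOperands(); i != e; ++i) {
    const llvm::MCOperandInfo &OpInfo = MCID.OpInfo[i];
    // Only operands tagged OPERAND_GENERIC_* carry a type; operands sharing
    // an index (e.g. type0 on every G_ADD operand) must have the same LLT,
    // so legalization only needs to inspect one register per index.
    if (!OpInfo.isGenericType())
      continue;
    unsigned TyIdx = OpInfo.getGenericTypeIndex();
    if (Types.size() <= TyIdx)
      Types.resize(TyIdx + 1, llvm::LLT{});
    // The canonical type now lives on the virtual register, not on the MI.
    Types[TyIdx] = MRI.getType(MI.getOperand(i).getReg());
  }
}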
3838 MachineFunction *MF;
3939 /// Information used to access the description of the opcodes.
4040 const TargetInstrInfo *TII;
41 /// Information used to verify types are consistent.
42 const MachineRegisterInfo *MRI;
4143 /// Debug location to be set to any instruction we create.
4244 DebugLoc DL;
4345
5557 return *TII;
5658 }
5759
58 void validateTruncExt(ArrayRef<LLT> Tys, bool IsExtend);
60 void validateTruncExt(unsigned Dst, unsigned Src, bool IsExtend);
5961
6062 public:
6163 /// Getter for the function we currently build.
110112 /// \pre Ty == LLT{} or isPreISelGenericOpcode(Opcode)
111113 ///
112114 /// \return a MachineInstrBuilder for the newly created instruction.
113 MachineInstrBuilder buildInstr(unsigned Opcode, ArrayRef<LLT> Tys);
114
115 /// Build and insert = \p Opcode .
116 ///
117 /// \pre setBasicBlock or setMI must have been called.
118 /// \pre not isPreISelGenericOpcode(\p Opcode)
119 ///
120 /// \return a MachineInstrBuilder for the newly created instruction.
121 MachineInstrBuilder buildInstr(unsigned Opcode) {
122 return buildInstr(Opcode, ArrayRef<LLT>());
123 }
115 MachineInstrBuilder buildInstr(unsigned Opcode);
124116
125117 /// Build and insert \p Res = G_FRAME_INDEX \p Ty \p Idx
126118 ///
130122 /// \pre setBasicBlock or setMI must have been called.
131123 ///
132124 /// \return a MachineInstrBuilder for the newly created instruction.
133 MachineInstrBuilder buildFrameIndex(LLT Ty, unsigned Res, int Idx);
125 MachineInstrBuilder buildFrameIndex(unsigned Res, int Idx);
134126
135127 /// Build and insert \p Res = G_ADD \p Ty \p Op0, \p Op1
136128 ///
140132 /// \pre setBasicBlock or setMI must have been called.
141133 ///
142134 /// \return a MachineInstrBuilder for the newly created instruction.
143 MachineInstrBuilder buildAdd(LLT Ty, unsigned Res, unsigned Op0,
144 unsigned Op1);
135 MachineInstrBuilder buildAdd(unsigned Res, unsigned Op0,
136 unsigned Op1);
145137
146138 /// Build and insert \p Res = G_SUB \p Ty \p Op0, \p Op1
147139 ///
151143 /// \pre setBasicBlock or setMI must have been called.
152144 ///
153145 /// \return a MachineInstrBuilder for the newly created instruction.
154 MachineInstrBuilder buildSub(LLT Ty, unsigned Res, unsigned Op0,
146 MachineInstrBuilder buildSub(unsigned Res, unsigned Op0,
155147 unsigned Op1);
156148
157149 /// Build and insert \p Res = G_MUL \p Ty \p Op0, \p Op1
162154 /// \pre setBasicBlock or setMI must have been called.
163155 ///
164156 /// \return a MachineInstrBuilder for the newly created instruction.
165 MachineInstrBuilder buildMul(LLT Ty, unsigned Res, unsigned Op0,
157 MachineInstrBuilder buildMul(unsigned Res, unsigned Op0,
166158 unsigned Op1);
167159
168160 /// Build and insert \p Res, \p CarryOut = G_UADDE \p Tys \p Op0, \p Op1,
175167 /// \pre setBasicBlock or setMI must have been called.
176168 ///
177169 /// \return The newly created instruction.
178 MachineInstrBuilder buildUAdde(ArrayRef<LLT> Tys, unsigned Res,
179 unsigned CarryOut, unsigned Op0, unsigned Op1,
180 unsigned CarryIn);
170 MachineInstrBuilder buildUAdde(unsigned Res, unsigned CarryOut, unsigned Op0,
171 unsigned Op1, unsigned CarryIn);
181172
182173 /// Build and insert \p Res = G_TYPE \p Ty \p Op.
183174 ///
189180 /// register).
190181 ///
191182 /// \return The newly created instruction.
192 MachineInstrBuilder buildType(LLT Ty, unsigned Res, unsigned Op);
183 MachineInstrBuilder buildType(unsigned Res, unsigned Op);
193184
194185 /// Build and insert \p Res = G_ANYEXT \p { DstTy, SrcTy } \p Op0
195186 ///
201192 /// \pre setBasicBlock or setMI must have been called.
202193 ///
203194 /// \return The newly created instruction.
204 MachineInstrBuilder buildAnyExt(ArrayRef<LLT> Tys, unsigned Res, unsigned Op);
195 MachineInstrBuilder buildAnyExt(unsigned Res, unsigned Op);
205196
206197 /// Build and insert \p Res = G_SEXT \p { DstTy, SrcTy }\p Op
207198 ///
212203 /// \pre setBasicBlock or setMI must have been called.
213204 ///
214205 /// \return The newly created instruction.
215 MachineInstrBuilder buildSExt(ArrayRef<LLT> Tys, unsigned Res, unsigned Op);
206 MachineInstrBuilder buildSExt(unsigned Res, unsigned Op);
216207
217208 /// Build and insert \p Res = G_ZEXT \p { DstTy, SrcTy } \p Op
218209 ///
223214 /// \pre setBasicBlock or setMI must have been called.
224215 ///
225216 /// \return The newly created instruction.
226 MachineInstrBuilder buildZExt(ArrayRef<LLT> Tys, unsigned Res, unsigned Op);
217 MachineInstrBuilder buildZExt(unsigned Res, unsigned Op);
227218
228219 /// Build and insert G_BR unsized \p Dest
229220 ///
244235 /// \pre setBasicBlock or setMI must have been called.
245236 ///
246237 /// \return The newly created instruction.
247 MachineInstrBuilder buildBrCond(LLT Ty, unsigned Tst, MachineBasicBlock &BB);
238 MachineInstrBuilder buildBrCond(unsigned Tst, MachineBasicBlock &BB);
248239
249240 /// Build and insert \p Res = G_CONSTANT \p Ty \p Val
250241 ///
253244 /// \pre setBasicBlock or setMI must have been called.
254245 ///
255246 /// \return The newly created instruction.
256 MachineInstrBuilder buildConstant(LLT Ty, unsigned Res, int64_t Val);
247 MachineInstrBuilder buildConstant(unsigned Res, int64_t Val);
257248
258249 /// Build and insert \p Res = G_FCONSTANT \p Ty \p Val
259250 ///
263254 /// \pre setBasicBlock or setMI must have been called.
264255 ///
265256 /// \return The newly created instruction.
266 MachineInstrBuilder buildFConstant(LLT Ty, unsigned Res,
267 const ConstantFP &Val);
257 MachineInstrBuilder buildFConstant(unsigned Res, const ConstantFP &Val);
268258
269259 /// Build and insert \p Res = COPY Op
270260 ///
283273 /// \pre setBasicBlock or setMI must have been called.
284274 ///
285275 /// \return a MachineInstrBuilder for the newly created instruction.
286 MachineInstrBuilder buildLoad(LLT VTy, LLT PTy, unsigned Res, unsigned Addr,
276 MachineInstrBuilder buildLoad(unsigned Res, unsigned Addr,
287277 MachineMemOperand &MMO);
288278
289279 /// Build and insert `G_STORE { VTy, PTy } Val, Addr, MMO`.
294284 /// \pre setBasicBlock or setMI must have been called.
295285 ///
296286 /// \return a MachineInstrBuilder for the newly created instruction.
297 MachineInstrBuilder buildStore(LLT VTy, LLT PTy, unsigned Val, unsigned Addr,
287 MachineInstrBuilder buildStore(unsigned Val, unsigned Addr,
298288 MachineMemOperand &MMO);
299289
300290 /// Build and insert `Res0, ... = G_EXTRACT { ResTys, SrcTy } Src, Idx0,
307297 /// \pre \p Indices must be in ascending order of bit position.
308298 ///
309299 /// \return a MachineInstrBuilder for the newly created instruction.
310 MachineInstrBuilder buildExtract(ArrayRef<LLT> ResTys,
311 ArrayRef<unsigned> Results,
312 ArrayRef<uint64_t> Indices, LLT SrcTy,
313 unsigned Src);
300 MachineInstrBuilder buildExtract(ArrayRef<unsigned> Results,
301 ArrayRef<uint64_t> Indices, unsigned Src);
314302
315303 /// Build and insert \p Res = G_SEQUENCE \p { \p ResTy, \p Op0Ty, ... }
316304 /// \p Op0, \p Idx0...
326314 /// \pre \p Indices must be in ascending order of bit position.
327315 ///
328316 /// \return a MachineInstrBuilder for the newly created instruction.
329 MachineInstrBuilder buildSequence(LLT ResTy, unsigned Res,
330 ArrayRef<LLT> OpTys,
317 MachineInstrBuilder buildSequence(unsigned Res,
331318 ArrayRef<unsigned> Ops,
332319 ArrayRef<uint64_t> Indices);
333320
334321 void addUsesWithIndices(MachineInstrBuilder MIB) {}
335322
336323 template <typename... ArgTys>
337 void addUsesWithIndices(MachineInstrBuilder MIB, LLT Ty, unsigned Reg,
324 void addUsesWithIndices(MachineInstrBuilder MIB, unsigned Reg,
338325 unsigned BitIndex, ArgTys... Args) {
339326 MIB.addUse(Reg).addImm(BitIndex);
340 MIB->setType(Ty, MIB->getNumTypes());
341
342327 addUsesWithIndices(MIB, Args...);
343328 }
344329
345330 template <typename... ArgTys>
346 MachineInstrBuilder buildSequence(LLT Ty, unsigned Res, LLT OpTy, unsigned Op,
331 MachineInstrBuilder buildSequence(unsigned Res, unsigned Op,
347332 unsigned Index, ArgTys... Args) {
348333 MachineInstrBuilder MIB =
349 buildInstr(TargetOpcode::G_SEQUENCE, Ty).addDef(Res);
350 addUsesWithIndices(MIB, OpTy, Op, Index, Args...);
334 buildInstr(TargetOpcode::G_SEQUENCE).addDef(Res);
335 addUsesWithIndices(MIB, Op, Index, Args...);
351336 return MIB;
352337 }
353338
354339 template <typename... ArgTys>
355 MachineInstrBuilder buildInsert(LLT Ty, unsigned Res, unsigned Src, LLT OpTy,
340 MachineInstrBuilder buildInsert(unsigned Res, unsigned Src,
356341 unsigned Op, unsigned Index, ArgTys... Args) {
357342 MachineInstrBuilder MIB =
358 buildInstr(TargetOpcode::G_INSERT, Ty).addDef(Res).addUse(Src);
359 addUsesWithIndices(MIB, OpTy, Op, Index, Args...);
343 buildInstr(TargetOpcode::G_INSERT).addDef(Res).addUse(Src);
344 addUsesWithIndices(MIB, Op, Index, Args...);
360345 return MIB;
361346 }
362347
370355 /// \pre setBasicBlock or setMI must have been called.
371356 ///
372357 /// \return a MachineInstrBuilder for the newly created instruction.
373 MachineInstrBuilder buildIntrinsic(ArrayRef<LLT> Tys, Intrinsic::ID ID,
374 unsigned Res, bool HasSideEffects);
358 MachineInstrBuilder buildIntrinsic(Intrinsic::ID ID, unsigned Res,
359 bool HasSideEffects);
375360
376361 /// Build and insert \p Res = G_FPTRUNC \p { DstTy, SrcTy } \p Op
377362 ///
380365 /// \pre setBasicBlock or setMI must have been called.
381366 ///
382367 /// \return The newly created instruction.
383 MachineInstrBuilder buildFPTrunc(ArrayRef<LLT> Ty, unsigned Res, unsigned Op);
368 MachineInstrBuilder buildFPTrunc(unsigned Res, unsigned Op);
384369
385370 /// Build and insert \p Res = G_TRUNC \p { DstTy, SrcTy } \p Op
386371 ///
390375 /// \pre setBasicBlock or setMI must have been called.
391376 ///
392377 /// \return The newly created instruction.
393 MachineInstrBuilder buildTrunc(ArrayRef<LLT> Tys, unsigned Res, unsigned Op);
378 MachineInstrBuilder buildTrunc(unsigned Res, unsigned Op);
394379
395380 /// Build and insert a G_ICMP
396381 ///
397382 /// \pre setBasicBlock or setMI must have been called.
398383 ///
399384 /// \return a MachineInstrBuilder for the newly created instruction.
400 MachineInstrBuilder buildICmp(ArrayRef<LLT> Tys, CmpInst::Predicate Pred,
385 MachineInstrBuilder buildICmp(CmpInst::Predicate Pred,
401386 unsigned Res, unsigned Op0, unsigned Op1);
402387
403388 /// Build and insert a G_FCMP
405390 /// \pre setBasicBlock or setMI must have been called.
406391 ///
407392 /// \return a MachineInstrBuilder for the newly created instruction.
408 MachineInstrBuilder buildFCmp(ArrayRef<LLT> Tys, CmpInst::Predicate Pred,
393 MachineInstrBuilder buildFCmp(CmpInst::Predicate Pred,
409394 unsigned Res, unsigned Op0, unsigned Op1);
410395
411396 /// Build and insert a \p Res = G_SELECT { \p Ty, s1 } \p Tst, \p Op0, \p Op1
413398 /// \pre setBasicBlock or setMI must have been called.
414399 ///
415400 /// \return a MachineInstrBuilder for the newly created instruction.
416 MachineInstrBuilder buildSelect(LLT Ty, unsigned Res, unsigned Tst,
401 MachineInstrBuilder buildSelect(unsigned Res, unsigned Tst,
417402 unsigned Op0, unsigned Op1);
418403 };
419404
2424 namespace llvm {
2525 class LLVMContext;
2626 class MachineInstr;
27 class MachineRegisterInfo;
2728 class Type;
2829 class VectorType;
2930
130131 /// performed and the destination type.
131132 std::pair<LegalizeAction, LLT> getAction(const InstrAspect &Aspect) const;
132133
133 /// Determine what action should be taken to legalize the given generic instruction.
134 /// Determine what action should be taken to legalize the given generic
135 /// instruction.
134136 ///
135137 /// \returns a tuple consisting of the LegalizeAction that should be
136138 /// performed, the type-index it should be performed on and the destination
137139 /// type.
138140 std::tuple<LegalizeAction, unsigned, LLT>
139 getAction(const MachineInstr &MI) const;
141 getAction(const MachineInstr &MI, const MachineRegisterInfo &MRI) const;
140142
141143 /// Iterate the given function (typically something like doubling the width)
142144 /// on Ty until we find a legal type for this operation.
181183 return ActionIt->second;
182184 }
183185
184 bool isLegal(const MachineInstr &MI) const;
186 bool isLegal(const MachineInstr &MI, const MachineRegisterInfo &MRI) const;
185187
186188 private:
187189 static const int FirstOp = TargetOpcode::PRE_ISEL_GENERIC_OPCODE_START;
2222 #include "llvm/ADT/iterator_range.h"
2323 #include "llvm/Analysis/AliasAnalysis.h"
2424 #include "llvm/CodeGen/MachineOperand.h"
25 #include "llvm/CodeGen/LowLevelType.h"
2625 #include "llvm/IR/DebugLoc.h"
2726 #include "llvm/IR/InlineAsm.h"
2827 #include "llvm/MC/MCInstrDesc.h"
103102
104103 DebugLoc debugLoc; // Source line information.
105104
106 #ifdef LLVM_BUILD_GLOBAL_ISEL
107 /// Type of the instruction in case of a generic opcode.
108 /// \invariant This must be LLT{} if getOpcode() is not
109 /// in the range of generic opcodes.
110 SmallVector<LLT, 1> Tys;
111 #endif
112
113105 MachineInstr(const MachineInstr&) = delete;
114106 void operator=(const MachineInstr&) = delete;
115107 // Use MachineFunction::DeleteMachineInstr() instead.
184176 Flags &= ~((uint8_t)Flag);
185177 }
186178
187 /// Set the type of the instruction.
188 /// \pre getOpcode() is in the range of the generic opcodes.
189 void setType(LLT Ty, unsigned Idx = 0);
190 LLT getType(unsigned Idx = 0) const;
191 unsigned getNumTypes() const;
192 void removeTypes();
193179
194180 /// Return true if MI is in a bundle (but not the first MI in a bundle).
195181 ///
1919 #include "llvm/ADT/iterator_range.h"
2020 // PointerUnion needs to have access to the full RegisterBank type.
2121 #include "llvm/CodeGen/GlobalISel/RegisterBank.h"
22 #include "llvm/CodeGen/LowLevelType.h"
2223 #include "llvm/CodeGen/MachineFunction.h"
2324 #include "llvm/CodeGen/MachineInstrBundle.h"
2425 #include "llvm/Target/TargetRegisterInfo.h"
103104 /// started.
104105 BitVector ReservedRegs;
105106
106 typedef DenseMap<unsigned, unsigned> VRegToSizeMap;
107 typedef DenseMap<unsigned, LLT> VRegToTypeMap;
107108 /// Map generic virtual registers to their actual size.
108 mutable std::unique_ptr<VRegToSizeMap> VRegToSize;
109
110 /// Accessor for VRegToSize. This accessor should only be used
109 mutable std::unique_ptr<VRegToTypeMap> VRegToType;
110
111 /// Accessor for VRegToType. This accessor should only be used
111112 /// by global-isel related work.
112 VRegToSizeMap &getVRegToSize() const {
113 if (!VRegToSize)
114 VRegToSize.reset(new VRegToSizeMap);
115 return *VRegToSize.get();
113 VRegToTypeMap &getVRegToType() const {
114 if (!VRegToType)
115 VRegToType.reset(new VRegToTypeMap);
116 return *VRegToType.get();
116117 }
117118
118119 /// Keep track of the physical registers that are live in to the function.
640641 ///
641642 unsigned createVirtualRegister(const TargetRegisterClass *RegClass);
642643
643 /// Get the size in bits of \p VReg or 0 if VReg is not a generic
644 /// Get the low-level type of \p VReg or LLT{} if VReg is not a generic
644645 /// (target independent) virtual register.
645 unsigned getSize(unsigned VReg) const;
646
647 /// Set the size in bits of \p VReg to \p Size.
648 /// Although the size should be set at build time, mir infrastructure
649 /// is not yet able to do it.
650 void setSize(unsigned VReg, unsigned Size);
651
652 /// Create and return a new generic virtual register with a size of \p Size.
653 /// \pre Size > 0.
654 unsigned createGenericVirtualRegister(unsigned Size);
655
656 /// Remove all sizes associated to virtual registers (after instruction
646 LLT getType(unsigned VReg) const;
647
648 /// Set the low-level type of \p VReg to \p Ty.
649 void setType(unsigned VReg, LLT Ty);
650
651 /// Create and return a new generic virtual register with low-level
652 /// type \p Ty.
653 unsigned createGenericVirtualRegister(LLT Ty);
654
655 /// Remove all types associated to virtual registers (after instruction
657656 /// selection and constraining of all generic virtual registers).
658 void clearVirtRegSizes();
657 void clearVirtRegTypes();
659658
660659 /// getNumVirtRegs - Return the number of virtual registers created.
661660 ///
4646 OPERAND_REGISTER = 2,
4747 OPERAND_MEMORY = 3,
4848 OPERAND_PCREL = 4,
49 OPERAND_FIRST_TARGET = 5
49
50 OPERAND_FIRST_GENERIC = 6,
51 OPERAND_GENERIC_0 = 6,
52 OPERAND_GENERIC_1 = 7,
53 OPERAND_GENERIC_2 = 8,
54 OPERAND_GENERIC_3 = 9,
55 OPERAND_GENERIC_4 = 10,
56 OPERAND_GENERIC_5 = 11,
57 OPERAND_LAST_GENERIC = 11,
58
59 OPERAND_FIRST_TARGET = 12,
5060 };
61
62 enum GenericOperandType {
63 };
64
5165 }
5266
5367 /// \brief This holds information about one operand of a machine instruction,
8296
8397 /// \brief Set if this operand is a optional def.
8498 bool isOptionalDef() const { return Flags & (1 << MCOI::OptionalDef); }
99
100 bool isGenericType() const {
101 return OperandType >= MCOI::OPERAND_FIRST_GENERIC &&
102 OperandType <= MCOI::OPERAND_LAST_GENERIC;
103 }
104
105 unsigned getGenericTypeIndex() const {
106 assert(isGenericType() && "non-generic types don't have an index");
107 return OperandType - MCOI::OPERAND_FIRST_GENERIC;
108 }
85109 };
86110
87111 //===----------------------------------------------------------------------===//
1818 // Extend the underlying scalar type of an operation, leaving the high bits
1919 // unspecified.
2020 def G_ANYEXT : Instruction {
21 let OutOperandList = (outs unknown:$dst);
22 let InOperandList = (ins unknown:$src);
21 let OutOperandList = (outs type0:$dst);
22 let InOperandList = (ins type1:$src);
2323 let hasSideEffects = 0;
2424 }
2525
2626 // Sign extend the underlying scalar type of an operation, copying the sign bit
2727 // into the newly-created space.
2828 def G_SEXT : Instruction {
29 let OutOperandList = (outs unknown:$dst);
30 let InOperandList = (ins unknown:$src);
29 let OutOperandList = (outs type0:$dst);
30 let InOperandList = (ins type1:$src);
3131 let hasSideEffects = 0;
3232 }
3333
3434 // Zero extend the underlying scalar type of an operation, putting zero bits
3535 // into the newly-created space.
3636 def G_ZEXT : Instruction {
37 let OutOperandList = (outs unknown:$dst);
38 let InOperandList = (ins unknown:$src);
37 let OutOperandList = (outs type0:$dst);
38 let InOperandList = (ins type1:$src);
3939 let hasSideEffects = 0;
4040 }
4141
4343 // Truncate the underlying scalar type of an operation. This is equivalent to
4444 // G_EXTRACT for scalar types, but acts elementwise on vectors.
4545 def G_TRUNC : Instruction {
46 let OutOperandList = (outs unknown:$dst);
47 let InOperandList = (ins unknown:$src);
46 let OutOperandList = (outs type0:$dst);
47 let InOperandList = (ins type1:$src);
4848 let hasSideEffects = 0;
4949 }
5050
5151 def G_FRAME_INDEX : Instruction {
52 let OutOperandList = (outs unknown:$dst);
52 let OutOperandList = (outs type0:$dst);
5353 let InOperandList = (ins unknown:$src2);
5454 let hasSideEffects = 0;
5555 }
5656
5757 def G_INTTOPTR : Instruction {
58 let OutOperandList = (outs unknown:$dst);
59 let InOperandList = (ins unknown:$src);
58 let OutOperandList = (outs type0:$dst);
59 let InOperandList = (ins type1:$src);
6060 let hasSideEffects = 0;
6161 }
6262
6363 def G_PTRTOINT : Instruction {
64 let OutOperandList = (outs unknown:$dst);
65 let InOperandList = (ins unknown:$src);
64 let OutOperandList = (outs type0:$dst);
65 let InOperandList = (ins type1:$src);
6666 let hasSideEffects = 0;
6767 }
6868
6969 def G_BITCAST : Instruction {
70 let OutOperandList = (outs unknown:$dst);
71 let InOperandList = (ins unknown:$src);
70 let OutOperandList = (outs type0:$dst);
71 let InOperandList = (ins type1:$src);
7272 let hasSideEffects = 0;
7373 }
7474
7575 def G_CONSTANT : Instruction {
76 let OutOperandList = (outs unknown:$dst);
76 let OutOperandList = (outs type0:$dst);
7777 let InOperandList = (ins unknown:$imm);
7878 let hasSideEffects = 0;
7979 }
8080
8181 def G_FCONSTANT : Instruction {
82 let OutOperandList = (outs unknown:$dst);
82 let OutOperandList = (outs type0:$dst);
8383 let InOperandList = (ins unknown:$imm);
8484 let hasSideEffects = 0;
8585 }
8686
8787 def G_TYPE : Instruction {
88 let OutOperandList = (outs unknown:$dst);
88 let OutOperandList = (outs type0:$dst);
8989 let InOperandList = (ins unknown:$imm);
9090 let hasSideEffects = 0;
9191 }
9696
9797 // Generic addition.
9898 def G_ADD : Instruction {
99 let OutOperandList = (outs unknown:$dst);
100 let InOperandList = (ins unknown:$src1, unknown:$src2);
99 let OutOperandList = (outs type0:$dst);
100 let InOperandList = (ins type0:$src1, type0:$src2);
101101 let hasSideEffects = 0;
102102 let isCommutable = 1;
103103 }
104104
105105 // Generic subtraction.
106106 def G_SUB : Instruction {
107 let OutOperandList = (outs unknown:$dst);
108 let InOperandList = (ins unknown:$src1, unknown:$src2);
107 let OutOperandList = (outs type0:$dst);
108 let InOperandList = (ins type0:$src1, type0:$src2);
109109 let hasSideEffects = 0;
110110 let isCommutable = 0;
111111 }
112112
113113 // Generic multiplication.
114114 def G_MUL : Instruction {
115 let OutOperandList = (outs unknown:$dst);
116 let InOperandList = (ins unknown:$src1, unknown:$src2);
115 let OutOperandList = (outs type0:$dst);
116 let InOperandList = (ins type0:$src1, type0:$src2);
117117 let hasSideEffects = 0;
118118 let isCommutable = 1;
119119 }
120120
121121 // Generic signed division.
122122 def G_SDIV : Instruction {
123 let OutOperandList = (outs unknown:$dst);
124 let InOperandList = (ins unknown:$src1, unknown:$src2);
123 let OutOperandList = (outs type0:$dst);
124 let InOperandList = (ins type0:$src1, type0:$src2);
125125 let hasSideEffects = 0;
126126 let isCommutable = 0;
127127 }
128128
129129 // Generic unsigned division.
130130 def G_UDIV : Instruction {
131 let OutOperandList = (outs unknown:$dst);
132 let InOperandList = (ins unknown:$src1, unknown:$src2);
131 let OutOperandList = (outs type0:$dst);
132 let InOperandList = (ins type0:$src1, type0:$src2);
133133 let hasSideEffects = 0;
134134 let isCommutable = 0;
135135 }
136136
137137 // Generic signed remainder.
138138 def G_SREM : Instruction {
139 let OutOperandList = (outs unknown:$dst);
140 let InOperandList = (ins unknown:$src1, unknown:$src2);
139 let OutOperandList = (outs type0:$dst);
140 let InOperandList = (ins type0:$src1, type0:$src2);
141141 let hasSideEffects = 0;
142142 let isCommutable = 0;
143143 }
144144
145145 // Generic unsigned remainder.
146146 def G_UREM : Instruction {
147 let OutOperandList = (outs unknown:$dst);
148 let InOperandList = (ins unknown:$src1, unknown:$src2);
147 let OutOperandList = (outs type0:$dst);
148 let InOperandList = (ins type0:$src1, type0:$src2);
149149 let hasSideEffects = 0;
150150 let isCommutable = 0;
151151 }
152152
153153 // Generic bitwise and.
154154 def G_AND : Instruction {
155 let OutOperandList = (outs unknown:$dst);
156 let InOperandList = (ins unknown:$src1, unknown:$src2);
155 let OutOperandList = (outs type0:$dst);
156 let InOperandList = (ins type0:$src1, type0:$src2);
157157 let hasSideEffects = 0;
158158 let isCommutable = 1;
159159 }
160160
161161 // Generic bitwise or.
162162 def G_OR : Instruction {
163 let OutOperandList = (outs unknown:$dst);
164 let InOperandList = (ins unknown:$src1, unknown:$src2);
163 let OutOperandList = (outs type0:$dst);
164 let InOperandList = (ins type0:$src1, type0:$src2);
165165 let hasSideEffects = 0;
166166 let isCommutable = 1;
167167 }
168168
169169 // Generic bitwise xor.
170170 def G_XOR : Instruction {
171 let OutOperandList = (outs unknown:$dst);
172 let InOperandList = (ins unknown:$src1, unknown:$src2);
171 let OutOperandList = (outs type0:$dst);
172 let InOperandList = (ins type0:$src1, type0:$src2);
173173 let hasSideEffects = 0;
174174 let isCommutable = 1;
175175 }
176176
177177 // Generic left-shift.
178178 def G_SHL : Instruction {
179 let OutOperandList = (outs unknown:$dst);
180 let InOperandList = (ins unknown:$src1, unknown:$src2);
179 let OutOperandList = (outs type0:$dst);
180 let InOperandList = (ins type0:$src1, type0:$src2);
181181 let hasSideEffects = 0;
182182 }
183183
184184 // Generic logical right-shift.
185185 def G_LSHR : Instruction {
186 let OutOperandList = (outs unknown:$dst);
187 let InOperandList = (ins unknown:$src1, unknown:$src2);
186 let OutOperandList = (outs type0:$dst);
187 let InOperandList = (ins type0:$src1, type0:$src2);
188188 let hasSideEffects = 0;
189189 }
190190
191191 // Generic arithmetic right-shift.
192192 def G_ASHR : Instruction {
193 let OutOperandList = (outs unknown:$dst);
194 let InOperandList = (ins unknown:$src1, unknown:$src2);
193 let OutOperandList = (outs type0:$dst);
194 let InOperandList = (ins type0:$src1, type0:$src2);
195195 let hasSideEffects = 0;
196196 }
197197
198198 // Generic integer comparison.
199199 def G_ICMP : Instruction {
200 let OutOperandList = (outs unknown:$dst);
201 let InOperandList = (ins unknown:$tst, unknown:$src1, unknown:$src2);
200 let OutOperandList = (outs type0:$dst);
201 let InOperandList = (ins unknown:$tst, type1:$src1, type1:$src2);
202202 let hasSideEffects = 0;
203203 }
204204
205205 // Generic floating-point comparison.
206206 def G_FCMP : Instruction {
207 let OutOperandList = (outs unknown:$dst);
208 let InOperandList = (ins unknown:$tst, unknown:$src1, unknown:$src2);
207 let OutOperandList = (outs type0:$dst);
208 let InOperandList = (ins unknown:$tst, type1:$src1, type1:$src2);
209209 let hasSideEffects = 0;
210210 }
211211
212212 // Generic select
213213 def G_SELECT : Instruction {
214 let OutOperandList = (outs unknown:$dst);
215 let InOperandList = (ins unknown:$tst, unknown:$src1, unknown:$src2);
214 let OutOperandList = (outs type0:$dst);
215 let InOperandList = (ins type1:$tst, type0:$src1, type0:$src2);
216216 let hasSideEffects = 0;
217217 }
218218
222222
223223 // Generic unsigned addition consuming and producing a carry flag.
224224 def G_UADDE : Instruction {
225 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
226 let InOperandList = (ins unknown:$src1, unknown:$src2, unknown:$carry_in);
225 let OutOperandList = (outs type0:$dst, type1:$carry_out);
226 let InOperandList = (ins type0:$src1, type0:$src2, type1:$carry_in);
227227 let hasSideEffects = 0;
228228 }
229229
230230 // Generic signed addition producing a carry flag.
231231 def G_SADDO : Instruction {
232 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
233 let InOperandList = (ins unknown:$src1, unknown:$src2);
232 let OutOperandList = (outs type0:$dst, type1:$carry_out);
233 let InOperandList = (ins type0:$src1, type0:$src2);
234234 let hasSideEffects = 0;
235235 let isCommutable = 1;
236236 }
237237
238238 // Generic unsigned subtraction consuming and producing a carry flag.
239239 def G_USUBE : Instruction {
240 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
241 let InOperandList = (ins unknown:$src1, unknown:$src2, unknown:$carry_in);
240 let OutOperandList = (outs type0:$dst, type1:$carry_out);
241 let InOperandList = (ins type0:$src1, type0:$src2, type1:$carry_in);
242242 let hasSideEffects = 0;
243243 }
244244
245245 // Generic unsigned subtraction producing a carry flag.
246246 def G_SSUBO : Instruction {
247 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
248 let InOperandList = (ins unknown:$src1, unknown:$src2);
247 let OutOperandList = (outs type0:$dst, type1:$carry_out);
248 let InOperandList = (ins type0:$src1, type0:$src2);
249249 let hasSideEffects = 0;
250250 }
251251
252252 // Generic unsigned multiplication producing a carry flag.
253253 def G_UMULO : Instruction {
254 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
255 let InOperandList = (ins unknown:$src1, unknown:$src2);
254 let OutOperandList = (outs type0:$dst, type1:$carry_out);
255 let InOperandList = (ins type0:$src1, type0:$src2);
256256 let hasSideEffects = 0;
257257 let isCommutable = 1;
258258 }
259259
260260 // Generic signed multiplication producing a carry flag.
261261 def G_SMULO : Instruction {
262 let OutOperandList = (outs unknown:$dst, unknown:$carry_out);
263 let InOperandList = (ins unknown:$src1, unknown:$src2);
262 let OutOperandList = (outs type0:$dst, type1:$carry_out);
263 let InOperandList = (ins type0:$src1, type0:$src2);
264264 let hasSideEffects = 0;
265265 let isCommutable = 1;
266266 }
270270 //------------------------------------------------------------------------------
271271
272272 def G_FPEXT : Instruction {
273 let OutOperandList = (outs unknown:$dst);
274 let InOperandList = (ins unknown:$src);
273 let OutOperandList = (outs type0:$dst);
274 let InOperandList = (ins type1:$src);
275275 let hasSideEffects = 0;
276276 }
277277
278278 def G_FPTRUNC : Instruction {
279 let OutOperandList = (outs unknown:$dst);
280 let InOperandList = (ins unknown:$src);
279 let OutOperandList = (outs type0:$dst);
280 let InOperandList = (ins type1:$src);
281281 let hasSideEffects = 0;
282282 }
283283
284284 def G_FPTOSI : Instruction {
285 let OutOperandList = (outs unknown:$dst);
286 let InOperandList = (ins unknown:$src);
285 let OutOperandList = (outs type0:$dst);
286 let InOperandList = (ins type1:$src);
287287 let hasSideEffects = 0;
288288 }
289289
290290 def G_FPTOUI : Instruction {
291 let OutOperandList = (outs unknown:$dst);
292 let InOperandList = (ins unknown:$src);
291 let OutOperandList = (outs type0:$dst);
292 let InOperandList = (ins type1:$src);
293293 let hasSideEffects = 0;
294294 }
295295
296296 def G_SITOFP : Instruction {
297 let OutOperandList = (outs unknown:$dst);
298 let InOperandList = (ins unknown:$src);
297 let OutOperandList = (outs type0:$dst);
298 let InOperandList = (ins type1:$src);
299299 let hasSideEffects = 0;
300300 }
301301
302302 def G_UITOFP : Instruction {
303 let OutOperandList = (outs unknown:$dst);
304 let InOperandList = (ins unknown:$src);
303 let OutOperandList = (outs type0:$dst);
304 let InOperandList = (ins type1:$src);
305305 let hasSideEffects = 0;
306306 }
307307
311311
312312 // Generic FP addition.
313313 def G_FADD : Instruction {
314 let OutOperandList = (outs unknown:$dst);
315 let InOperandList = (ins unknown:$src1, unknown:$src2);
314 let OutOperandList = (outs type0:$dst);
315 let InOperandList = (ins type0:$src1, type0:$src2);
316316 let hasSideEffects = 0;
317317 let isCommutable = 1;
318318 }
319319
320320 // Generic FP subtraction.
321321 def G_FSUB : Instruction {
322 let OutOperandList = (outs unknown:$dst);
323 let InOperandList = (ins unknown:$src1, unknown:$src2);
322 let OutOperandList = (outs type0:$dst);
323 let InOperandList = (ins type0:$src1, type0:$src2);
324324 let hasSideEffects = 0;
325325 let isCommutable = 0;
326326 }
327327
328328 // Generic FP multiplication.
329329 def G_FMUL : Instruction {
330 let OutOperandList = (outs unknown:$dst);
331 let InOperandList = (ins unknown:$src1, unknown:$src2);
330 let OutOperandList = (outs type0:$dst);
331 let InOperandList = (ins type0:$src1, type0:$src2);
332332 let hasSideEffects = 0;
333333 let isCommutable = 1;
334334 }
335335
336336 // Generic FP division.
337337 def G_FDIV : Instruction {
338 let OutOperandList = (outs unknown:$dst);
339 let InOperandList = (ins unknown:$src1, unknown:$src2);
338 let OutOperandList = (outs type0:$dst);
339 let InOperandList = (ins type0:$src1, type0:$src2);
340340 let hasSideEffects = 0;
341341 }
342342
343343 // Generic FP remainder.
344344 def G_FREM : Instruction {
345 let OutOperandList = (outs unknown:$dst);
346 let InOperandList = (ins unknown:$src1, unknown:$src2);
345 let OutOperandList = (outs type0:$dst);
346 let InOperandList = (ins type0:$src1, type0:$src2);
347347 let hasSideEffects = 0;
348348 }
349349
353353
354354 // Generic load. Expects a MachineMemOperand in addition to explicit operands.
355355 def G_LOAD : Instruction {
356 let OutOperandList = (outs unknown:$dst);
357 let InOperandList = (ins unknown:$addr);
356 let OutOperandList = (outs type0:$dst);
357 let InOperandList = (ins type1:$addr);
358358 let hasSideEffects = 0;
359359 let mayLoad = 1;
360360 }
362362 // Generic store. Expects a MachineMemOperand in addition to explicit operands.
363363 def G_STORE : Instruction {
364364 let OutOperandList = (outs);
365 let InOperandList = (ins unknown:$src, unknown:$addr);
365 let InOperandList = (ins type0:$src, type1:$addr);
366366 let hasSideEffects = 0;
367367 let mayStore = 1;
368368 }
384384 // indices (interleaved with the values in the operand list "op0, bit0, op1,
385385 // bit1, ...")).
386386 def G_INSERT : Instruction {
387 let OutOperandList = (outs unknown:$dst);
388 let InOperandList = (ins unknown:$src, variable_ops);
387 let OutOperandList = (outs type0:$dst);
388 let InOperandList = (ins type0:$src, variable_ops);
389389 let hasSideEffects = 0;
390390 }
391391
393393 // bit 0). Essentially a G_INSERT where $src is an IMPLICIT_DEF, but it's so
394394 // important to legalization it probably deserves its own instruction.
395395 def G_SEQUENCE : Instruction {
396 let OutOperandList = (outs unknown:$dst);
396 let OutOperandList = (outs type0:$dst);
397397 let InOperandList = (ins variable_ops);
398398 let hasSideEffects = 0;
399399 }
416416
417417 // PHI node bearing an LLT.
418418 def G_PHI : Instruction {
419 let OutOperandList = (outs unknown:$dst);
419 let OutOperandList = (outs type0:$dst);
420420 let InOperandList = (ins variable_ops);
421421 let hasSideEffects = 0;
422422 }
438438 // Generic conditional branch.
439439 def G_BRCOND : Instruction {
440440 let OutOperandList = (outs);
441 let InOperandList = (ins unknown:$tst, unknown:$truebb);
441 let InOperandList = (ins type0:$tst, unknown:$truebb);
442442 let hasSideEffects = 0;
443443 let isBranch = 1;
444444 let isTerminator = 1;
699699 def f64imm : Operand<f64>;
700700 }
701701
702 // Register operands for generic instructions don't have an MVT, but do have
703 // constraints linking the operands (e.g. all operands of a G_ADD must
704 // have the same LLT).
705 class TypedOperand<string Ty> : Operand<untyped> {
706 let OperandType = Ty;
707 }
708
709 def type0 : TypedOperand<"OPERAND_GENERIC_0">;
710 def type1 : TypedOperand<"OPERAND_GENERIC_1">;
711 def type2 : TypedOperand<"OPERAND_GENERIC_2">;
712 def type3 : TypedOperand<"OPERAND_GENERIC_3">;
713 def type4 : TypedOperand<"OPERAND_GENERIC_4">;
714 def type5 : TypedOperand<"OPERAND_GENERIC_5">;
715
702716 /// zero_reg definition - Special node to stand for the zero register.
703717 ///
704718 def zero_reg;
5454 // we need to concat together to produce the value.
5555 assert(Val.getType()->isSized() &&
5656 "Don't know how to create an empty vreg");
57 unsigned Size = DL->getTypeSizeInBits(Val.getType());
58 unsigned VReg = MRI->createGenericVirtualRegister(Size);
57 unsigned VReg = MRI->createGenericVirtualRegister(LLT{*Val.getType(), DL});
5958 ValReg = VReg;
6059
6160 if (auto CV = dyn_cast<Constant>(&Val)) {
112111 unsigned Op0 = getOrCreateVReg(*U.getOperand(0));
113112 unsigned Op1 = getOrCreateVReg(*U.getOperand(1));
114113 unsigned Res = getOrCreateVReg(U);
115 MIRBuilder.buildInstr(Opcode, LLT{*U.getType()})
116 .addDef(Res)
117 .addUse(Op0)
118 .addUse(Op1);
114 MIRBuilder.buildInstr(Opcode).addDef(Res).addUse(Op0).addUse(Op1);
119115 return true;
120116 }
121117
129125 cast<ConstantExpr>(U).getPredicate());
130126
131127 if (CmpInst::isIntPredicate(Pred))
132 MIRBuilder.buildICmp(
133 {LLT{*U.getType()}, LLT{*U.getOperand(0)->getType()}}, Pred, Res, Op0,
134 Op1);
128 MIRBuilder.buildICmp(Pred, Res, Op0, Op1);
135129 else
136 MIRBuilder.buildFCmp(
137 {LLT{*U.getType()}, LLT{*U.getOperand(0)->getType()}}, Pred, Res, Op0,
138 Op1);
130 MIRBuilder.buildFCmp(Pred, Res, Op0, Op1);
139131
140132 return true;
141133 }
157149 unsigned Tst = getOrCreateVReg(*BrInst.getCondition());
158150 const BasicBlock &TrueTgt = *cast<BasicBlock>(BrInst.getSuccessor(Succ++));
159151 MachineBasicBlock &TrueBB = getOrCreateBB(TrueTgt);
160 MIRBuilder.buildBrCond(LLT{*BrInst.getCondition()->getType()}, Tst, TrueBB);
152 MIRBuilder.buildBrCond(Tst, TrueBB);
161153 }
162154
163155 const BasicBlock &BrTgt = *cast<BasicBlock>(BrInst.getSuccessor(Succ));
185177 LLT VTy{*LI.getType(), DL}, PTy{*LI.getPointerOperand()->getType()};
186178
187179 MIRBuilder.buildLoad(
188 VTy, PTy, Res, Addr,
180 Res, Addr,
189181 *MF.getMachineMemOperand(
190182 MachinePointerInfo(LI.getPointerOperand()), MachineMemOperand::MOLoad,
191183 DL->getTypeStoreSize(LI.getType()), getMemOpAlignment(LI)));
207199 PTy{*SI.getPointerOperand()->getType()};
208200
209201 MIRBuilder.buildStore(
210 VTy, PTy, Val, Addr,
202 Val, Addr,
211203 *MF.getMachineMemOperand(
212204 MachinePointerInfo(SI.getPointerOperand()),
213205 MachineMemOperand::MOStore,
236228 uint64_t Offset = 8 * DL->getIndexedOffsetInType(Src->getType(), Indices);
237229
238230 unsigned Res = getOrCreateVReg(U);
239 MIRBuilder.buildExtract(LLT{*U.getType(), DL}, Res, Offset,
240 LLT{*Src->getType(), DL}, getOrCreateVReg(*Src));
231 MIRBuilder.buildExtract(Res, Offset, getOrCreateVReg(*Src));
241232
242233 return true;
243234 }
263254
264255 unsigned Res = getOrCreateVReg(U);
265256 const Value &Inserted = *U.getOperand(1);
266 MIRBuilder.buildInsert(LLT{*U.getType(), DL}, Res, getOrCreateVReg(*Src),
267 LLT{*Inserted.getType(), DL},
268 getOrCreateVReg(Inserted), Offset);
257 MIRBuilder.buildInsert(Res, getOrCreateVReg(*Src), getOrCreateVReg(Inserted),
258 Offset);
269259
270260 return true;
271261 }
272262
273263 bool IRTranslator::translateSelect(const User &U) {
274 MIRBuilder.buildSelect(
275 LLT{*U.getType()}, getOrCreateVReg(U), getOrCreateVReg(*U.getOperand(0)),
276 getOrCreateVReg(*U.getOperand(1)), getOrCreateVReg(*U.getOperand(2)));
264 MIRBuilder.buildSelect(getOrCreateVReg(U), getOrCreateVReg(*U.getOperand(0)),
265 getOrCreateVReg(*U.getOperand(1)),
266 getOrCreateVReg(*U.getOperand(2)));
277267 return true;
278268 }
279269
292282 bool IRTranslator::translateCast(unsigned Opcode, const User &U) {
293283 unsigned Op = getOrCreateVReg(*U.getOperand(0));
294284 unsigned Res = getOrCreateVReg(U);
295 MIRBuilder
296 .buildInstr(Opcode, {LLT{*U.getType()}, LLT{*U.getOperand(0)->getType()}})
297 .addDef(Res)
298 .addUse(Op);
285 MIRBuilder.buildInstr(Opcode).addDef(Res).addUse(Op);
299286 return true;
300287 }
301288
315302 LLT Ty{*CI.getOperand(0)->getType()};
316303 LLT s1 = LLT::scalar(1);
317304 unsigned Width = Ty.getSizeInBits();
318 unsigned Res = MRI->createGenericVirtualRegister(Width);
319 unsigned Overflow = MRI->createGenericVirtualRegister(1);
320 auto MIB = MIRBuilder.buildInstr(Op, {Ty, s1})
305 unsigned Res = MRI->createGenericVirtualRegister(Ty);
306 unsigned Overflow = MRI->createGenericVirtualRegister(s1);
307 auto MIB = MIRBuilder.buildInstr(Op)
321308 .addDef(Res)
322309 .addDef(Overflow)
323310 .addUse(getOrCreateVReg(*CI.getOperand(0)))
324311 .addUse(getOrCreateVReg(*CI.getOperand(1)));
325312
326313 if (Op == TargetOpcode::G_UADDE || Op == TargetOpcode::G_USUBE) {
327 unsigned Zero = MRI->createGenericVirtualRegister(1);
328 EntryBuilder.buildConstant(s1, Zero, 0);
314 unsigned Zero = MRI->createGenericVirtualRegister(s1);
315 EntryBuilder.buildConstant(Zero, 0);
329316 MIB.addUse(Zero);
330317 }
331318
332 MIRBuilder.buildSequence(LLT{*CI.getType(), DL}, getOrCreateVReg(CI), Ty, Res,
333 0, s1, Overflow, Width);
319 MIRBuilder.buildSequence(getOrCreateVReg(CI), Res, 0, Overflow, Width);
334320 return true;
335321 }
336322
360346 if (translateKnownIntrinsic(CI, ID))
361347 return true;
362348
363 // Need types (starting with return) & args.
364 SmallVector<LLT, 4> Tys;
365 Tys.emplace_back(*CI.getType());
366 for (auto &Arg : CI.arg_operands())
367 Tys.emplace_back(*Arg->getType());
368
369349 unsigned Res = CI.getType()->isVoidTy() ? 0 : getOrCreateVReg(CI);
370350 MachineInstrBuilder MIB =
371 MIRBuilder.buildIntrinsic(Tys, ID, Res, !CI.doesNotAccessMemory());
351 MIRBuilder.buildIntrinsic(ID, Res, !CI.doesNotAccessMemory());
372352
373353 for (auto &Arg : CI.arg_operands()) {
374354 if (ConstantInt *CI = dyn_cast<ConstantInt>(Arg))
398378
399379 unsigned Res = getOrCreateVReg(AI);
400380 int FI = MF.getFrameInfo().CreateStackObject(Size, Alignment, false, &AI);
401 MIRBuilder.buildFrameIndex(LLT::pointer(0), Res, FI);
381 MIRBuilder.buildFrameIndex(Res, FI);
402382 return true;
403383 }
404384
405385 bool IRTranslator::translatePHI(const User &U) {
406386 const PHINode &PI = cast<PHINode>(U);
407 auto MIB = MIRBuilder.buildInstr(TargetOpcode::G_PHI, LLT{*U.getType()});
387 auto MIB = MIRBuilder.buildInstr(TargetOpcode::G_PHI);
408388 MIB.addDef(getOrCreateVReg(PI));
409389
410390 PendingPHIs.emplace_back(&PI, MIB.getInstr());
446426
447427 bool IRTranslator::translate(const Constant &C, unsigned Reg) {
448428 if (auto CI = dyn_cast<ConstantInt>(&C))
449 EntryBuilder.buildConstant(LLT{*CI->getType()}, Reg, CI->getZExtValue());
429 EntryBuilder.buildConstant(Reg, CI->getZExtValue());
450430 else if (auto CF = dyn_cast<ConstantFP>(&C))
451 EntryBuilder.buildFConstant(LLT{*CF->getType()}, Reg, *CF);
431 EntryBuilder.buildFConstant(Reg, *CF);
452432 else if (isa<UndefValue>(C))
453433 EntryBuilder.buildInstr(TargetOpcode::IMPLICIT_DEF).addDef(Reg);
454434 else if (isa<ConstantPointerNull>(C))
455 EntryBuilder.buildInstr(TargetOpcode::G_CONSTANT, LLT{*C.getType()})
435 EntryBuilder.buildInstr(TargetOpcode::G_CONSTANT)
456436 .addDef(Reg)
457437 .addImm(0);
458438 else if (auto CE = dyn_cast(&C)) {
7474 // The RegBankSelected property is already checked in the verifier. Note
7575 // that it has the same layering problem, but we only use inline methods so
7676 // end up not needing to link against the GlobalISel library.
77 const MachineRegisterInfo &MRI = MF.getRegInfo();
7778 if (const MachineLegalizer *MLI = MF.getSubtarget().getMachineLegalizer())
7879 for (const MachineBasicBlock &MBB : MF)
7980 for (const MachineInstr &MI : MBB)
80 if (isPreISelGenericOpcode(MI.getOpcode()) && !MLI->isLegal(MI))
81 if (isPreISelGenericOpcode(MI.getOpcode()) && !MLI->isLegal(MI, MRI))
8182 reportSelectionError(MI, "Instruction is not legal");
8283
8384 #endif
117118 // the vreg instead, but that's not ideal either, because it's saying that
118119 // vregs have types, which they really don't. But then again, LLT is just
119120 // a size and a "shape": it's probably the same information as regbank info.
120 MF.getRegInfo().clearVirtRegSizes();
121 MF.getRegInfo().clearVirtRegTypes();
121122
122123 // FIXME: Should we accurately track changes?
123124 return true;
1313 #include "llvm/CodeGen/MachineFunction.h"
1414 #include "llvm/CodeGen/MachineInstr.h"
1515 #include "llvm/CodeGen/MachineInstrBuilder.h"
16 #include "llvm/CodeGen/MachineRegisterInfo.h"
1617 #include "llvm/Target/TargetInstrInfo.h"
1718 #include "llvm/Target/TargetOpcodes.h"
1819 #include "llvm/Target/TargetSubtargetInfo.h"
2223 void MachineIRBuilder::setMF(MachineFunction &MF) {
2324 this->MF = &MF;
2425 this->MBB = nullptr;
26 this->MRI = &MF.getRegInfo();
2527 this->TII = MF.getSubtarget().getInstrInfo();
2628 this->DL = DebugLoc();
2729 this->MI = nullptr;
6668 // Build instruction variants.
6769 //------------------------------------------------------------------------------
6870
69 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode,
70 ArrayRef<LLT> Tys) {
71 MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
7172 MachineInstrBuilder MIB = BuildMI(getMF(), DL, getTII().get(Opcode));
72 if (Tys.size() > 0) {
73 assert(isPreISelGenericOpcode(Opcode) &&
74 "Only generic instruction can have a type");
75 for (unsigned i = 0; i < Tys.size(); ++i)
76 MIB->setType(Tys[i], i);
77 } else
78 assert(!isPreISelGenericOpcode(Opcode) &&
79 "Generic instruction must have a type");
8073 getMBB().insert(getInsertPt(), MIB);
8174 if (InsertedInstr)
8275 InsertedInstr(MIB);
8376 return MIB;
8477 }
8578
86 MachineInstrBuilder MachineIRBuilder::buildFrameIndex(LLT Ty, unsigned Res,
87 int Idx) {
88 return buildInstr(TargetOpcode::G_FRAME_INDEX, Ty)
79 MachineInstrBuilder MachineIRBuilder::buildFrameIndex(unsigned Res, int Idx) {
80 return buildInstr(TargetOpcode::G_FRAME_INDEX)
8981 .addDef(Res)
9082 .addFrameIndex(Idx);
9183 }
9284
93 MachineInstrBuilder MachineIRBuilder::buildAdd(LLT Ty, unsigned Res,
94 unsigned Op0, unsigned Op1) {
95 return buildInstr(TargetOpcode::G_ADD, Ty)
96 .addDef(Res)
97 .addUse(Op0)
98 .addUse(Op1);
99 }
100
101 MachineInstrBuilder MachineIRBuilder::buildSub(LLT Ty, unsigned Res,
102 unsigned Op0, unsigned Op1) {
103 return buildInstr(TargetOpcode::G_SUB, Ty)
104 .addDef(Res)
105 .addUse(Op0)
106 .addUse(Op1);
107 }
108
109 MachineInstrBuilder MachineIRBuilder::buildMul(LLT Ty, unsigned Res,
110 unsigned Op0, unsigned Op1) {
111 return buildInstr(TargetOpcode::G_MUL, Ty)
85 MachineInstrBuilder MachineIRBuilder::buildAdd(unsigned Res, unsigned Op0,
86 unsigned Op1) {
87 return buildInstr(TargetOpcode::G_ADD)
88 .addDef(Res)
89 .addUse(Op0)
90 .addUse(Op1);
91 }
92
93 MachineInstrBuilder MachineIRBuilder::buildSub(unsigned Res, unsigned Op0,
94 unsigned Op1) {
95 return buildInstr(TargetOpcode::G_SUB)
96 .addDef(Res)
97 .addUse(Op0)
98 .addUse(Op1);
99 }
100
101 MachineInstrBuilder MachineIRBuilder::buildMul(unsigned Res, unsigned Op0,
102 unsigned Op1) {
103 return buildInstr(TargetOpcode::G_MUL)
112104 .addDef(Res)
113105 .addUse(Op0)
114106 .addUse(Op1);
115107 }
116108
117109 MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
118 return buildInstr(TargetOpcode::G_BR, LLT::unsized()).addMBB(&Dest);
110 return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
119111 }
120112
121113 MachineInstrBuilder MachineIRBuilder::buildCopy(unsigned Res, unsigned Op) {
122114 return buildInstr(TargetOpcode::COPY).addDef(Res).addUse(Op);
123115 }
124116
125 MachineInstrBuilder MachineIRBuilder::buildConstant(LLT Ty, unsigned Res,
126 int64_t Val) {
127 return buildInstr(TargetOpcode::G_CONSTANT, Ty).addDef(Res).addImm(Val);
128 }
129
130 MachineInstrBuilder MachineIRBuilder::buildFConstant(LLT Ty, unsigned Res,
131 const ConstantFP &Val) {
132 return buildInstr(TargetOpcode::G_FCONSTANT, Ty).addDef(Res).addFPImm(&Val);
133 }
134
135 MachineInstrBuilder MachineIRBuilder::buildBrCond(LLT Ty, unsigned Tst,
117 MachineInstrBuilder MachineIRBuilder::buildConstant(unsigned Res, int64_t Val) {
118 return buildInstr(TargetOpcode::G_CONSTANT).addDef(Res).addImm(Val);
119 }
120
121 MachineInstrBuilder MachineIRBuilder::buildFConstant(unsigned Res,
122 const ConstantFP &Val) {
123 return buildInstr(TargetOpcode::G_FCONSTANT).addDef(Res).addFPImm(&Val);
124 }
125
126 MachineInstrBuilder MachineIRBuilder::buildBrCond(unsigned Tst,
136127 MachineBasicBlock &Dest) {
137 return buildInstr(TargetOpcode::G_BRCOND, Ty).addUse(Tst).addMBB(&Dest);
138 }
139
140
141 MachineInstrBuilder MachineIRBuilder::buildLoad(LLT VTy, LLT PTy, unsigned Res,
142 unsigned Addr,
143 MachineMemOperand &MMO) {
144 return buildInstr(TargetOpcode::G_LOAD, {VTy, PTy})
128 return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
129 }
130
131 MachineInstrBuilder MachineIRBuilder::buildLoad(unsigned Res, unsigned Addr,
132 MachineMemOperand &MMO) {
133 return buildInstr(TargetOpcode::G_LOAD)
145134 .addDef(Res)
146135 .addUse(Addr)
147136 .addMemOperand(&MMO);
148137 }
149138
150 MachineInstrBuilder MachineIRBuilder::buildStore(LLT VTy, LLT PTy,
151 unsigned Val, unsigned Addr,
152 MachineMemOperand &MMO) {
153 return buildInstr(TargetOpcode::G_STORE, {VTy, PTy})
139 MachineInstrBuilder MachineIRBuilder::buildStore(unsigned Val, unsigned Addr,
140 MachineMemOperand &MMO) {
141 return buildInstr(TargetOpcode::G_STORE)
154142 .addUse(Val)
155143 .addUse(Addr)
156144 .addMemOperand(&MMO);
157145 }
158146
159 MachineInstrBuilder
160 MachineIRBuilder::buildUAdde(ArrayRef<LLT> Tys, unsigned Res, unsigned CarryOut,
161 unsigned Op0, unsigned Op1, unsigned CarryIn) {
162 return buildInstr(TargetOpcode::G_UADDE, Tys)
147 MachineInstrBuilder MachineIRBuilder::buildUAdde(unsigned Res,
148 unsigned CarryOut,
149 unsigned Op0, unsigned Op1,
150 unsigned CarryIn) {
151 return buildInstr(TargetOpcode::G_UADDE)
163152 .addDef(Res)
164153 .addDef(CarryOut)
165154 .addUse(Op0)
167156 .addUse(CarryIn);
168157 }
169158
170 MachineInstrBuilder MachineIRBuilder::buildType(LLT Ty,
171 unsigned Res, unsigned Op) {
172 return buildInstr(TargetOpcode::G_TYPE, Ty).addDef(Res).addUse(Op);
173 }
174
175 MachineInstrBuilder MachineIRBuilder::buildAnyExt(ArrayRef<LLT> Tys,
176 unsigned Res, unsigned Op) {
177 validateTruncExt(Tys, true);
178 return buildInstr(TargetOpcode::G_ANYEXT, Tys).addDef(Res).addUse(Op);
179 }
180
181 MachineInstrBuilder MachineIRBuilder::buildSExt(ArrayRef<LLT> Tys, unsigned Res,
182 unsigned Op) {
183 validateTruncExt(Tys, true);
184 return buildInstr(TargetOpcode::G_SEXT, Tys).addDef(Res).addUse(Op);
185 }
186
187 MachineInstrBuilder MachineIRBuilder::buildZExt(ArrayRef<LLT> Tys, unsigned Res,
188 unsigned Op) {
189 validateTruncExt(Tys, true);
190 return buildInstr(TargetOpcode::G_ZEXT, Tys).addDef(Res).addUse(Op);
191 }
192
193 MachineInstrBuilder MachineIRBuilder::buildExtract(ArrayRef<LLT> ResTys,
194 ArrayRef<unsigned> Results,
159 MachineInstrBuilder MachineIRBuilder::buildType(unsigned Res, unsigned Op) {
160 return buildInstr(TargetOpcode::G_TYPE).addDef(Res).addUse(Op);
161 }
162
163 MachineInstrBuilder MachineIRBuilder::buildAnyExt(unsigned Res, unsigned Op) {
164 validateTruncExt(Res, Op, true);
165 return buildInstr(TargetOpcode::G_ANYEXT).addDef(Res).addUse(Op);
166 }
167
168 MachineInstrBuilder MachineIRBuilder::buildSExt(unsigned Res, unsigned Op) {
169 validateTruncExt(Res, Op, true);
170 return buildInstr(TargetOpcode::G_SEXT).addDef(Res).addUse(Op);
171 }
172
173 MachineInstrBuilder MachineIRBuilder::buildZExt(unsigned Res, unsigned Op) {
174 validateTruncExt(Res, Op, true);
175 return buildInstr(TargetOpcode::G_ZEXT).addDef(Res).addUse(Op);
176 }
177
178 MachineInstrBuilder MachineIRBuilder::buildExtract(ArrayRef<unsigned> Results,
195179 ArrayRef<uint64_t> Indices,
196 LLT SrcTy, unsigned Src) {
197 assert(ResTys.size() == Results.size() && Results.size() == Indices.size() &&
198 "inconsistent number of regs");
180 unsigned Src) {
181 assert(Results.size() == Indices.size() && "inconsistent number of regs");
199182 assert(!Results.empty() && "invalid trivial extract");
200183 assert(std::is_sorted(Indices.begin(), Indices.end()) &&
201184 "extract offsets must be in ascending order");
202185
203186 auto MIB = BuildMI(getMF(), DL, getTII().get(TargetOpcode::G_EXTRACT));
204 for (unsigned i = 0; i < ResTys.size(); ++i)
205 MIB->setType(LLT::scalar(ResTys[i].getSizeInBits()), i);
206 MIB->setType(LLT::scalar(SrcTy.getSizeInBits()), ResTys.size());
207
208187 for (auto Res : Results)
209188 MIB.addDef(Res);
210189
221200 }
222201
223202 MachineInstrBuilder
224 MachineIRBuilder::buildSequence(LLT ResTy, unsigned Res,
225 ArrayRef<LLT> OpTys,
203 MachineIRBuilder::buildSequence(unsigned Res,
226204 ArrayRef<unsigned> Ops,
227205 ArrayRef<uint64_t> Indices) {
228 assert(OpTys.size() == Ops.size() && Ops.size() == Indices.size() &&
229 "incompatible args");
206 assert(Ops.size() == Indices.size() && "incompatible args");
230207 assert(!Ops.empty() && "invalid trivial sequence");
231208 assert(std::is_sorted(Indices.begin(), Indices.end()) &&
232209 "sequence offsets must be in ascending order");
233210
234 MachineInstrBuilder MIB =
235 buildInstr(TargetOpcode::G_SEQUENCE, LLT::scalar(ResTy.getSizeInBits()));
211 MachineInstrBuilder MIB = buildInstr(TargetOpcode::G_SEQUENCE);
236212 MIB.addDef(Res);
237213 for (unsigned i = 0; i < Ops.size(); ++i) {
238214 MIB.addUse(Ops[i]);
239215 MIB.addImm(Indices[i]);
240 MIB->setType(LLT::scalar(OpTys[i].getSizeInBits()), MIB->getNumTypes());
241216 }
242217 return MIB;
243218 }
244219
245 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(ArrayRef<LLT> Tys,
246 Intrinsic::ID ID,
220 MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
247221 unsigned Res,
248222 bool HasSideEffects) {
249223 auto MIB =
250224 buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
251 : TargetOpcode::G_INTRINSIC,
252 Tys);
225 : TargetOpcode::G_INTRINSIC);
253226 if (Res)
254227 MIB.addDef(Res);
255228 MIB.addIntrinsicID(ID);
256229 return MIB;
257230 }
258231
259 MachineInstrBuilder MachineIRBuilder::buildTrunc(ArrayRef<LLT> Tys,
260 unsigned Res, unsigned Op) {
261 validateTruncExt(Tys, false);
262 return buildInstr(TargetOpcode::G_TRUNC, Tys).addDef(Res).addUse(Op);
263 }
264
265 MachineInstrBuilder MachineIRBuilder::buildFPTrunc(ArrayRef<LLT> Tys,
266 unsigned Res, unsigned Op) {
267 validateTruncExt(Tys, false);
268 return buildInstr(TargetOpcode::G_FPTRUNC, Tys).addDef(Res).addUse(Op);
269 }
270
271 MachineInstrBuilder MachineIRBuilder::buildICmp(ArrayRef<LLT> Tys,
272 CmpInst::Predicate Pred,
232 MachineInstrBuilder MachineIRBuilder::buildTrunc(unsigned Res, unsigned Op) {
233 validateTruncExt(Res, Op, false);
234 return buildInstr(TargetOpcode::G_TRUNC).addDef(Res).addUse(Op);
235 }
236
237 MachineInstrBuilder MachineIRBuilder::buildFPTrunc(unsigned Res, unsigned Op) {
238 validateTruncExt(Res, Op, false);
239 return buildInstr(TargetOpcode::G_FPTRUNC).addDef(Res).addUse(Op);
240 }
241
242 MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
273243 unsigned Res, unsigned Op0,
274244 unsigned Op1) {
275 return buildInstr(TargetOpcode::G_ICMP, Tys)
245 return buildInstr(TargetOpcode::G_ICMP)
276246 .addDef(Res)
277247 .addPredicate(Pred)
278248 .addUse(Op0)
279249 .addUse(Op1);
280250 }
281251
282 MachineInstrBuilder MachineIRBuilder::buildFCmp(ArrayRef<LLT> Tys,
283 CmpInst::Predicate Pred,
252 MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
284253 unsigned Res, unsigned Op0,
285254 unsigned Op1) {
286 return buildInstr(TargetOpcode::G_FCMP, Tys)
255 return buildInstr(TargetOpcode::G_FCMP)
287256 .addDef(Res)
288257 .addPredicate(Pred)
289258 .addUse(Op0)
290259 .addUse(Op1);
291260 }
292261
293 MachineInstrBuilder MachineIRBuilder::buildSelect(LLT Ty, unsigned Res,
294 unsigned Tst,
262 MachineInstrBuilder MachineIRBuilder::buildSelect(unsigned Res, unsigned Tst,
295263 unsigned Op0, unsigned Op1) {
296 return buildInstr(TargetOpcode::G_SELECT, {Ty, LLT::scalar(1)})
264 return buildInstr(TargetOpcode::G_SELECT)
297265 .addDef(Res)
298266 .addUse(Tst)
299267 .addUse(Op0)
300268 .addUse(Op1);
301269 }
302270
303 void MachineIRBuilder::validateTruncExt(ArrayRef<LLT> Tys, bool IsExtend) {
271 void MachineIRBuilder::validateTruncExt(unsigned Dst, unsigned Src,
272 bool IsExtend) {
304273 #ifndef NDEBUG
305 assert(Tys.size() == 2 && "cast should have a source and a dest type");
306 LLT DstTy{Tys[0]}, SrcTy{Tys[1]};
274 LLT SrcTy = MRI->getType(Src);
275 LLT DstTy = MRI->getType(Dst);
307276
308277 if (DstTy.isVector()) {
309278 assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
3535 MachineLegalizeHelper::LegalizeResult
3636 MachineLegalizeHelper::legalizeInstrStep(MachineInstr &MI,
3737 const MachineLegalizer &Legalizer) {
38 auto Action = Legalizer.getAction(MI);
38 auto Action = Legalizer.getAction(MI, MRI);
3939 switch (std::get<0>(Action)) {
4040 case MachineLegalizer::Legal:
4141 return AlreadyLegal;
8484 SmallVectorImpl<unsigned> &VRegs) {
8585 unsigned Size = Ty.getSizeInBits();
8686 SmallVector<uint64_t, 4> Indexes;
87 SmallVector<LLT, 4> ResTys;
8887 for (int i = 0; i < NumParts; ++i) {
89 VRegs.push_back(MRI.createGenericVirtualRegister(Size));
88 VRegs.push_back(MRI.createGenericVirtualRegister(Ty));
9089 Indexes.push_back(i * Size);
91 ResTys.push_back(Ty);
92 }
93 MIRBuilder.buildExtract(ResTys, VRegs, Indexes,
94 LLT::scalar(Ty.getSizeInBits() * NumParts), Reg);
90 }
91 MIRBuilder.buildExtract(VRegs, Indexes, Reg);
9592 }
9693
9794 MachineLegalizeHelper::LegalizeResult
9895 MachineLegalizeHelper::libcall(MachineInstr &MI) {
99 unsigned Size = MI.getType().getSizeInBits();
96 LLT Ty = MRI.getType(MI.getOperand(0).getReg());
97 unsigned Size = Ty.getSizeInBits();
10098 MIRBuilder.setInstr(MI);
10199
102100 switch (MI.getOpcode()) {
131129 case TargetOpcode::G_ADD: {
132130 // Expand in terms of carry-setting/consuming G_ADDE instructions.
133131 unsigned NarrowSize = NarrowTy.getSizeInBits();
134 int NumParts = MI.getType().getSizeInBits() / NarrowSize;
132 int NumParts = MRI.getType(MI.getOperand(0).getReg()).getSizeInBits() /
133 NarrowTy.getSizeInBits();
135134
136135 MIRBuilder.setInstr(MI);
137136
139138 extractParts(MI.getOperand(1).getReg(), NarrowTy, NumParts, Src1Regs);
140139 extractParts(MI.getOperand(2).getReg(), NarrowTy, NumParts, Src2Regs);
141140
142 unsigned CarryIn = MRI.createGenericVirtualRegister(1);
143 MIRBuilder.buildConstant(LLT::scalar(1), CarryIn, 0);
144
145 SmallVector<LLT, 2> DstTys;
141 unsigned CarryIn = MRI.createGenericVirtualRegister(LLT::scalar(1));
142 MIRBuilder.buildConstant(CarryIn, 0);
143
146144 for (int i = 0; i < NumParts; ++i) {
147 unsigned DstReg = MRI.createGenericVirtualRegister(NarrowSize);
148 unsigned CarryOut = MRI.createGenericVirtualRegister(1);
149
150 MIRBuilder.buildUAdde(NarrowTy, DstReg, CarryOut, Src1Regs[i],
145 unsigned DstReg = MRI.createGenericVirtualRegister(NarrowTy);
146 unsigned CarryOut = MRI.createGenericVirtualRegister(LLT::scalar(1));
147
148 MIRBuilder.buildUAdde(DstReg, CarryOut, Src1Regs[i],
151149 Src2Regs[i], CarryIn);
152150
153 DstTys.push_back(NarrowTy);
154151 DstRegs.push_back(DstReg);
155152 Indexes.push_back(i * NarrowSize);
156153 CarryIn = CarryOut;
157154 }
158 MIRBuilder.buildSequence(MI.getType(), MI.getOperand(0).getReg(), DstTys,
159 DstRegs, Indexes);
155 unsigned DstReg = MI.getOperand(0).getReg();
156 MIRBuilder.buildSequence(DstReg, DstRegs, Indexes);
160157 MI.eraseFromParent();
161158 return Legalized;
162159 }
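
For reference, the carry chain produced by the G_ADD narrowing above looks like the following when a 64-bit add is split into two 32-bit parts. A minimal sketch: the helper and its parameters are hypothetical, and the buildConstant/buildUAdde calls are used exactly as in the hunk above.

#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
using namespace llvm;

// Hypothetical helper: adds two s64 values whose s32 halves have already
// been extracted, chaining the carry through G_UADDE.
static void addInTwoParts(MachineIRBuilder &MIRBuilder,
                          MachineRegisterInfo &MRI,
                          unsigned Src1Lo, unsigned Src1Hi,
                          unsigned Src2Lo, unsigned Src2Hi,
                          unsigned &DstLo, unsigned &DstHi) {
  LLT S32 = LLT::scalar(32), S1 = LLT::scalar(1);

  unsigned CarryIn = MRI.createGenericVirtualRegister(S1);
  MIRBuilder.buildConstant(CarryIn, 0);          // no incoming carry

  DstLo = MRI.createGenericVirtualRegister(S32);
  unsigned CarryMid = MRI.createGenericVirtualRegister(S1);
  MIRBuilder.buildUAdde(DstLo, CarryMid, Src1Lo, Src2Lo, CarryIn);

  DstHi = MRI.createGenericVirtualRegister(S32);
  unsigned CarryOut = MRI.createGenericVirtualRegister(S1);
  MIRBuilder.buildUAdde(DstHi, CarryOut, Src1Hi, Src2Hi, CarryMid);
  (void)CarryOut;                                // top carry is discarded
}
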
166163 MachineLegalizeHelper::LegalizeResult
167164 MachineLegalizeHelper::widenScalar(MachineInstr &MI, unsigned TypeIdx,
168165 LLT WideTy) {
169 LLT Ty = MI.getType();
166 LLT Ty = MRI.getType(MI.getOperand(0).getReg());
170167 unsigned WideSize = WideTy.getSizeInBits();
171168 MIRBuilder.setInstr(MI);
172169
182179 // Perform operation at larger width (any extension is fine here, high bits
183180 // don't affect the result) and then truncate the result back to the
184181 // original type.
185 unsigned Src1Ext = MRI.createGenericVirtualRegister(WideSize);
186 unsigned Src2Ext = MRI.createGenericVirtualRegister(WideSize);
187 MIRBuilder.buildAnyExt({WideTy, Ty}, Src1Ext, MI.getOperand(1).getReg());
188 MIRBuilder.buildAnyExt({WideTy, Ty}, Src2Ext, MI.getOperand(2).getReg());
189
190 unsigned DstExt = MRI.createGenericVirtualRegister(WideSize);
191 MIRBuilder.buildInstr(MI.getOpcode(), WideTy)
192 .addDef(DstExt).addUse(Src1Ext).addUse(Src2Ext);
193
194 MIRBuilder.buildTrunc({Ty, WideTy}, MI.getOperand(0).getReg(), DstExt);
182 unsigned Src1Ext = MRI.createGenericVirtualRegister(WideTy);
183 unsigned Src2Ext = MRI.createGenericVirtualRegister(WideTy);
184 MIRBuilder.buildAnyExt(Src1Ext, MI.getOperand(1).getReg());
185 MIRBuilder.buildAnyExt(Src2Ext, MI.getOperand(2).getReg());
186
187 unsigned DstExt = MRI.createGenericVirtualRegister(WideTy);
188 MIRBuilder.buildInstr(MI.getOpcode())
189 .addDef(DstExt)
190 .addUse(Src1Ext)
191 .addUse(Src2Ext);
192
193 MIRBuilder.buildTrunc(MI.getOperand(0).getReg(), DstExt);
195194 MI.eraseFromParent();
196195 return Legalized;
197196 }
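
The widening pattern above (any-extend the sources, operate at the wide type, truncate back) can be written out for a single operation. A minimal sketch, assuming the original add is s8 and the target prefers s32; only the wrapper is hypothetical, the builder calls are those used in widenScalar.

#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
using namespace llvm;

// Hypothetical helper: widen an s8 add to s32 and truncate the result back.
static void widenAddTo32(MachineIRBuilder &MIRBuilder,
                         MachineRegisterInfo &MRI,
                         unsigned Dst8, unsigned LHS8, unsigned RHS8) {
  LLT S32 = LLT::scalar(32);

  unsigned LHS32 = MRI.createGenericVirtualRegister(S32);
  unsigned RHS32 = MRI.createGenericVirtualRegister(S32);
  MIRBuilder.buildAnyExt(LHS32, LHS8); // high bits don't affect the result
  MIRBuilder.buildAnyExt(RHS32, RHS8);

  unsigned Dst32 = MRI.createGenericVirtualRegister(S32);
  MIRBuilder.buildAdd(Dst32, LHS32, RHS32);

  MIRBuilder.buildTrunc(Dst8, Dst32);  // back to the original s8 type
}
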
201200 ? TargetOpcode::G_SEXT
202201 : TargetOpcode::G_ZEXT;
203202
204 unsigned LHSExt = MRI.createGenericVirtualRegister(WideSize);
205 MIRBuilder.buildInstr(ExtOp, {WideTy, MI.getType()})
206 .addDef(LHSExt)
207 .addUse(MI.getOperand(1).getReg());
208
209 unsigned RHSExt = MRI.createGenericVirtualRegister(WideSize);
210 MIRBuilder.buildInstr(ExtOp, {WideTy, MI.getType()})
211 .addDef(RHSExt)
212 .addUse(MI.getOperand(2).getReg());
213
214 unsigned ResExt = MRI.createGenericVirtualRegister(WideSize);
215 MIRBuilder.buildInstr(MI.getOpcode(), WideTy)
203 unsigned LHSExt = MRI.createGenericVirtualRegister(WideTy);
204 MIRBuilder.buildInstr(ExtOp).addDef(LHSExt).addUse(
205 MI.getOperand(1).getReg());
206
207 unsigned RHSExt = MRI.createGenericVirtualRegister(WideTy);
208 MIRBuilder.buildInstr(ExtOp).addDef(RHSExt).addUse(
209 MI.getOperand(2).getReg());
210
211 unsigned ResExt = MRI.createGenericVirtualRegister(WideTy);
212 MIRBuilder.buildInstr(MI.getOpcode())
216213 .addDef(ResExt)
217214 .addUse(LHSExt)
218215 .addUse(RHSExt);
219216
220 MIRBuilder.buildTrunc({MI.getType(), WideTy}, MI.getOperand(0).getReg(),
221 ResExt);
217 MIRBuilder.buildTrunc(MI.getOperand(0).getReg(), ResExt);
222218 MI.eraseFromParent();
223219 return Legalized;
224220 }
226222 assert(alignTo(Ty.getSizeInBits(), 8) == WideSize &&
227223 "illegal to increase number of bytes loaded");
228224
229 unsigned DstExt = MRI.createGenericVirtualRegister(WideSize);
230 MIRBuilder.buildLoad(WideTy, MI.getType(1), DstExt,
231 MI.getOperand(1).getReg(), **MI.memoperands_begin());
232 MIRBuilder.buildTrunc({Ty, WideTy}, MI.getOperand(0).getReg(), DstExt);
225 unsigned DstExt = MRI.createGenericVirtualRegister(WideTy);
226 MIRBuilder.buildLoad(DstExt, MI.getOperand(1).getReg(),
227 **MI.memoperands_begin());
228 MIRBuilder.buildTrunc(MI.getOperand(0).getReg(), DstExt);
233229 MI.eraseFromParent();
234230 return Legalized;
235231 }
237233 assert(alignTo(Ty.getSizeInBits(), 8) == WideSize &&
238234 "illegal to increase number of bytes modified by a store");
239235
240 unsigned SrcExt = MRI.createGenericVirtualRegister(WideSize);
241 MIRBuilder.buildAnyExt({WideTy, Ty}, SrcExt, MI.getOperand(0).getReg());
242 MIRBuilder.buildStore(WideTy, MI.getType(1), SrcExt,
243 MI.getOperand(1).getReg(), **MI.memoperands_begin());
236 unsigned SrcExt = MRI.createGenericVirtualRegister(WideTy);
237 MIRBuilder.buildAnyExt(SrcExt, MI.getOperand(0).getReg());
238 MIRBuilder.buildStore(SrcExt, MI.getOperand(1).getReg(),
239 **MI.memoperands_begin());
244240 MI.eraseFromParent();
245241 return Legalized;
246242 }
247243 case TargetOpcode::G_CONSTANT: {
248 unsigned DstExt = MRI.createGenericVirtualRegister(WideSize);
249 MIRBuilder.buildConstant(WideTy, DstExt, MI.getOperand(1).getImm());
250 MIRBuilder.buildTrunc({Ty, WideTy}, MI.getOperand(0).getReg(), DstExt);
244 unsigned DstExt = MRI.createGenericVirtualRegister(WideTy);
245 MIRBuilder.buildConstant(DstExt, MI.getOperand(1).getImm());
246 MIRBuilder.buildTrunc(MI.getOperand(0).getReg(), DstExt);
251247 MI.eraseFromParent();
252248 return Legalized;
253249 }
254250 case TargetOpcode::G_FCONSTANT: {
255 unsigned DstExt = MRI.createGenericVirtualRegister(WideSize);
256 MIRBuilder.buildFConstant(WideTy, DstExt, *MI.getOperand(1).getFPImm());
257 MIRBuilder.buildFPTrunc({Ty, WideTy}, MI.getOperand(0).getReg(), DstExt);
251 unsigned DstExt = MRI.createGenericVirtualRegister(WideTy);
252 MIRBuilder.buildFConstant(DstExt, *MI.getOperand(1).getFPImm());
253 MIRBuilder.buildFPTrunc(MI.getOperand(0).getReg(), DstExt);
258254 MI.eraseFromParent();
259255 return Legalized;
260256 }
261257 case TargetOpcode::G_BRCOND: {
262 unsigned TstExt = MRI.createGenericVirtualRegister(WideSize);
263 MIRBuilder.buildAnyExt({WideTy, Ty}, TstExt, MI.getOperand(0).getReg());
264 MIRBuilder.buildBrCond(WideTy, TstExt, *MI.getOperand(1).getMBB());
258 unsigned TstExt = MRI.createGenericVirtualRegister(WideTy);
259 MIRBuilder.buildAnyExt(TstExt, MI.getOperand(0).getReg());
260 MIRBuilder.buildBrCond(TstExt, *MI.getOperand(1).getMBB());
265261 MI.eraseFromParent();
266262 return Legalized;
267263 }
269265 assert(TypeIdx == 1 && "unable to legalize predicate");
270266 bool IsSigned = CmpInst::isSigned(
271267 static_cast(MI.getOperand(1).getPredicate()));
272 unsigned Op0Ext = MRI.createGenericVirtualRegister(WideSize);
273 unsigned Op1Ext = MRI.createGenericVirtualRegister(WideSize);
268 unsigned Op0Ext = MRI.createGenericVirtualRegister(WideTy);
269 unsigned Op1Ext = MRI.createGenericVirtualRegister(WideTy);
274270 if (IsSigned) {
275 MIRBuilder.buildSExt({WideTy, MI.getType(1)}, Op0Ext,
276 MI.getOperand(2).getReg());
277 MIRBuilder.buildSExt({WideTy, MI.getType(1)}, Op1Ext,
278 MI.getOperand(3).getReg());
271 MIRBuilder.buildSExt(Op0Ext, MI.getOperand(2).getReg());
272 MIRBuilder.buildSExt(Op1Ext, MI.getOperand(3).getReg());
279273 } else {
280 MIRBuilder.buildZExt({WideTy, MI.getType(1)}, Op0Ext,
281 MI.getOperand(2).getReg());
282 MIRBuilder.buildZExt({WideTy, MI.getType(1)}, Op1Ext,
283 MI.getOperand(3).getReg());
274 MIRBuilder.buildZExt(Op0Ext, MI.getOperand(2).getReg());
275 MIRBuilder.buildZExt(Op1Ext, MI.getOperand(3).getReg());
284276 }
285277 MIRBuilder.buildICmp(
286 {MI.getType(0), WideTy},
287278 static_cast<CmpInst::Predicate>(MI.getOperand(1).getPredicate()),
288279 MI.getOperand(0).getReg(), Op0Ext, Op1Ext);
289280 MI.eraseFromParent();
295286 MachineLegalizeHelper::LegalizeResult
296287 MachineLegalizeHelper::lower(MachineInstr &MI, unsigned TypeIdx, LLT Ty) {
297288 using namespace TargetOpcode;
298 unsigned Size = Ty.getSizeInBits();
299289 MIRBuilder.setInstr(MI);
300290
301291 switch(MI.getOpcode()) {
303293 return UnableToLegalize;
304294 case TargetOpcode::G_SREM:
305295 case TargetOpcode::G_UREM: {
306 unsigned QuotReg = MRI.createGenericVirtualRegister(Size);
307 MIRBuilder.buildInstr(MI.getOpcode() == G_SREM ? G_SDIV : G_UDIV, Ty)
296 unsigned QuotReg = MRI.createGenericVirtualRegister(Ty);
297 MIRBuilder.buildInstr(MI.getOpcode() == G_SREM ? G_SDIV : G_UDIV)
308298 .addDef(QuotReg)
309299 .addUse(MI.getOperand(1).getReg())
310300 .addUse(MI.getOperand(2).getReg());
311301
312 unsigned ProdReg = MRI.createGenericVirtualRegister(Size);
313 MIRBuilder.buildMul(Ty, ProdReg, QuotReg, MI.getOperand(2).getReg());
314 MIRBuilder.buildSub(Ty, MI.getOperand(0).getReg(),
315 MI.getOperand(1).getReg(), ProdReg);
302 unsigned ProdReg = MRI.createGenericVirtualRegister(Ty);
303 MIRBuilder.buildMul(ProdReg, QuotReg, MI.getOperand(2).getReg());
304 MIRBuilder.buildSub(MI.getOperand(0).getReg(), MI.getOperand(1).getReg(),
305 ProdReg);
316306 MI.eraseFromParent();
317307 return Legalized;
318308 }
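
The remainder lowering above uses the identity rem = lhs - (lhs / rhs) * rhs. A standalone sketch under the same API; the wrapper is hypothetical, and the G_SDIV/G_MUL/G_SUB builder calls match the hunk.

#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Target/TargetOpcodes.h"
using namespace llvm;

// Hypothetical helper: lower Res = LHS srem RHS, all operands of type Ty.
static void lowerSRem(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI,
                      unsigned Res, unsigned LHS, unsigned RHS) {
  LLT Ty = MRI.getType(Res);

  unsigned Quot = MRI.createGenericVirtualRegister(Ty);
  MIRBuilder.buildInstr(TargetOpcode::G_SDIV)
      .addDef(Quot)
      .addUse(LHS)
      .addUse(RHS);

  unsigned Prod = MRI.createGenericVirtualRegister(Ty);
  MIRBuilder.buildMul(Prod, Quot, RHS);  // (LHS / RHS) * RHS
  MIRBuilder.buildSub(Res, LHS, Prod);   // LHS - (LHS / RHS) * RHS
}
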
330320 return UnableToLegalize;
331321 case TargetOpcode::G_ADD: {
332322 unsigned NarrowSize = NarrowTy.getSizeInBits();
333 int NumParts = MI.getType().getSizeInBits() / NarrowSize;
323 unsigned DstReg = MI.getOperand(0).getReg();
324 int NumParts = MRI.getType(DstReg).getSizeInBits() / NarrowSize;
334325
335326 MIRBuilder.setInstr(MI);
336327
338329 extractParts(MI.getOperand(1).getReg(), NarrowTy, NumParts, Src1Regs);
339330 extractParts(MI.getOperand(2).getReg(), NarrowTy, NumParts, Src2Regs);
340331
341 SmallVector<LLT, 2> DstTys;
342332 for (int i = 0; i < NumParts; ++i) {
343 unsigned DstReg = MRI.createGenericVirtualRegister(NarrowSize);
344 MIRBuilder.buildAdd(NarrowTy, DstReg, Src1Regs[i], Src2Regs[i]);
345 DstTys.push_back(NarrowTy);
333 unsigned DstReg = MRI.createGenericVirtualRegister(NarrowTy);
334 MIRBuilder.buildAdd(DstReg, Src1Regs[i], Src2Regs[i]);
346335 DstRegs.push_back(DstReg);
347336 Indexes.push_back(i * NarrowSize);
348337 }
349338
350 MIRBuilder.buildSequence(MI.getType(), MI.getOperand(0).getReg(), DstTys,
351 DstRegs, Indexes);
352 MI.eraseFromParent();
353 return Legalized;
354 }
355 }
356 }
339 MIRBuilder.buildSequence(DstReg, DstRegs, Indexes);
340 MI.eraseFromParent();
341 return Legalized;
342 }
343 }
344 }
7676 SeqI.getOperand(2 * SeqIdx + 2).getImm() < ExtractPos)
7777 ++SeqIdx;
7878
79 if (SeqIdx == NumSeqSrcs ||
80 SeqI.getOperand(2 * SeqIdx + 2).getImm() != ExtractPos ||
81 SeqI.getType(SeqIdx + 1) != MI.getType(Idx)) {
79 if (SeqIdx == NumSeqSrcs) {
8280 AllDefsReplaced = false;
8381 continue;
8482 }
8583
8684 unsigned OrigReg = SeqI.getOperand(2 * SeqIdx + 1).getReg();
85 if (SeqI.getOperand(2 * SeqIdx + 2).getImm() != ExtractPos ||
86 MRI.getType(OrigReg) != MRI.getType(ExtractReg)) {
87 AllDefsReplaced = false;
88 continue;
89 }
90
8791 assert(!TargetRegisterInfo::isPhysicalRegister(OrigReg) &&
8892 "unexpected physical register in G_SEQUENCE");
8993
1717 //===----------------------------------------------------------------------===//
1818
1919 #include "llvm/CodeGen/GlobalISel/MachineLegalizer.h"
20
21 #include "llvm/ADT/SmallBitVector.h"
2022 #include "llvm/CodeGen/MachineInstr.h"
23 #include "llvm/CodeGen/MachineRegisterInfo.h"
2124 #include "llvm/CodeGen/ValueTypes.h"
2225 #include "llvm/IR/Type.h"
2326 #include "llvm/Target/TargetOpcodes.h"
115118 }
116119
117120 std::tuple<MachineLegalizer::LegalizeAction, unsigned, LLT>
118 MachineLegalizer::getAction(const MachineInstr &MI) const {
119 for (unsigned i = 0; i < MI.getNumTypes(); ++i) {
120 auto Action = getAction({MI.getOpcode(), i, MI.getType(i)});
121 MachineLegalizer::getAction(const MachineInstr &MI,
122 const MachineRegisterInfo &MRI) const {
123 SmallBitVector SeenTypes(8);
124 const MCOperandInfo *OpInfo = MI.getDesc().OpInfo;
125 for (unsigned i = 0; i < MI.getDesc().getNumOperands(); ++i) {
126 if (!OpInfo[i].isGenericType())
127 continue;
128
129 // We don't want to repeatedly check the same operand index, that
130 // could get expensive.
131 unsigned TypeIdx = OpInfo[i].getGenericTypeIndex();
132 if (SeenTypes[TypeIdx])
133 continue;
134
135 SeenTypes.set(TypeIdx);
136
137 LLT Ty = MRI.getType(MI.getOperand(i).getReg());
138 auto Action = getAction({MI.getOpcode(), TypeIdx, Ty});
121139 if (Action.first != Legal)
122 return std::make_tuple(Action.first, i, Action.second);
140 return std::make_tuple(Action.first, TypeIdx, Action.second);
123141 }
124142 return std::make_tuple(Legal, 0, LLT{});
125143 }
126144
127 bool MachineLegalizer::isLegal(const MachineInstr &MI) const {
128 return std::get<0>(getAction(MI)) == Legal;
145 bool MachineLegalizer::isLegal(const MachineInstr &MI,
146 const MachineRegisterInfo &MRI) const {
147 return std::get<0>(getAction(MI, MRI)) == Legal;
129148 }
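
Callers of the legalizer now pass the MachineRegisterInfo so operand types can be looked up there. A sketch of unpacking the returned tuple; the helper is hypothetical, and the action enum is assumed to be MachineLegalizer::LegalizeAction, as suggested by the uses of MachineLegalizer::Legal above.

#include "llvm/CodeGen/GlobalISel/MachineLegalizer.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include <tuple>
using namespace llvm;

// Hypothetical helper: ask the legalizer what to do with MI.
static bool instrIsLegal(const MachineLegalizer &Legalizer,
                         const MachineInstr &MI,
                         const MachineRegisterInfo &MRI) {
  MachineLegalizer::LegalizeAction Action;
  unsigned TypeIdx;            // which generic type index is the problem
  LLT SuggestedTy;             // what the legalizer wants it to become
  std::tie(Action, TypeIdx, SuggestedTy) = Legalizer.getAction(MI, MRI);
  (void)TypeIdx;
  (void)SuggestedTy;
  return Action == MachineLegalizer::Legal;
}
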
130149
131150 LLT MachineLegalizer::findLegalType(const InstrAspect &Aspect,
574574 // Legalized property, so it should be.
575575 // FIXME: This should be in the MachineVerifier, but it can't use the
576576 // MachineLegalizer as it's currently in the separate GlobalISel library.
577 const MachineRegisterInfo &MRI = MF.getRegInfo();
577578 if (const MachineLegalizer *MLI = MF.getSubtarget().getMachineLegalizer()) {
578579 for (const MachineBasicBlock &MBB : MF) {
579580 for (const MachineInstr &MI : MBB) {
580 if (isPreISelGenericOpcode(MI.getOpcode()) && !MLI->isLegal(MI)) {
581 if (isPreISelGenericOpcode(MI.getOpcode()) && !MLI->isLegal(MI, MRI)) {
581582 if (!TPC->isGlobalISelAbortEnabled()) {
582583 MF.getProperties().set(
583584 MachineFunctionProperties::Property::FailedISel);
366366 // get the size of that register class.
367367 RC = TRI.getMinimalPhysRegClass(Reg);
368368 } else {
369 unsigned RegSize = MRI.getSize(Reg);
369 LLT Ty = MRI.getType(Reg);
370 unsigned RegSize = Ty.isSized() ? Ty.getSizeInBits() : 0;
370371 // If Reg is not a generic register, query the register class to
371372 // get its size.
372373 if (RegSize)
565566 for (unsigned &NewVReg : NewVRegsForOpIdx) {
566567 assert(PartMap != PartMapList.end() && "Out-of-bound access");
567568 assert(NewVReg == 0 && "Register has already been created");
568 NewVReg = MRI.createGenericVirtualRegister(PartMap->Length);
569 NewVReg = MRI.createGenericVirtualRegister(LLT::scalar(PartMap->Length));
569570 MRI.setRegBank(NewVReg, *PartMap->RegBank);
570571 ++PartMap;
571572 }
598598 if (Token.isError() || parseInstruction(OpCode, Flags))
599599 return true;
600600
601 SmallVector<LLT, 1> Tys;
602 if (isPreISelGenericOpcode(OpCode)) {
603 // For generic opcode, at least one type is mandatory.
604 auto Loc = Token.location();
605 bool ManyTypes = Token.is(MIToken::lbrace);
606 if (ManyTypes)
607 lex();
608
609 // Now actually parse the type(s).
610 do {
611 Tys.resize(Tys.size() + 1);
612 if (parseLowLevelType(Loc, Tys[Tys.size() - 1]))
613 return true;
614 } while (ManyTypes && consumeIfPresent(MIToken::comma));
615
616 if (ManyTypes)
617 expectAndConsume(MIToken::rbrace);
618 }
619
620601 // Parse the remaining machine operands.
621602 while (!Token.isNewlineOrEOF() && Token.isNot(MIToken::kw_debug_location) &&
622603 Token.isNot(MIToken::coloncolon) && Token.isNot(MIToken::lbrace)) {
672653 // TODO: Check for extraneous machine operands.
673654 MI = MF.CreateMachineInstr(MCID, DebugLocation, /*NoImplicit=*/true);
674655 MI->setFlags(Flags);
675 if (Tys.size() > 0) {
676 for (unsigned i = 0; i < Tys.size(); ++i)
677 MI->setType(Tys[i], i);
678 }
679656 for (const auto &Operand : Operands)
680657 MI->addOperand(MF, Operand.Operand);
681658 if (assignRegisterTies(*MI, Operands))
995972 if (MRI.getRegClassOrRegBank(Reg).is<const TargetRegisterClass *>())
996973 return error("unexpected size on non-generic virtual register");
997974
998 unsigned Size;
999 if (parseSize(Size))
1000 return true;
1001
1002 MRI.setSize(Reg, Size);
975 LLT Ty;
976 if (parseLowLevelType(Token.location(), Ty))
977 return true;
978
979 if (expectAndConsume(MIToken::rparen))
980 return true;
981
982 MRI.setType(Reg, Ty);
1003983 } else if (PFS.GenericVRegs.count(Reg)) {
1004984 // Generic virtual registers must have a size.
1005985 // If we end up here this means the size hasn't been specified and
414414 if (StringRef(VReg.Class.Value).equals("_")) {
415415 // This is a generic virtual register.
416416 // The size will be set appropriately when we reach the definition.
417 Reg = RegInfo.createGenericVirtualRegister(/*Size*/ 1);
417 Reg = RegInfo.createGenericVirtualRegister(LLT::scalar(1));
418418 PFS.GenericVRegs.insert(Reg);
419419 } else {
420420 const auto *RC = getRegClass(MF, VReg.Class.Value);
427427 VReg.Class.SourceRange.Start,
428428 Twine("use of undefined register class or register bank '") +
429429 VReg.Class.Value + "'");
430 Reg = RegInfo.createGenericVirtualRegister(/*Size*/ 1);
430 Reg = RegInfo.createGenericVirtualRegister(LLT::scalar(1));
431431 RegInfo.setRegBank(Reg, *RegBank);
432432 PFS.GenericVRegs.insert(Reg);
433433 }
222222 VReg.Class = StringRef(RegInfo.getRegBankOrNull(Reg)->getName()).lower();
223223 else {
224224 VReg.Class = std::string("_");
225 assert(RegInfo.getSize(Reg) && "Generic registers must have a size");
225 assert(RegInfo.getType(Reg).isValid() &&
226 "Generic registers must have a valid type");
226227 }
227228 unsigned PreferredReg = RegInfo.getSimpleHint(Reg);
228229 if (PreferredReg)
567568 if (MI.getFlag(MachineInstr::FrameSetup))
568569 OS << "frame-setup ";
569570 OS << TII->getName(MI.getOpcode());
570 if (isPreISelGenericOpcode(MI.getOpcode())) {
571 assert(MI.getType().isValid() && "Generic instructions must have a type");
572 unsigned NumTypes = MI.getNumTypes();
573 OS << (NumTypes > 1 ? " {" : "") << ' ';
574 for (unsigned i = 0; i < NumTypes; ++i) {
575 MI.getType(i).print(OS);
576 if (i + 1 != NumTypes)
577 OS << ", ";
578 }
579 OS << (NumTypes > 1 ? " }" : "") << ' ';
580 }
581571 if (I < E)
582572 OS << ' ';
583573
786776 if (ShouldPrintRegisterTies && Op.isTied() && !Op.isDef())
787777 OS << "(tied-def " << Op.getParent()->findTiedOperandIdx(I) << ")";
788778 assert((!IsDef || MRI) && "for IsDef, MRI must be provided");
789 if (IsDef && MRI->getSize(Op.getReg()))
790 OS << '(' << MRI->getSize(Op.getReg()) << ')';
779 if (IsDef && MRI->getType(Op.getReg()).isValid())
780 OS << '(' << MRI->getType(Op.getReg()) << ')';
791781 break;
792782 case MachineOperand::MO_Immediate:
793783 OS << Op.getImm();
679679 DebugLoc dl, bool NoImp)
680680 : MCID(&tid), Parent(nullptr), Operands(nullptr), NumOperands(0), Flags(0),
681681 AsmPrinterFlags(0), NumMemRefs(0), MemRefs(nullptr),
682 debugLoc(std::move(dl))
683 #ifdef LLVM_BUILD_GLOBAL_ISEL
684 ,
685 Tys(0)
686 #endif
687 {
682 debugLoc(std::move(dl)) {
688683 assert(debugLoc.hasTrivialDestructor() && "Expected trivial destructor");
689684
690685 // Reserve space for the expected number of operands.
703698 MachineInstr::MachineInstr(MachineFunction &MF, const MachineInstr &MI)
704699 : MCID(&MI.getDesc()), Parent(nullptr), Operands(nullptr), NumOperands(0),
705700 Flags(0), AsmPrinterFlags(0), NumMemRefs(MI.NumMemRefs),
706 MemRefs(MI.MemRefs), debugLoc(MI.getDebugLoc())
707 #ifdef LLVM_BUILD_GLOBAL_ISEL
708 ,
709 Tys(0)
710 #endif
711 {
701 MemRefs(MI.MemRefs), debugLoc(MI.getDebugLoc()) {
712702 assert(debugLoc.hasTrivialDestructor() && "Expected trivial destructor");
713703
714704 CapOperands = OperandCapacity::get(MI.getNumOperands());
730720 return &MBB->getParent()->getRegInfo();
731721 return nullptr;
732722 }
733
734 // Implement dummy setter and getter for type when
735 // global-isel is not built.
736 // The proper implementation is WIP and is tracked here:
737 // PR26576.
738 #ifndef LLVM_BUILD_GLOBAL_ISEL
739 unsigned MachineInstr::getNumTypes() const { return 0; }
740
741 void MachineInstr::setType(LLT Ty, unsigned Idx) {}
742
743 LLT MachineInstr::getType(unsigned Idx) const { return LLT{}; }
744
745 void MachineInstr::removeTypes() {}
746
747 #else
748 unsigned MachineInstr::getNumTypes() const { return Tys.size(); }
749
750 void MachineInstr::setType(LLT Ty, unsigned Idx) {
751 assert((!Ty.isValid() || isPreISelGenericOpcode(getOpcode())) &&
752 "Non generic instructions are not supposed to be typed");
753 if (Tys.size() < Idx + 1)
754 Tys.resize(Idx+1);
755 Tys[Idx] = Ty;
756 }
757
758 LLT MachineInstr::getType(unsigned Idx) const { return Tys[Idx]; }
759
760 void MachineInstr::removeTypes() {
761 Tys.clear();
762 }
763 #endif // LLVM_BUILD_GLOBAL_ISEL
764723
765724 /// RemoveRegOperandsFromUseLists - Unlink all of the register operands in
766725 /// this instruction from their respective use lists. This requires that the
17501709 unsigned Reg = getOperand(StartOp).getReg();
17511710 if (TargetRegisterInfo::isVirtualRegister(Reg)) {
17521711 VirtRegs.push_back(Reg);
1753 unsigned Size;
1754 if (MRI && (Size = MRI->getSize(Reg)))
1755 OS << '(' << Size << ')';
1712 LLT Ty = MRI ? MRI->getType(Reg) : LLT{};
1713 if (Ty.isValid())
1714 OS << '(' << Ty << ')';
17561715 }
17571716 }
17581717
17641723 OS << TII->getName(getOpcode());
17651724 else
17661725 OS << "UNKNOWN";
1767
1768 if (getNumTypes() > 0) {
1769 OS << " { ";
1770 for (unsigned i = 0; i < getNumTypes(); ++i) {
1771 getType(i).print(OS);
1772 if (i + 1 != getNumTypes())
1773 OS << ", ";
1774 }
1775 OS << " } ";
1776 }
17771726
17781727 if (SkipOpers)
17791728 return;
111111 return Reg;
112112 }
113113
114 unsigned
115 MachineRegisterInfo::getSize(unsigned VReg) const {
116 VRegToSizeMap::const_iterator SizeIt = getVRegToSize().find(VReg);
117 return SizeIt != getVRegToSize().end() ? SizeIt->second : 0;
118 }
119
120 void MachineRegisterInfo::setSize(unsigned VReg, unsigned Size) {
114 LLT MachineRegisterInfo::getType(unsigned VReg) const {
115 VRegToTypeMap::const_iterator TypeIt = getVRegToType().find(VReg);
116 return TypeIt != getVRegToType().end() ? TypeIt->second : LLT{};
117 }
118
119 void MachineRegisterInfo::setType(unsigned VReg, LLT Ty) {
121120 // Check that VReg doesn't have a class.
122121 assert(!getRegClassOrRegBank(VReg).is<const TargetRegisterClass *>() &&
123122 "Can't set the size of a non-generic virtual register");
124 getVRegToSize()[VReg] = Size;
123 getVRegToType()[VReg] = Ty;
125124 }
126125
127126 unsigned
128 MachineRegisterInfo::createGenericVirtualRegister(unsigned Size) {
129 assert(Size && "Cannot create empty virtual register");
127 MachineRegisterInfo::createGenericVirtualRegister(LLT Ty) {
128 assert(Ty.isValid() && "Cannot create empty virtual register");
130129
131130 // New virtual register number.
132131 unsigned Reg = TargetRegisterInfo::index2VirtReg(getNumVirtRegs());
133132 VRegInfo.grow(Reg);
134133 // FIXME: Should we use a dummy register bank?
135134 VRegInfo[Reg].first = static_cast<RegisterBank *>(nullptr);
136 getVRegToSize()[Reg] = Size;
135 getVRegToType()[Reg] = Ty;
137136 RegAllocHints.grow(Reg);
138137 if (TheDelegate)
139138 TheDelegate->MRI_NoteNewVirtualRegister(Reg);
140139 return Reg;
141140 }
142141
143 void MachineRegisterInfo::clearVirtRegSizes() {
142 void MachineRegisterInfo::clearVirtRegTypes() {
144143 #ifndef NDEBUG
145144 // Verify that the size of the now-constrained vreg is unchanged.
146 for (auto &VRegToSize : getVRegToSize()) {
147 auto *RC = getRegClass(VRegToSize.first);
148 if (VRegToSize.second != (RC->getSize() * 8))
145 for (auto &VRegToType : getVRegToType()) {
146 auto *RC = getRegClass(VRegToType.first);
147 if (VRegToType.second.isSized() &&
148 VRegToType.second.getSizeInBits() > (RC->getSize() * 8))
149149 llvm_unreachable(
150150 "Virtual register has explicit size different from its class size");
151151 }
152152 #endif
153153
154 getVRegToSize().clear();
154 getVRegToType().clear();
155155 }
156156
157157 /// clearVirtRegs - Remove all virtual registers (after physreg assignment).
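
The MachineRegisterInfo interface above is now the single place where a virtual register's LLT is recorded. A minimal usage sketch; MRI and OtherReg are assumed to exist, and the chosen types are arbitrary.

#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include <cassert>
using namespace llvm;

// Hypothetical snippet: create a typed generic vreg and read the type back.
static unsigned makeTypedVReg(MachineRegisterInfo &MRI, unsigned OtherReg) {
  unsigned Reg = MRI.createGenericVirtualRegister(LLT::scalar(64));
  assert(MRI.getType(Reg).getSizeInBits() == 64 && "type is recorded in MRI");

  // A generic vreg created elsewhere can be (re)typed explicitly.
  MRI.setType(OtherReg, LLT::pointer(0));
  return Reg;
}
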
886886 }
887887
888888 // Check types.
889 const unsigned NumTypes = MI->getNumTypes();
890889 if (isPreISelGenericOpcode(MCID.getOpcode())) {
891890 if (isFunctionSelected)
892891 report("Unexpected generic instruction in a Selected function", MI);
893892
894 if (NumTypes == 0)
895 report("Generic instruction must have a type", MI);
896 } else {
897 if (NumTypes != 0)
898 report("Non-generic instruction cannot have a type", MI);
893 // Generic instructions specify equality constraints between some
894 // of their operands. Make sure these are consistent.
895 SmallVector<LLT, 4> Types;
896 for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
897 if (!MCID.OpInfo[i].isGenericType())
898 continue;
899 size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
900 Types.resize(std::max(TypeIdx + 1, Types.size()));
901
902 LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
903 if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
904 report("type mismatch in generic instruction", MI);
905 Types[TypeIdx] = OpTy;
906 }
899907 }
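
The same MCOperandInfo accessors give any client a cheap way to recover the equality constraints between generic operands. A sketch that mirrors the verifier loop above; only the free-standing wrapper is hypothetical.

#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/MC/MCInstrDesc.h"
#include <algorithm>
using namespace llvm;

// Hypothetical helper: true if all operands sharing a generic type index
// carry the same LLT in MRI.
static bool genericTypesAreConsistent(const MachineInstr &MI,
                                      const MachineRegisterInfo &MRI) {
  const MCInstrDesc &MCID = MI.getDesc();
  SmallVector<LLT, 4> Types;
  for (unsigned i = 0, e = MCID.getNumOperands(); i != e; ++i) {
    if (!MCID.OpInfo[i].isGenericType())
      continue;
    size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
    Types.resize(std::max(TypeIdx + 1, Types.size()));

    LLT OpTy = MRI.getType(MI.getOperand(i).getReg());
    if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
      return false;              // operands disagree on their shared type
    Types[TypeIdx] = OpTy;
  }
  return true;
}
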
900908
901909 // Generic opcodes must not have physical register operands.
10251033 }
10261034
10271035 // The gvreg must have a size and it must not have a SubIdx.
1028 unsigned Size = MRI->getSize(Reg);
1029 if (!Size) {
1030 report("Generic virtual register must have a size", MO, MONum);
1036 LLT Ty = MRI->getType(Reg);
1037 if (!Ty.isValid()) {
1038 report("Generic virtual register must have a valid type", MO,
1039 MONum);
10311040 return;
10321041 }
10331042
10421051 }
10431052
10441053 // Make sure the register fits into its register bank if any.
1045 if (RegBank && RegBank->getSize() < Size) {
1054 if (RegBank && Ty.isSized() &&
1055 RegBank->getSize() < Ty.getSizeInBits()) {
10461056 report("Register bank is too small for virtual register", MO,
10471057 MONum);
10481058 errs() << "Register bank " << RegBank->getName() << " too small("
1049 << RegBank->getSize() << ") to fit " << Size << "-bits\n";
1059 << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
1060 << "-bits\n";
10501061 return;
10511062 }
10521063 if (SubIdx) {
1053 report("Generic virtual register does not subregister index", MO, MONum);
1064 report("Generic virtual register does not subregister index", MO,
1065 MONum);
10541066 return;
10551067 }
10561068 break;
124124 [](MachineIRBuilder &MIRBuilder, Type *Ty,
125125 unsigned ValReg, unsigned PhysReg) {
126126 MIRBuilder.getMBB().addLiveIn(PhysReg);
127 MIRBuilder.buildType(LLT{*Ty}, ValReg, PhysReg);
127 MIRBuilder.buildCopy(ValReg, PhysReg);
128128 });
129129 }
130130
171171 handleAssignments(MIRBuilder, RetAssignFn, ResTys, ResRegs,
172172 [&](MachineIRBuilder &MIRBuilder, Type *Ty,
173173 unsigned ValReg, unsigned PhysReg) {
174 MIRBuilder.buildType(LLT{*Ty}, ValReg, PhysReg);
174 MIRBuilder.buildCopy(ValReg, PhysReg);
175175 MIB.addDef(PhysReg, RegState::Implicit);
176176 });
177177
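
With types attached to registers, the call lowering above now emits a plain COPY into a typed vreg instead of a typed G_TYPE. A sketch with a hypothetical wrapper; buildCopy and getMBB().addLiveIn are used exactly as in the hunk above, and PhysReg/Ty are supplied by the caller.

#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/LowLevelType.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
using namespace llvm;

// Hypothetical helper: copy a live-in physical argument into a generic
// vreg whose LLT is recorded in MRI.
static unsigned copyPhysArg(MachineIRBuilder &MIRBuilder,
                            MachineRegisterInfo &MRI,
                            unsigned PhysReg, LLT Ty) {
  unsigned ValReg = MRI.createGenericVirtualRegister(Ty);
  MIRBuilder.getMBB().addLiveIn(PhysReg);
  MIRBuilder.buildCopy(ValReg, PhysReg);
  return ValReg;
}
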
4848 const AArch64RegisterBankInfo &RBI,
4949 const MachineRegisterInfo &MRI,
5050 const AArch64RegisterInfo &TRI) {
51 if (!I.getType().isSized()) {
51 LLT Ty = MRI.getType(I.getOperand(0).getReg());
52 if (!Ty.isSized()) {
5253 DEBUG(dbgs() << "Generic binop should be sized\n");
5354 return true;
5455 }
218219 return false;
219220 }
220221
221 const LLT Ty = I.getType();
222 const LLT Ty = I.getOperand(0).isReg() ? MRI.getType(I.getOperand(0).getReg())
223 : LLT::unsized();
222224 assert(Ty.isValid() && "Generic instruction doesn't have a type");
223225
224226 switch (I.getOpcode()) {
225227 case TargetOpcode::G_BR: {
226228 I.setDesc(TII.get(AArch64::B));
227 I.removeTypes();
228229 return true;
229230 }
230231
231232 case TargetOpcode::G_TYPE: {
232233 I.setDesc(TII.get(TargetOpcode::COPY));
233 I.removeTypes();
234234 return true;
235235 }
236236
237237 case TargetOpcode::G_PHI: {
238238 I.setDesc(TII.get(TargetOpcode::PHI));
239 I.removeTypes();
240239 return true;
241240 }
242241
243242 case TargetOpcode::G_FRAME_INDEX: {
244243 // allocas and G_FRAME_INDEX are only supported in addrspace(0).
245 if (I.getType() != LLT::pointer(0)) {
246 DEBUG(dbgs() << "G_FRAME_INDEX pointer has type: " << I.getType()
244 if (Ty != LLT::pointer(0)) {
245 DEBUG(dbgs() << "G_FRAME_INDEX pointer has type: " << Ty
247246 << ", expected: " << LLT::pointer(0) << '\n');
248247 return false;
249248 }
250249
251250 I.setDesc(TII.get(AArch64::ADDXri));
252 I.removeTypes();
253251
254252 // MOs for a #0 shifted immediate.
255253 I.addOperand(MachineOperand::CreateImm(0));
259257 }
260258 case TargetOpcode::G_LOAD:
261259 case TargetOpcode::G_STORE: {
262 LLT MemTy = I.getType(0);
263 LLT PtrTy = I.getType(1);
260 LLT MemTy = Ty;
261 LLT PtrTy = MRI.getType(I.getOperand(1).getReg());
264262
265263 if (PtrTy != LLT::pointer(0)) {
266264 DEBUG(dbgs() << "Load/Store pointer has type: " << PtrTy
274272 const RegisterBank &PtrRB = *RBI.getRegBank(PtrReg, MRI, TRI);
275273 assert(PtrRB.getID() == AArch64::GPRRegBankID &&
276274 "Load/Store pointer operand isn't a GPR");
277 assert(MRI.getSize(PtrReg) == 64 &&
278 "Load/Store pointer operand isn't 64-bit");
275 assert(MRI.getType(PtrReg).isPointer() &&
276 "Load/Store pointer operand isn't a pointer");
279277 #endif
280278
281279 const unsigned ValReg = I.getOperand(0).getReg();
287285 return false;
288286
289287 I.setDesc(TII.get(NewOpc));
290 I.removeTypes();
291288
292289 I.addOperand(MachineOperand::CreateImm(0));
293290 return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
321318 }
322319
323320 I.setDesc(TII.get(NewOpc));
324 I.removeTypes();
325321
326322 I.addOperand(MachineOperand::CreateReg(ZeroReg, /*isDef=*/false));
327323
360356
361357 I.setDesc(TII.get(NewOpc));
362358 // FIXME: Should the type be always reset in setDesc?
363 I.removeTypes();
364359
365360 // Now that we selected an opcode, we need to constrain the register
366361 // operands to use appropriate classes.
1313
1414 #include "AArch64RegisterBankInfo.h"
1515 #include "AArch64InstrInfo.h" // For XXXRegClassID.
16 #include "llvm/CodeGen/LowLevelType.h"
17 #include "llvm/CodeGen/MachineRegisterInfo.h"
1618 #include "llvm/CodeGen/GlobalISel/RegisterBank.h"
1719 #include "llvm/CodeGen/GlobalISel/RegisterBankInfo.h"
1820 #include "llvm/Target/TargetRegisterInfo.h"
176178 // As a top-level guess, vectors go in FPRs, scalars in GPRs. Obviously this
177179 // won't work for normal floating-point types (or NZCV). When such
178180 // instructions exist we'll need to look at the MI's opcode.
179 LLT Ty = MI.getType();
181 auto &MRI = MI.getParent()->getParent()->getRegInfo();
182 LLT Ty = MRI.getType(MI.getOperand(0).getReg());
180183 unsigned BankID;
181184 if (Ty.isVector())
182185 BankID = AArch64::FPRRegBankID;
33 target triple = "aarch64-linux-gnu"
44
55 ; CHECK-LABEL: name: args_i32
6 ; CHECK: %[[ARG0:[0-9]+]](32) = G_TYPE s32 %w0
7 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w1
8 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w2
9 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w3
10 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w4
11 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w5
12 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w6
13 ; CHECK: %{{[0-9]+}}(32) = G_TYPE s32 %w7
6 ; CHECK: %[[ARG0:[0-9]+]](s32) = COPY %w0
7 ; CHECK: %{{[0-9]+}}(s32) = COPY %w1
8 ; CHECK: %{{[0-9]+}}(s32) = COPY %w2
9 ; CHECK: %{{[0-9]+}}(s32) = COPY %w3
10 ; CHECK: %{{[0-9]+}}(s32) = COPY %w4
11 ; CHECK: %{{[0-9]+}}(s32) = COPY %w5
12 ; CHECK: %{{[0-9]+}}(s32) = COPY %w6
13 ; CHECK: %{{[0-9]+}}(s32) = COPY %w7
1414 ; CHECK: %w0 = COPY %[[ARG0]]
1515
1616 define i32 @args_i32(i32 %w0, i32 %w1, i32 %w2, i32 %w3,
1919 }
2020
2121 ; CHECK-LABEL: name: args_i64
22 ; CHECK: %[[ARG0:[0-9]+]](64) = G_TYPE s64 %x0
23 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x1
24 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x2
25 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x3
26 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x4
27 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x5
28 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x6
29 ; CHECK: %{{[0-9]+}}(64) = G_TYPE s64 %x7
22 ; CHECK: %[[ARG0:[0-9]+]](s64) = COPY %x0
23 ; CHECK: %{{[0-9]+}}(s64) = COPY %x1
24 ; CHECK: %{{[0-9]+}}(s64) = COPY %x2
25 ; CHECK: %{{[0-9]+}}(s64) = COPY %x3
26 ; CHECK: %{{[0-9]+}}(s64) = COPY %x4
27 ; CHECK: %{{[0-9]+}}(s64) = COPY %x5
28 ; CHECK: %{{[0-9]+}}(s64) = COPY %x6
29 ; CHECK: %{{[0-9]+}}(s64) = COPY %x7
3030 ; CHECK: %x0 = COPY %[[ARG0]]
3131 define i64 @args_i64(i64 %x0, i64 %x1, i64 %x2, i64 %x3,
3232 i64 %x4, i64 %x5, i64 %x6, i64 %x7) {
3535
3636
3737 ; CHECK-LABEL: name: args_ptrs
38 ; CHECK: %[[ARG0:[0-9]+]](64) = G_TYPE p0 %x0
39 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x1
40 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x2
41 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x3
42 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x4
43 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x5
44 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x6
45 ; CHECK: %{{[0-9]+}}(64) = G_TYPE p0 %x7
38 ; CHECK: %[[ARG0:[0-9]+]](p0) = COPY %x0
39 ; CHECK: %{{[0-9]+}}(p0) = COPY %x1
40 ; CHECK: %{{[0-9]+}}(p0) = COPY %x2
41 ; CHECK: %{{[0-9]+}}(p0) = COPY %x3
42 ; CHECK: %{{[0-9]+}}(p0) = COPY %x4
43 ; CHECK: %{{[0-9]+}}(p0) = COPY %x5
44 ; CHECK: %{{[0-9]+}}(p0) = COPY %x6
45 ; CHECK: %{{[0-9]+}}(p0) = COPY %x7
4646 ; CHECK: %x0 = COPY %[[ARG0]]
4747 define i8* @args_ptrs(i8* %x0, i16* %x1, <2 x i8>* %x2, {i8, i16, i32}* %x3,
4848 [3 x float]* %x4, double* %x5, i8* %x6, i8* %x7) {
9393 bb.0:
9494 liveins: %w0, %w1
9595
96 %0(32) = G_TYPE s32 %w0
97 %1(32) = G_TYPE s32 %w1
98 %2(32) = G_ADD s32 %0, %1
96 %0(s32) = COPY %w0
97 %1(s32) = COPY %w1
98 %2(s32) = G_ADD %0, %1
9999 ...
100100
101101 ---
122122 bb.0:
123123 liveins: %x0, %x1
124124
125 %0(64) = G_TYPE s64 %x0
126 %1(64) = G_TYPE s64 %x1
127 %2(64) = G_ADD s64 %0, %1
125 %0(s64) = COPY %x0
126 %1(s64) = COPY %x1
127 %2(s64) = G_ADD %0, %1
128128 ...
129129
130130 ---
151151 bb.0:
152152 liveins: %w0, %w1
153153
154 %0(32) = G_TYPE s32 %w0
155 %1(32) = G_TYPE s32 %w1
156 %2(32) = G_SUB s32 %0, %1
154 %0(s32) = COPY %w0
155 %1(s32) = COPY %w1
156 %2(s32) = G_SUB %0, %1
157157 ...
158158
159159 ---
180180 bb.0:
181181 liveins: %x0, %x1
182182
183 %0(64) = G_TYPE s64 %x0
184 %1(64) = G_TYPE s64 %x1
185 %2(64) = G_SUB s64 %0, %1
183 %0(s64) = COPY %x0
184 %1(s64) = COPY %x1
185 %2(s64) = G_SUB %0, %1
186186 ...
187187
188188 ---
209209 bb.0:
210210 liveins: %w0, %w1
211211
212 %0(32) = G_TYPE s32 %w0
213 %1(32) = G_TYPE s32 %w1
214 %2(32) = G_OR s32 %0, %1
212 %0(s32) = COPY %w0
213 %1(s32) = COPY %w1
214 %2(s32) = G_OR %0, %1
215215 ...
216216
217217 ---
238238 bb.0:
239239 liveins: %x0, %x1
240240
241 %0(64) = G_TYPE s64 %x0
242 %1(64) = G_TYPE s64 %x1
243 %2(64) = G_OR s64 %0, %1
241 %0(s64) = COPY %x0
242 %1(s64) = COPY %x1
243 %2(s64) = G_OR %0, %1
244244 ...
245245
246246 ---
267267 bb.0:
268268 liveins: %w0, %w1
269269
270 %0(32) = G_TYPE s32 %w0
271 %1(32) = G_TYPE s32 %w1
272 %2(32) = G_XOR s32 %0, %1
270 %0(s32) = COPY %w0
271 %1(s32) = COPY %w1
272 %2(s32) = G_XOR %0, %1
273273 ...
274274
275275 ---
296296 bb.0:
297297 liveins: %x0, %x1
298298
299 %0(64) = G_TYPE s64 %x0
300 %1(64) = G_TYPE s64 %x1
301 %2(64) = G_XOR s64 %0, %1
299 %0(s64) = COPY %x0
300 %1(s64) = COPY %x1
301 %2(s64) = G_XOR %0, %1
302302 ...
303303
304304 ---
325325 bb.0:
326326 liveins: %w0, %w1
327327
328 %0(32) = G_TYPE s32 %w0
329 %1(32) = G_TYPE s32 %w1
330 %2(32) = G_AND s32 %0, %1
328 %0(s32) = COPY %w0
329 %1(s32) = COPY %w1
330 %2(s32) = G_AND %0, %1
331331 ...
332332
333333 ---
354354 bb.0:
355355 liveins: %x0, %x1
356356
357 %0(64) = G_TYPE s64 %x0
358 %1(64) = G_TYPE s64 %x1
359 %2(64) = G_AND s64 %0, %1
357 %0(s64) = COPY %x0
358 %1(s64) = COPY %x1
359 %2(s64) = G_AND %0, %1
360360 ...
361361
362362 ---
383383 bb.0:
384384 liveins: %w0, %w1
385385
386 %0(32) = G_TYPE s32 %w0
387 %1(32) = G_TYPE s32 %w1
388 %2(32) = G_SHL s32 %0, %1
386 %0(s32) = COPY %w0
387 %1(s32) = COPY %w1
388 %2(s32) = G_SHL %0, %1
389389 ...
390390
391391 ---
412412 bb.0:
413413 liveins: %x0, %x1
414414
415 %0(64) = G_TYPE s64 %x0
416 %1(64) = G_TYPE s64 %x1
417 %2(64) = G_SHL s64 %0, %1
415 %0(s64) = COPY %x0
416 %1(s64) = COPY %x1
417 %2(s64) = G_SHL %0, %1
418418 ...
419419
420420 ---
441441 bb.0:
442442 liveins: %w0, %w1
443443
444 %0(32) = G_TYPE s32 %w0
445 %1(32) = G_TYPE s32 %w1
446 %2(32) = G_LSHR s32 %0, %1
444 %0(s32) = COPY %w0
445 %1(s32) = COPY %w1
446 %2(s32) = G_LSHR %0, %1
447447 ...
448448
449449 ---
470470 bb.0:
471471 liveins: %x0, %x1
472472
473 %0(64) = G_TYPE s64 %x0
474 %1(64) = G_TYPE s64 %x1
475 %2(64) = G_LSHR s64 %0, %1
473 %0(s64) = COPY %x0
474 %1(s64) = COPY %x1
475 %2(s64) = G_LSHR %0, %1
476476 ...
477477
478478 ---
499499 bb.0:
500500 liveins: %w0, %w1
501501
502 %0(32) = G_TYPE s32 %w0
503 %1(32) = G_TYPE s32 %w1
504 %2(32) = G_ASHR s32 %0, %1
502 %0(s32) = COPY %w0
503 %1(s32) = COPY %w1
504 %2(s32) = G_ASHR %0, %1
505505 ...
506506
507507 ---
528528 bb.0:
529529 liveins: %x0, %x1
530530
531 %0(64) = G_TYPE s64 %x0
532 %1(64) = G_TYPE s64 %x1
533 %2(64) = G_ASHR s64 %0, %1
531 %0(s64) = COPY %x0
532 %1(s64) = COPY %x1
533 %2(s64) = G_ASHR %0, %1
534534 ...
535535
536536 # Check that we select s32 GPR G_MUL. This is trickier than other binops because
557557 bb.0:
558558 liveins: %w0, %w1
559559
560 %0(32) = G_TYPE s32 %w0
561 %1(32) = G_TYPE s32 %w1
562 %2(32) = G_MUL s32 %0, %1
560 %0(s32) = COPY %w0
561 %1(s32) = COPY %w1
562 %2(s32) = G_MUL %0, %1
563563 ...
564564
565565 ---
586586 bb.0:
587587 liveins: %x0, %x1
588588
589 %0(64) = G_TYPE s64 %x0
590 %1(64) = G_TYPE s64 %x1
591 %2(64) = G_MUL s64 %0, %1
589 %0(s64) = COPY %x0
590 %1(s64) = COPY %x1
591 %2(s64) = G_MUL %0, %1
592592 ...
593593
594594 ---
615615 bb.0:
616616 liveins: %w0, %w1
617617
618 %0(32) = G_TYPE s32 %w0
619 %1(32) = G_TYPE s32 %w1
620 %2(32) = G_SDIV s32 %0, %1
618 %0(s32) = COPY %w0
619 %1(s32) = COPY %w1
620 %2(s32) = G_SDIV %0, %1
621621 ...
622622
623623 ---
644644 bb.0:
645645 liveins: %x0, %x1
646646
647 %0(64) = G_TYPE s64 %x0
648 %1(64) = G_TYPE s64 %x1
649 %2(64) = G_SDIV s64 %0, %1
647 %0(s64) = COPY %x0
648 %1(s64) = COPY %x1
649 %2(s64) = G_SDIV %0, %1
650650 ...
651651
652652 ---
673673 bb.0:
674674 liveins: %w0, %w1
675675
676 %0(32) = G_TYPE s32 %w0
677 %1(32) = G_TYPE s32 %w1
678 %2(32) = G_UDIV s32 %0, %1
676 %0(s32) = COPY %w0
677 %1(s32) = COPY %w1
678 %2(s32) = G_UDIV %0, %1
679679 ...
680680
681681 ---
702702 bb.0:
703703 liveins: %x0, %x1
704704
705 %0(64) = G_TYPE s64 %x0
706 %1(64) = G_TYPE s64 %x1
707 %2(64) = G_UDIV s64 %0, %1
705 %0(s64) = COPY %x0
706 %1(s64) = COPY %x1
707 %2(s64) = G_UDIV %0, %1
708708 ...
709709
710710 ---
731731 bb.0:
732732 liveins: %s0, %s1
733733
734 %0(32) = G_TYPE s32 %s0
735 %1(32) = G_TYPE s32 %s1
736 %2(32) = G_FADD s32 %0, %1
734 %0(s32) = COPY %s0
735 %1(s32) = COPY %s1
736 %2(s32) = G_FADD %0, %1
737737 ...
738738
739739 ---
759759 bb.0:
760760 liveins: %d0, %d1
761761
762 %0(64) = G_TYPE s64 %d0
763 %1(64) = G_TYPE s64 %d1
764 %2(64) = G_FADD s64 %0, %1
762 %0(s64) = COPY %d0
763 %1(s64) = COPY %d1
764 %2(s64) = G_FADD %0, %1
765765 ...
766766
767767 ---
787787 bb.0:
788788 liveins: %s0, %s1
789789
790 %0(32) = G_TYPE s32 %s0
791 %1(32) = G_TYPE s32 %s1
792 %2(32) = G_FSUB s32 %0, %1
790 %0(s32) = COPY %s0
791 %1(s32) = COPY %s1
792 %2(s32) = G_FSUB %0, %1
793793 ...
794794
795795 ---
815815 bb.0:
816816 liveins: %d0, %d1
817817
818 %0(64) = G_TYPE s64 %d0
819 %1(64) = G_TYPE s64 %d1
820 %2(64) = G_FSUB s64 %0, %1
818 %0(s64) = COPY %d0
819 %1(s64) = COPY %d1
820 %2(s64) = G_FSUB %0, %1
821821 ...
822822
823823 ---
843843 bb.0:
844844 liveins: %s0, %s1
845845
846 %0(32) = G_TYPE s32 %s0
847 %1(32) = G_TYPE s32 %s1
848 %2(32) = G_FMUL s32 %0, %1
846 %0(s32) = COPY %s0
847 %1(s32) = COPY %s1
848 %2(s32) = G_FMUL %0, %1
849849 ...
850850
851851 ---
871871 bb.0:
872872 liveins: %d0, %d1
873873
874 %0(64) = G_TYPE s64 %d0
875 %1(64) = G_TYPE s64 %d1
876 %2(64) = G_FMUL s64 %0, %1
874 %0(s64) = COPY %d0
875 %1(s64) = COPY %d1
876 %2(s64) = G_FMUL %0, %1
877877 ...
878878
879879 ---
899899 bb.0:
900900 liveins: %s0, %s1
901901
902 %0(32) = G_TYPE s32 %s0
903 %1(32) = G_TYPE s32 %s1
904 %2(32) = G_FDIV s32 %0, %1
902 %0(s32) = COPY %s0
903 %1(s32) = COPY %s1
904 %2(s32) = G_FDIV %0, %1
905905 ...
906906
907907 ---
927927 bb.0:
928928 liveins: %d0, %d1
929929
930 %0(64) = G_TYPE s64 %d0
931 %1(64) = G_TYPE s64 %d1
932 %2(64) = G_FDIV s64 %0, %1
930 %0(s64) = COPY %d0
931 %1(s64) = COPY %d1
932 %2(s64) = G_FDIV %0, %1
933933 ...
934934
935935 ---
946946 bb.0:
947947 successors: %bb.0
948948
949 G_BR unsized %bb.0
949 G_BR %bb.0
950950 ...
951951
952952 ---
969969 bb.0:
970970 liveins: %x0
971971
972 %0(64) = G_TYPE s64 %x0
973 %1(64) = G_LOAD { s64, p0 } %0 :: (load 8 from %ir.addr)
972 %0(p0) = COPY %x0
973 %1(s64) = G_LOAD %0 :: (load 8 from %ir.addr)
974974
975975 ...
976976
994994 bb.0:
995995 liveins: %x0
996996
997 %0(64) = G_TYPE s64 %x0
998 %1(32) = G_LOAD { s32, p0 } %0 :: (load 4 from %ir.addr)
997 %0(p0) = COPY %x0
998 %1(s32) = G_LOAD %0 :: (load 4 from %ir.addr)
999999
10001000 ...
10011001
10201020 bb.0:
10211021 liveins: %x0, %x1
10221022
1023 %0(64) = G_TYPE s64 %x0
1024 %1(64) = G_TYPE s64 %x1
1025 G_STORE { s64, p0 } %1, %0 :: (store 8 into %ir.addr)
1023 %0(p0) = COPY %x0
1024 %1(s64) = COPY %x1
1025 G_STORE %1, %0 :: (store 8 into %ir.addr)
10261026
10271027 ...
10281028
10471047 bb.0:
10481048 liveins: %x0, %w1
10491049
1050 %0(64) = G_TYPE s64 %x0
1051 %1(32) = G_TYPE s32 %w1
1052 G_STORE { s32, p0 } %1, %0 :: (store 4 into %ir.addr)
1050 %0(p0) = COPY %x0
1051 %1(s32) = COPY %w1
1052 G_STORE %1, %0 :: (store 4 into %ir.addr)
10531053
10541054 ...
10551055
10711071 # CHECK: %0 = ADDXri %stack.0.ptr0, 0, 0
10721072 body: |
10731073 bb.0:
1074 %0(64) = G_FRAME_INDEX p0 %stack.0.ptr0
1074 %0(p0) = G_FRAME_INDEX %stack.0.ptr0
10751075 ...
10761076
10771077 ---
66
77 ; Tests for add.
88 ; CHECK-LABEL: name: addi64
9 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
10 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
11 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_ADD s64 [[ARG1]], [[ARG2]]
9 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
10 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
11 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_ADD [[ARG1]], [[ARG2]]
1212 ; CHECK-NEXT: %x0 = COPY [[RES]]
1313 ; CHECK-NEXT: RET_ReallyLR implicit %x0
1414 define i64 @addi64(i64 %arg1, i64 %arg2) {
1717 }
1818
1919 ; CHECK-LABEL: name: muli64
20 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
21 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
22 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_MUL s64 [[ARG1]], [[ARG2]]
20 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
21 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
22 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_MUL [[ARG1]], [[ARG2]]
2323 ; CHECK-NEXT: %x0 = COPY [[RES]]
2424 ; CHECK-NEXT: RET_ReallyLR implicit %x0
2525 define i64 @muli64(i64 %arg1, i64 %arg2) {
3434 ; CHECK-NEXT: - { id: 1, name: ptr2, offset: 0, size: 8, alignment: 1 }
3535 ; CHECK-NEXT: - { id: 2, name: ptr3, offset: 0, size: 128, alignment: 8 }
3636 ; CHECK-NEXT: - { id: 3, name: ptr4, offset: 0, size: 1, alignment: 8 }
37 ; CHECK: %{{[0-9]+}}(64) = G_FRAME_INDEX p0 %stack.0.ptr1
38 ; CHECK: %{{[0-9]+}}(64) = G_FRAME_INDEX p0 %stack.1.ptr2
39 ; CHECK: %{{[0-9]+}}(64) = G_FRAME_INDEX p0 %stack.2.ptr3
40 ; CHECK: %{{[0-9]+}}(64) = G_FRAME_INDEX p0 %stack.3.ptr4
37 ; CHECK: %{{[0-9]+}}(p0) = G_FRAME_INDEX %stack.0.ptr1
38 ; CHECK: %{{[0-9]+}}(p0) = G_FRAME_INDEX %stack.1.ptr2
39 ; CHECK: %{{[0-9]+}}(p0) = G_FRAME_INDEX %stack.2.ptr3
40 ; CHECK: %{{[0-9]+}}(p0) = G_FRAME_INDEX %stack.3.ptr4
4141 define void @allocai64() {
4242 %ptr1 = alloca i64
4343 %ptr2 = alloca i64, align 1
5757 ; CHECK-NEXT: successors: %[[END:[0-9a-zA-Z._-]+]]({{0x[a-f0-9]+ / 0x[a-f0-9]+}} = 100.00%)
5858 ;
5959 ; Check that we emit the correct branch.
60 ; CHECK: G_BR unsized %[[END]]
60 ; CHECK: G_BR %[[END]]
6161 ;
6262 ; Check that end contains the return instruction.
6363 ; CHECK: [[END]]:
8080 ; CHECK: %[[FALSE:[0-9a-zA-Z._-]+]]({{0x[a-f0-9]+ / 0x[a-f0-9]+}} = 50.00%)
8181 ;
8282 ; Check that we emit the correct branch.
83 ; CHECK: [[ADDR:%.*]](64) = G_TYPE p0 %x0
84 ; CHECK: [[TST:%.*]](1) = G_LOAD { s1, p0 } [[ADDR]]
85 ; CHECK: G_BRCOND s1 [[TST]], %[[TRUE]]
86 ; CHECK: G_BR unsized %[[FALSE]]
83 ; CHECK: [[ADDR:%.*]](p0) = COPY %x0
84 ; CHECK: [[TST:%.*]](s1) = G_LOAD [[ADDR]]
85 ; CHECK: G_BRCOND [[TST]], %[[TRUE]]
86 ; CHECK: G_BR %[[FALSE]]
8787 ;
8888 ; Check that each successor contains the return instruction.
8989 ; CHECK: [[TRUE]]:
101101
102102 ; Tests for or.
103103 ; CHECK-LABEL: name: ori64
104 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
105 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
106 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_OR s64 [[ARG1]], [[ARG2]]
104 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
105 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
106 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_OR [[ARG1]], [[ARG2]]
107107 ; CHECK-NEXT: %x0 = COPY [[RES]]
108108 ; CHECK-NEXT: RET_ReallyLR implicit %x0
109109 define i64 @ori64(i64 %arg1, i64 %arg2) {
112112 }
113113
114114 ; CHECK-LABEL: name: ori32
115 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
116 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
117 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_OR s32 [[ARG1]], [[ARG2]]
115 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
116 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
117 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_OR [[ARG1]], [[ARG2]]
118118 ; CHECK-NEXT: %w0 = COPY [[RES]]
119119 ; CHECK-NEXT: RET_ReallyLR implicit %w0
120120 define i32 @ori32(i32 %arg1, i32 %arg2) {
124124
125125 ; Tests for xor.
126126 ; CHECK-LABEL: name: xori64
127 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
128 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
129 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_XOR s64 [[ARG1]], [[ARG2]]
127 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
128 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
129 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_XOR [[ARG1]], [[ARG2]]
130130 ; CHECK-NEXT: %x0 = COPY [[RES]]
131131 ; CHECK-NEXT: RET_ReallyLR implicit %x0
132132 define i64 @xori64(i64 %arg1, i64 %arg2) {
135135 }
136136
137137 ; CHECK-LABEL: name: xori32
138 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
139 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
140 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_XOR s32 [[ARG1]], [[ARG2]]
138 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
139 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
140 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_XOR [[ARG1]], [[ARG2]]
141141 ; CHECK-NEXT: %w0 = COPY [[RES]]
142142 ; CHECK-NEXT: RET_ReallyLR implicit %w0
143143 define i32 @xori32(i32 %arg1, i32 %arg2) {
147147
148148 ; Tests for and.
149149 ; CHECK-LABEL: name: andi64
150 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
151 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
152 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_AND s64 [[ARG1]], [[ARG2]]
150 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
151 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
152 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_AND [[ARG1]], [[ARG2]]
153153 ; CHECK-NEXT: %x0 = COPY [[RES]]
154154 ; CHECK-NEXT: RET_ReallyLR implicit %x0
155155 define i64 @andi64(i64 %arg1, i64 %arg2) {
158158 }
159159
160160 ; CHECK-LABEL: name: andi32
161 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
162 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
163 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_AND s32 [[ARG1]], [[ARG2]]
161 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
162 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
163 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_AND [[ARG1]], [[ARG2]]
164164 ; CHECK-NEXT: %w0 = COPY [[RES]]
165165 ; CHECK-NEXT: RET_ReallyLR implicit %w0
166166 define i32 @andi32(i32 %arg1, i32 %arg2) {
170170
171171 ; Tests for sub.
172172 ; CHECK-LABEL: name: subi64
173 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
174 ; CHECK-NEXT: [[ARG2:%[0-9]+]](64) = G_TYPE s64 %x1
175 ; CHECK-NEXT: [[RES:%[0-9]+]](64) = G_SUB s64 [[ARG1]], [[ARG2]]
173 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
174 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s64) = COPY %x1
175 ; CHECK-NEXT: [[RES:%[0-9]+]](s64) = G_SUB [[ARG1]], [[ARG2]]
176176 ; CHECK-NEXT: %x0 = COPY [[RES]]
177177 ; CHECK-NEXT: RET_ReallyLR implicit %x0
178178 define i64 @subi64(i64 %arg1, i64 %arg2) {
181181 }
182182
183183 ; CHECK-LABEL: name: subi32
184 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
185 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
186 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_SUB s32 [[ARG1]], [[ARG2]]
184 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
185 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
186 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_SUB [[ARG1]], [[ARG2]]
187187 ; CHECK-NEXT: %w0 = COPY [[RES]]
188188 ; CHECK-NEXT: RET_ReallyLR implicit %w0
189189 define i32 @subi32(i32 %arg1, i32 %arg2) {
192192 }
193193
194194 ; CHECK-LABEL: name: ptrtoint
195 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE p0 %x0
196 ; CHECK: [[RES:%[0-9]+]](64) = G_PTRTOINT { s64, p0 } [[ARG1]]
195 ; CHECK: [[ARG1:%[0-9]+]](p0) = COPY %x0
196 ; CHECK: [[RES:%[0-9]+]](s64) = G_PTRTOINT [[ARG1]]
197197 ; CHECK: %x0 = COPY [[RES]]
198198 ; CHECK: RET_ReallyLR implicit %x0
199199 define i64 @ptrtoint(i64* %a) {
202202 }
203203
204204 ; CHECK-LABEL: name: inttoptr
205 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
206 ; CHECK: [[RES:%[0-9]+]](64) = G_INTTOPTR { p0, s64 } [[ARG1]]
205 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
206 ; CHECK: [[RES:%[0-9]+]](p0) = G_INTTOPTR [[ARG1]]
207207 ; CHECK: %x0 = COPY [[RES]]
208208 ; CHECK: RET_ReallyLR implicit %x0
209209 define i64* @inttoptr(i64 %a) {
212212 }
213213
214214 ; CHECK-LABEL: name: trivial_bitcast
215 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE p0 %x0
215 ; CHECK: [[ARG1:%[0-9]+]](p0) = COPY %x0
216216 ; CHECK: %x0 = COPY [[ARG1]]
217217 ; CHECK: RET_ReallyLR implicit %x0
218218 define i64* @trivial_bitcast(i8* %a) {
221221 }
222222
223223 ; CHECK-LABEL: name: trivial_bitcast_with_copy
224 ; CHECK: [[A:%[0-9]+]](64) = G_TYPE p0 %x0
225 ; CHECK: G_BR unsized %[[CAST:bb\.[0-9]+]]
224 ; CHECK: [[A:%[0-9]+]](p0) = COPY %x0
225 ; CHECK: G_BR %[[CAST:bb\.[0-9]+]]
226226
227227 ; CHECK: [[CAST]]:
228 ; CHECK: {{%[0-9]+}}(64) = COPY [[A]]
229 ; CHECK: G_BR unsized %[[END:bb\.[0-9]+]]
228 ; CHECK: {{%[0-9]+}}(p0) = COPY [[A]]
229 ; CHECK: G_BR %[[END:bb\.[0-9]+]]
230230
231231 ; CHECK: [[END]]:
232232 define i64* @trivial_bitcast_with_copy(i8* %a) {
241241 }
242242
243243 ; CHECK-LABEL: name: bitcast
244 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
245 ; CHECK: [[RES1:%[0-9]+]](64) = G_BITCAST { <2 x s32>, s64 } [[ARG1]]
246 ; CHECK: [[RES2:%[0-9]+]](64) = G_BITCAST { s64, <2 x s32> } [[RES1]]
244 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
245 ; CHECK: [[RES1:%[0-9]+]](<2 x s32>) = G_BITCAST [[ARG1]]
246 ; CHECK: [[RES2:%[0-9]+]](s64) = G_BITCAST [[RES1]]
247247 ; CHECK: %x0 = COPY [[RES2]]
248248 ; CHECK: RET_ReallyLR implicit %x0
249249 define i64 @bitcast(i64 %a) {
253253 }
254254
255255 ; CHECK-LABEL: name: trunc
256 ; CHECK: [[ARG1:%[0-9]+]](64) = G_TYPE s64 %x0
257 ; CHECK: [[VEC:%[0-9]+]](128) = G_LOAD { <4 x s32>, p0 }
258 ; CHECK: [[RES1:%[0-9]+]](8) = G_TRUNC { s8, s64 } [[ARG1]]
259 ; CHECK: [[RES2:%[0-9]+]](64) = G_TRUNC { <4 x s16>, <4 x s32> } [[VEC]]
256 ; CHECK: [[ARG1:%[0-9]+]](s64) = COPY %x0
257 ; CHECK: [[VEC:%[0-9]+]](<4 x s32>) = G_LOAD
258 ; CHECK: [[RES1:%[0-9]+]](s8) = G_TRUNC [[ARG1]]
259 ; CHECK: [[RES2:%[0-9]+]](<4 x s16>) = G_TRUNC [[VEC]]
260260 define void @trunc(i64 %a) {
261261 %vecptr = alloca <4 x i32>
262262 %vec = load <4 x i32>, <4 x i32>* %vecptr
266266 }
267267
268268 ; CHECK-LABEL: name: load
269 ; CHECK: [[ADDR:%[0-9]+]](64) = G_TYPE p0 %x0
270 ; CHECK: [[ADDR42:%[0-9]+]](64) = G_TYPE p42 %x1
271 ; CHECK: [[VAL1:%[0-9]+]](64) = G_LOAD { s64, p0 } [[ADDR]] :: (load 8 from %ir.addr, align 16)
272 ; CHECK: [[VAL2:%[0-9]+]](64) = G_LOAD { s64, p42 } [[ADDR42]] :: (load 8 from %ir.addr42)
273 ; CHECK: [[SUM:%.*]](64) = G_ADD s64 [[VAL1]], [[VAL2]]
269 ; CHECK: [[ADDR:%[0-9]+]](p0) = COPY %x0
270 ; CHECK: [[ADDR42:%[0-9]+]](p42) = COPY %x1
271 ; CHECK: [[VAL1:%[0-9]+]](s64) = G_LOAD [[ADDR]] :: (load 8 from %ir.addr, align 16)
272 ; CHECK: [[VAL2:%[0-9]+]](s64) = G_LOAD [[ADDR42]] :: (load 8 from %ir.addr42)
273 ; CHECK: [[SUM:%.*]](s64) = G_ADD [[VAL1]], [[VAL2]]
274274 ; CHECK: %x0 = COPY [[SUM]]
275275 ; CHECK: RET_ReallyLR implicit %x0
276276 define i64 @load(i64* %addr, i64 addrspace(42)* %addr42) {
281281 }
282282
283283 ; CHECK-LABEL: name: store
284 ; CHECK: [[ADDR:%[0-9]+]](64) = G_TYPE p0 %x0
285 ; CHECK: [[ADDR42:%[0-9]+]](64) = G_TYPE p42 %x1
286 ; CHECK: [[VAL1:%[0-9]+]](64) = G_TYPE s64 %x2
287 ; CHECK: [[VAL2:%[0-9]+]](64) = G_TYPE s64 %x3
288 ; CHECK: G_STORE { s64, p0 } [[VAL1]], [[ADDR]] :: (store 8 into %ir.addr, align 16)
289 ; CHECK: G_STORE { s64, p42 } [[VAL2]], [[ADDR42]] :: (store 8 into %ir.addr42)
284 ; CHECK: [[ADDR:%[0-9]+]](p0) = COPY %x0
285 ; CHECK: [[ADDR42:%[0-9]+]](p42) = COPY %x1
286 ; CHECK: [[VAL1:%[0-9]+]](s64) = COPY %x2
287 ; CHECK: [[VAL2:%[0-9]+]](s64) = COPY %x3
288 ; CHECK: G_STORE [[VAL1]], [[ADDR]] :: (store 8 into %ir.addr, align 16)
289 ; CHECK: G_STORE [[VAL2]], [[ADDR42]] :: (store 8 into %ir.addr42)
290290 ; CHECK: RET_ReallyLR
291291 define void @store(i64* %addr, i64 addrspace(42)* %addr42, i64 %val1, i64 %val2) {
292292 store i64 %val1, i64* %addr, align 16
296296 }
297297
298298 ; CHECK-LABEL: name: intrinsics
299 ; CHECK: [[CUR:%[0-9]+]](32) = G_TYPE s32 %w0
300 ; CHECK: [[BITS:%[0-9]+]](32) = G_TYPE s32 %w1
301 ; CHECK: [[PTR:%[0-9]+]](64) = G_INTRINSIC { p0, s32 } intrinsic(@llvm.returnaddress), 0
302 ; CHECK: [[PTR_VEC:%[0-9]+]](64) = G_FRAME_INDEX p0 %stack.0.ptr.vec
303 ; CHECK: [[VEC:%[0-9]+]](64) = G_LOAD { <8 x s8>, p0 } [[PTR_VEC]]
304 ; CHECK: G_INTRINSIC_W_SIDE_EFFECTS { unsized, <8 x s8>, <8 x s8>, p0 } intrinsic(@llvm.aarch64.neon.st2), [[VEC]], [[VEC]], [[PTR]]
299 ; CHECK: [[CUR:%[0-9]+]](s32) = COPY %w0
300 ; CHECK: [[BITS:%[0-9]+]](s32) = COPY %w1
301 ; CHECK: [[PTR:%[0-9]+]](p0) = G_INTRINSIC intrinsic(@llvm.returnaddress), 0
302 ; CHECK: [[PTR_VEC:%[0-9]+]](p0) = G_FRAME_INDEX %stack.0.ptr.vec
303 ; CHECK: [[VEC:%[0-9]+]](<8 x s8>) = G_LOAD [[PTR_VEC]]
304 ; CHECK: G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.aarch64.neon.st2), [[VEC]], [[VEC]], [[PTR]]
305305 ; CHECK: RET_ReallyLR
306306 declare i8* @llvm.returnaddress(i32)
307307 declare void @llvm.aarch64.neon.st2.v8i8.p0i8(<8 x i8>, <8 x i8>, i8*)
315315 }
316316
317317 ; CHECK-LABEL: name: test_phi
318 ; CHECK: G_BRCOND s1 {{%.*}}, %[[TRUE:bb\.[0-9]+]]
319 ; CHECK: G_BR unsized %[[FALSE:bb\.[0-9]+]]
318 ; CHECK: G_BRCOND {{%.*}}, %[[TRUE:bb\.[0-9]+]]
319 ; CHECK: G_BR %[[FALSE:bb\.[0-9]+]]
320320
321321 ; CHECK: [[TRUE]]:
322 ; CHECK: [[RES1:%[0-9]+]](32) = G_LOAD { s32, p0 }
322 ; CHECK: [[RES1:%[0-9]+]](s32) = G_LOAD
323323
324324 ; CHECK: [[FALSE]]:
325 ; CHECK: [[RES2:%[0-9]+]](32) = G_LOAD { s32, p0 }
326
327 ; CHECK: [[RES:%[0-9]+]](32) = G_PHI s32 [[RES1]], %[[TRUE]], [[RES2]], %[[FALSE]]
325 ; CHECK: [[RES2:%[0-9]+]](s32) = G_LOAD
326
327 ; CHECK: [[RES:%[0-9]+]](s32) = G_PHI [[RES1]], %[[TRUE]], [[RES2]], %[[FALSE]]
328328 ; CHECK: %w0 = COPY [[RES]]
329329 define i32 @test_phi(i32* %addr1, i32* %addr2, i1 %tst) {
330330 br i1 %tst, label %true, label %false
354354 ; It's important that constants are after argument passing, but before the
355355 ; rest of the entry block.
356356 ; CHECK-LABEL: name: constant_int
357 ; CHECK: [[IN:%[0-9]+]](32) = G_TYPE s32 %w0
358 ; CHECK: [[ONE:%[0-9]+]](32) = G_CONSTANT s32 1
359 ; CHECK: G_BR unsized
360
361 ; CHECK: [[SUM1:%[0-9]+]](32) = G_ADD s32 [[IN]], [[ONE]]
362 ; CHECK: [[SUM2:%[0-9]+]](32) = G_ADD s32 [[IN]], [[ONE]]
363 ; CHECK: [[RES:%[0-9]+]](32) = G_ADD s32 [[SUM1]], [[SUM2]]
357 ; CHECK: [[IN:%[0-9]+]](s32) = COPY %w0
358 ; CHECK: [[ONE:%[0-9]+]](s32) = G_CONSTANT 1
359 ; CHECK: G_BR
360
361 ; CHECK: [[SUM1:%[0-9]+]](s32) = G_ADD [[IN]], [[ONE]]
362 ; CHECK: [[SUM2:%[0-9]+]](s32) = G_ADD [[IN]], [[ONE]]
363 ; CHECK: [[RES:%[0-9]+]](s32) = G_ADD [[SUM1]], [[SUM2]]
364364 ; CHECK: %w0 = COPY [[RES]]
365365
366366 define i32 @constant_int(i32 %in) {
374374 }
375375
376376 ; CHECK-LABEL: name: constant_int_start
377 ; CHECK: [[TWO:%[0-9]+]](32) = G_CONSTANT s32 2
378 ; CHECK: [[ANSWER:%[0-9]+]](32) = G_CONSTANT s32 42
379 ; CHECK: [[RES:%[0-9]+]](32) = G_ADD s32 [[TWO]], [[ANSWER]]
377 ; CHECK: [[TWO:%[0-9]+]](s32) = G_CONSTANT 2
378 ; CHECK: [[ANSWER:%[0-9]+]](s32) = G_CONSTANT 42
379 ; CHECK: [[RES:%[0-9]+]](s32) = G_ADD [[TWO]], [[ANSWER]]
380380 define i32 @constant_int_start() {
381381 %res = add i32 2, 42
382382 ret i32 %res
383383 }
384384
385385 ; CHECK-LABEL: name: test_undef
386 ; CHECK: [[UNDEF:%[0-9]+]](32) = IMPLICIT_DEF
386 ; CHECK: [[UNDEF:%[0-9]+]](s32) = IMPLICIT_DEF
387387 ; CHECK: %w0 = COPY [[UNDEF]]
388388 define i32 @test_undef() {
389389 ret i32 undef
390390 }
391391
392392 ; CHECK-LABEL: name: test_constant_inttoptr
393 ; CHECK: [[ONE:%[0-9]+]](64) = G_CONSTANT s64 1
394 ; CHECK: [[PTR:%[0-9]+]](64) = G_INTTOPTR { p0, s64 } [[ONE]]
393 ; CHECK: [[ONE:%[0-9]+]](s64) = G_CONSTANT 1
394 ; CHECK: [[PTR:%[0-9]+]](p0) = G_INTTOPTR [[ONE]]
395395 ; CHECK: %x0 = COPY [[PTR]]
396396 define i8* @test_constant_inttoptr() {
397397 ret i8* inttoptr(i64 1 to i8*)
400400 ; This failed purely because the Constant -> VReg map was kept across
401401 ; functions, so reuse the "i64 1" from above.
402402 ; CHECK-LABEL: name: test_reused_constant
403 ; CHECK: [[ONE:%[0-9]+]](64) = G_CONSTANT s64 1
403 ; CHECK: [[ONE:%[0-9]+]](s64) = G_CONSTANT 1
404404 ; CHECK: %x0 = COPY [[ONE]]
405405 define i64 @test_reused_constant() {
406406 ret i64 1
407407 }
408408
409409 ; CHECK-LABEL: name: test_sext
410 ; CHECK: [[IN:%[0-9]+]](32) = G_TYPE s32 %w0
411 ; CHECK: [[RES:%[0-9]+]](64) = G_SEXT { s64, s32 } [[IN]]
410 ; CHECK: [[IN:%[0-9]+]](s32) = COPY %w0
411 ; CHECK: [[RES:%[0-9]+]](s64) = G_SEXT [[IN]]
412412 ; CHECK: %x0 = COPY [[RES]]
413413 define i64 @test_sext(i32 %in) {
414414 %res = sext i32 %in to i64
416416 }
417417
418418 ; CHECK-LABEL: name: test_zext
419 ; CHECK: [[IN:%[0-9]+]](32) = G_TYPE s32 %w0
420 ; CHECK: [[RES:%[0-9]+]](64) = G_ZEXT { s64, s32 } [[IN]]
419 ; CHECK: [[IN:%[0-9]+]](s32) = COPY %w0
420 ; CHECK: [[RES:%[0-9]+]](s64) = G_ZEXT [[IN]]
421421 ; CHECK: %x0 = COPY [[RES]]
422422 define i64 @test_zext(i32 %in) {
423423 %res = zext i32 %in to i64
425425 }
426426
427427 ; CHECK-LABEL: name: test_shl
428 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
429 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
430 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_SHL s32 [[ARG1]], [[ARG2]]
428 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
429 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
430 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_SHL [[ARG1]], [[ARG2]]
431431 ; CHECK-NEXT: %w0 = COPY [[RES]]
432432 ; CHECK-NEXT: RET_ReallyLR implicit %w0
433433 define i32 @test_shl(i32 %arg1, i32 %arg2) {
437437
438438
439439 ; CHECK-LABEL: name: test_lshr
440 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
441 ; CHECK-NEXT: [[ARG2:%[0-9]+]](32) = G_TYPE s32 %w1
442 ; CHECK-NEXT: [[RES:%[0-9]+]](32) = G_LSHR s32 [[ARG1]], [[ARG2]]
440 ; CHECK: [[ARG1:%[0-9]+]](s32) = COPY %w0
441 ; CHECK-NEXT: [[ARG2:%[0-9]+]](s32) = COPY %w1
442 ; CHECK-NEXT: [[RES:%[0-9]+]](s32) = G_LSHR [[ARG1]], [[ARG2]]
443443 ; CHECK-NEXT: %w0 = COPY [[RES]]
444444 ; CHECK-NEXT: RET_ReallyLR implicit %w0
445445 define i32 @test_lshr(i32 %arg1, i32 %arg2) {
448448 }
449449
450450 ; CHECK-LABEL: name: test_ashr
451 ; CHECK: [[ARG1:%[0-9]+]](32) = G_TYPE s32 %w0
452 <