llvm.org GIT mirror llvm / 2d51ba3
GlobalISel: Add templated functions and pattern matcher support for some more opcodes Summary: This patch adds templated functions to MachineIRBuilder for some opcodes and adds pattern matcher support for G_AND and G_OR. Reviewers: aditya_nandakumar Reviewed By: aditya_nandakumar Subscribers: rovka, kristof.beyls, llvm-commits Differential Revision: https://reviews.llvm.org/D43309 git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@325162 91177308-0d34-0410-b5e6-96231b3b80d8 Volkan Keles 2 years ago
4 changed file(s) with 74 addition(s) and 13 deletion(s). Raw diff Collapse all Expand all
207207 	  return BinaryOp_match<LHS, RHS, TargetOpcode::G_ADD, true>(L, R);
208208 }
209209
210 	template <typename LHS, typename RHS>
211 	inline BinaryOp_match<LHS, RHS, TargetOpcode::G_AND, true>
212 	m_GAnd(const LHS &L, const RHS &R) {
213 	  return BinaryOp_match<LHS, RHS, TargetOpcode::G_AND, true>(L, R);
214 }
215
216 	template <typename LHS, typename RHS>
217 	inline BinaryOp_match<LHS, RHS, TargetOpcode::G_OR, true> m_GOr(const LHS &L,
218 	                                                                const RHS &R) {
219 	  return BinaryOp_match<LHS, RHS, TargetOpcode::G_OR, true>(L, R);
220 }
221
210222 // Helper for unary instructions (G_[ZSA]EXT/G_TRUNC) etc
211223 	template <typename SrcTy, unsigned Opcode> struct UnaryOp_match {
212224 SrcTy L;
374374 /// with the same (scalar or vector) type).
375375 ///
376376 /// \return a MachineInstrBuilder for the newly created instruction.
377 	template <typename DstTy, typename... UseArgsTy>
378 MachineInstrBuilder buildOr(DstTy &&Dst, UseArgsTy &&... UseArgs) {
379 return buildOr(getDestFromArg(Dst), getRegFromArg(UseArgs)...);
380 }
377381 MachineInstrBuilder buildOr(unsigned Res, unsigned Op0, unsigned Op1);
378382
379383 /// Build and insert \p Res = G_ANYEXT \p Op0
440444 /// \pre \p Op must be a generic virtual register with scalar or vector type.
441445 ///
442446 /// \return The newly created instruction.
447 	template <typename DstTy, typename UseArgTy>
448 MachineInstrBuilder buildSExtOrTrunc(DstTy &&Dst, UseArgTy &&Use) {
449 return buildSExtOrTrunc(getDestFromArg(Dst), getRegFromArg(Use));
450 }
443451 MachineInstrBuilder buildSExtOrTrunc(unsigned Res, unsigned Op);
444452
445453 /// Build and insert \p Res = G_ZEXT \p Op, \p Res = G_TRUNC \p Op, or
450458 /// \pre \p Op must be a generic virtual register with scalar or vector type.
451459 ///
452460 /// \return The newly created instruction.
461 	template <typename DstTy, typename UseArgTy>
462 MachineInstrBuilder buildZExtOrTrunc(DstTy &&Dst, UseArgTy &&Use) {
463 return buildZExtOrTrunc(getDestFromArg(Dst), getRegFromArg(Use));
464 }
453465 MachineInstrBuilder buildZExtOrTrunc(unsigned Res, unsigned Op);
454466
455467 // Build and insert \p Res = G_ANYEXT \p Op, \p Res = G_TRUNC \p Op, or
479491 unsigned Op);
480492
481493 /// Build and insert an appropriate cast between two registers of equal size.
494 	template <typename DstType, typename ArgType>
495 MachineInstrBuilder buildCast(DstType &&Res, ArgType &&Arg) {
496 return buildCast(getDestFromArg(Res), getRegFromArg(Arg));
497 }
482498 MachineInstrBuilder buildCast(unsigned Dst, unsigned Src);
483499
484500 /// Build and insert G_BR \p Dest
549565 /// \pre \p Res must be a generic virtual register with scalar type.
550566 ///
551567 /// \return The newly created instruction.
568 	template <typename DstType>
569 MachineInstrBuilder buildFConstant(DstType &&Res, const ConstantFP &Val) {
570 return buildFConstant(getDestFromArg(Res), Val);
571 }
552572 MachineInstrBuilder buildFConstant(unsigned Res, const ConstantFP &Val);
553573
554574 /// Build and insert \p Res = COPY Op
597617 MachineInstrBuilder buildExtract(unsigned Res, unsigned Src, uint64_t Index);
598618
599619 /// Build and insert \p Res = IMPLICIT_DEF.
620 	template <typename DstType> MachineInstrBuilder buildUndef(DstType &&Res) {
621 return buildUndef(getDestFromArg(Res));
622 }
600623 MachineInstrBuilder buildUndef(unsigned Dst);
601624
602625 /// Build and insert instructions to put \p Ops together at the specified p
666689 /// \pre \p Res must be smaller than \p Op
667690 ///
668691 /// \return The newly created instruction.
692 	template <typename DstType, typename SrcType>
693 MachineInstrBuilder buildFPTrunc(DstType &&Res, SrcType &&Src) {
694 return buildFPTrunc(getDestFromArg(Res), getRegFromArg(Src));
695 }
669696 MachineInstrBuilder buildFPTrunc(unsigned Res, unsigned Op);
670697
671698 /// Build and insert \p Res = G_TRUNC \p Op
285285 int NumParts = SizeOp0 / NarrowSize;
286286
287287 	  SmallVector<unsigned, 2> DstRegs;
288 for (int i = 0; i < NumParts; ++i) {
289 unsigned Dst = MRI.createGenericVirtualRegister(NarrowTy);
290 MIRBuilder.buildUndef(Dst);
291 DstRegs.push_back(Dst);
292 }
288 for (int i = 0; i < NumParts; ++i)
289 DstRegs.push_back(
290 MIRBuilder.buildUndef(NarrowTy)->getOperand(0).getReg());
293291 MIRBuilder.buildMerge(MI.getOperand(0).getReg(), DstRegs);
294292 MI.eraseFromParent();
295293 return Legalized;
754752 return Legalized;
755753 }
756754 case TargetOpcode::G_FCONSTANT: {
757 unsigned DstExt = MRI.createGenericVirtualRegister(WideTy);
758755 const ConstantFP *CFP = MI.getOperand(1).getFPImm();
759756 APFloat Val = CFP->getValueAPF();
760757 LLVMContext &Ctx = MIRBuilder.getMF().getFunction().getContext();
772769 };
773770 bool LosesInfo;
774771 Val.convert(*LLT2Sem(WideTy), APFloat::rmTowardZero, &LosesInfo);
775 MIRBuilder.buildFConstant(DstExt, *ConstantFP::get(Ctx, Val));
776 MIRBuilder.buildFPTrunc(MI.getOperand(0).getReg(), DstExt);
772 auto Cst = MIRBuilder.buildFConstant(WideTy, *ConstantFP::get(Ctx, Val));
773 MIRBuilder.buildFPTrunc(MI.getOperand(0).getReg(), Cst);
777774 MI.eraseFromParent();
778775 return Legalized;
779776 }
968965 }
969966 ConstantFP &ZeroForNegation =
970967 	      *cast<ConstantFP>(ConstantFP::getZeroValueForNegation(ZeroTy));
971 unsigned Zero = MRI.createGenericVirtualRegister(Ty);
972 MIRBuilder.buildFConstant(Zero, ZeroForNegation);
968 auto Zero = MIRBuilder.buildFConstant(Ty, ZeroForNegation);
973969 MIRBuilder.buildInstr(TargetOpcode::G_FSUB)
974970 .addDef(Res)
975 .addUse(Zero)
971 .addUse(Zero->getOperand(0).getReg())
976972 .addUse(MI.getOperand(1).getReg());
977973 MI.eraseFromParent();
978974 return Legalized;
209209 ASSERT_TRUE(match);
210210 ASSERT_EQ(Cst, (uint64_t)42);
211211 ASSERT_EQ(Src0, Copies[0]);
212
213 // Build AND %0, %1
214 auto MIBAnd = B.buildAnd(s64, Copies[0], Copies[1]);
215 // Try to match AND.
216 match = mi_match(MIBAnd->getOperand(0).getReg(), MRI,
217 m_GAnd(m_Reg(Src0), m_Reg(Src1)));
218 ASSERT_TRUE(match);
219 ASSERT_EQ(Src0, Copies[0]);
220 ASSERT_EQ(Src1, Copies[1]);
221
222 // Build OR %0, %1
223 auto MIBOr = B.buildOr(s64, Copies[0], Copies[1]);
224 // Try to match OR.
225 match = mi_match(MIBOr->getOperand(0).getReg(), MRI,
226 m_GOr(m_Reg(Src0), m_Reg(Src1)));
227 ASSERT_TRUE(match);
228 ASSERT_EQ(Src0, Copies[0]);
229 ASSERT_EQ(Src1, Copies[1]);
212230 }
213231
214232 TEST(PatternMatchInstr, MatchExtendsTrunc) {
281299 MachineIRBuilder B(*MF);
282300 MachineRegisterInfo &MRI = MF->getRegInfo();
283301 B.setInsertPt(*EntryMBB, EntryMBB->end());
302
303 // Try to match a 64bit add.
284304 LLT s64 = LLT::scalar(64);
285305 LLT s32 = LLT::scalar(32);
286306 auto MIBAdd = B.buildAdd(s64, Copies[0], Copies[1]);
287
288 // Try to match a 64bit add.
289307 ASSERT_FALSE(mi_match(MIBAdd->getOperand(0).getReg(), MRI,
290308 m_GAdd(m_SpecificType(s32), m_Reg())));
291309 ASSERT_TRUE(mi_match(MIBAdd->getOperand(0).getReg(), MRI,
292310 m_GAdd(m_SpecificType(s64), m_Reg())));
311
312 // Try to match the destination type of a bitcast.
313 LLT v2s32 = LLT::vector(2, 32);
314 auto MIBCast = B.buildCast(v2s32, Copies[0]);
315 ASSERT_TRUE(
316 mi_match(MIBCast->getOperand(0).getReg(), MRI, m_SpecificType(v2s32)));
317 ASSERT_TRUE(
318 mi_match(MIBCast->getOperand(1).getReg(), MRI, m_SpecificType(s64)));
293319 }
294320
295321 TEST(PatternMatchInstr, MatchCombinators) {