llvm.org GIT mirror llvm / 7fac1d9
[SelectionDAG] Allow targets to specify legality of extloads' result type (in addition to the memory type).

The *LoadExt* legalization handling used to only have one type, the memory type. This forced users to assume that as long as the extload for the memory type was declared legal, and the result type was legal, the whole extload was legal.

However, this isn't always the case. For instance, on X86, with AVX, this is legal:
    v4i32 load, zext from v4i8
but this isn't:
    v4i64 load, zext from v4i8
Whereas v4i64 is (arguably) legal, even without AVX2.

Note that the same thing was done a while ago for truncstores (r46140), but I assume no one needed it yet for extloads, so here we go.

Calls to getLoadExtAction were changed to add the value type, found manually in the surrounding code.

Calls to setLoadExtAction were mechanically changed, by wrapping the call in a loop, to match previous behavior. The loop iterates over the MVT subrange corresponding to the memory type (FP vectors, etc...).

I also pulled neighboring setTruncStoreActions into some of the loops; those shouldn't make a difference, as the additional types are illegal. (e.g., i128->i1 truncstores on PPC.)

No functional change intended.

Differential Revision: http://reviews.llvm.org/D6532

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@225421 91177308-0d34-0410-b5e6-96231b3b80d8

Ahmed Bougacha, 5 years ago
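As a minimal sketch of the mechanical change described above (the SEXTLOAD-of-i1 combination is just one of the pairs touched in the diff below; the surrounding target constructor is assumed):

    // Before this change: only the memory type was named, and the result
    // type was implicitly assumed to be anything legal.
    //   setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
    //
    // After this change: the call also names the result type. Wrapping it
    // in a loop over the relevant MVT subrange preserves the old behavior.
    for (MVT VT : MVT::integer_valuetypes())
      setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);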
21 changed file(s) with 250 addition(s) and 187 deletion(s).
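On the query side, callers now pass the extload's result type as well. The following sketch mirrors the DAGCombiner call sites in the diff; the variables TLI, DAG, N, LN0 and the v4i32/v4i8 pair (the AVX example from the commit message) are assumed for illustration:

    // Result and memory types are now checked as a pair: with AVX, a
    // zextload of v4i8 to v4i32 is legal, but to v4i64 it is not.
    EVT VT    = MVT::v4i32;   // result type of the extending load
    EVT MemVT = MVT::v4i8;    // type actually loaded from memory
    if (TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, MemVT)) {
      SDValue ExtLoad = DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(N), VT,
                                       LN0->getChain(), LN0->getBasePtr(),
                                       MemVT, LN0->getMemOperand());
      // ...replace the original load + zero_extend pair with ExtLoad.
    }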
556556 /// Return how this load with extension should be treated: either it is legal,
557557 /// needs to be promoted to a larger size, needs to be expanded to some other
558558 /// code sequence, or the target has a custom expander for it.
559 LegalizeAction getLoadExtAction(unsigned ExtType, EVT VT) const {
560 if (VT.isExtended()) return Expand;
561 unsigned I = (unsigned) VT.getSimpleVT().SimpleTy;
562 assert(ExtType < ISD::LAST_LOADEXT_TYPE && I < MVT::LAST_VALUETYPE &&
563 "Table isn't big enough!");
564 return (LegalizeAction)LoadExtActions[I][ExtType];
559 LegalizeAction getLoadExtAction(unsigned ExtType, EVT ValVT, EVT MemVT) const {
560 if (ValVT.isExtended() || MemVT.isExtended()) return Expand;
561 unsigned ValI = (unsigned) ValVT.getSimpleVT().SimpleTy;
562 unsigned MemI = (unsigned) MemVT.getSimpleVT().SimpleTy;
563 assert(ExtType < ISD::LAST_LOADEXT_TYPE && ValI < MVT::LAST_VALUETYPE &&
564 MemI < MVT::LAST_VALUETYPE && "Table isn't big enough!");
565 return (LegalizeAction)LoadExtActions[ValI][MemI][ExtType];
565566 }
566567
567568 /// Return true if the specified load with extension is legal on this target.
568 bool isLoadExtLegal(unsigned ExtType, EVT VT) const {
569 return VT.isSimple() &&
570 getLoadExtAction(ExtType, VT.getSimpleVT()) == Legal;
569 bool isLoadExtLegal(unsigned ExtType, EVT ValVT, EVT MemVT) const {
570 return ValVT.isSimple() && MemVT.isSimple() &&
571 getLoadExtAction(ExtType, ValVT, MemVT) == Legal;
571572 }
572573
573574 /// Return how this store with truncation should be treated: either it is
12361237
12371238 /// Indicate that the specified load with extension does not work with the
12381239 /// specified type and indicate what to do about it.
1239 void setLoadExtAction(unsigned ExtType, MVT VT,
1240 void setLoadExtAction(unsigned ExtType, MVT ValVT, MVT MemVT,
12401241 LegalizeAction Action) {
1241 assert(ExtType < ISD::LAST_LOADEXT_TYPE && VT.isValid() &&
1242 "Table isn't big enough!");
1243 LoadExtActions[VT.SimpleTy][ExtType] = (uint8_t)Action;
1242 assert(ExtType < ISD::LAST_LOADEXT_TYPE && ValVT.isValid() &&
1243 MemVT.isValid() && "Table isn't big enough!");
1244 LoadExtActions[ValVT.SimpleTy][MemVT.SimpleTy][ExtType] = (uint8_t)Action;
12441245 }
12451246
12461247 /// Indicate that the specified truncating store does not work with the
17361737 /// For each load extension type and each value type, keep a LegalizeAction
17371738 /// that indicates how instruction selection should deal with a load of a
17381739 /// specific value type and extension type.
1739 uint8_t LoadExtActions[MVT::LAST_VALUETYPE][ISD::LAST_LOADEXT_TYPE];
1740 uint8_t LoadExtActions[MVT::LAST_VALUETYPE][MVT::LAST_VALUETYPE]
1741 [ISD::LAST_LOADEXT_TYPE];
17401742
17411743 /// For each value type pair keep a LegalizeAction that indicates whether a
17421744 /// truncating store of a specific value type and truncating type is legal.
518518 if (Opcode == Instruction::Store)
519519 LA = getTLI()->getTruncStoreAction(LT.second, MemVT.getSimpleVT());
520520 else
521 LA = getTLI()->getLoadExtAction(ISD::EXTLOAD, MemVT.getSimpleVT());
521 LA = getTLI()->getLoadExtAction(ISD::EXTLOAD, LT.second, MemVT);
522522 }
523523
524524 if (LA != TargetLowering::Legal && LA != TargetLowering::Custom) {
34313431 assert(isa<SExtInst>(I) && "Unexpected ext type!");
34323432 LType = ISD::SEXTLOAD;
34333433 }
3434 if (TLI && !TLI->isLoadExtLegal(LType, LoadVT)) {
3434 if (TLI && !TLI->isLoadExtLegal(LType, VT, LoadVT)) {
34353435 I = OldExt;
34363436 TPT.rollback(LastKnownGood);
34373437 return false;
876876 if (LoadSDNode *LD = dyn_cast<LoadSDNode>(Op)) {
877877 EVT MemVT = LD->getMemoryVT();
878878 ISD::LoadExtType ExtType = ISD::isNON_EXTLoad(LD)
879 ? (TLI.isLoadExtLegal(ISD::ZEXTLOAD, MemVT) ? ISD::ZEXTLOAD
880 : ISD::EXTLOAD)
879 ? (TLI.isLoadExtLegal(ISD::ZEXTLOAD, PVT, MemVT) ? ISD::ZEXTLOAD
880 : ISD::EXTLOAD)
881881 : LD->getExtensionType();
882882 Replace = true;
883883 return DAG.getExtLoad(ExtType, dl, PVT,
10981098 LoadSDNode *LD = cast<LoadSDNode>(N);
10991099 EVT MemVT = LD->getMemoryVT();
11001100 ISD::LoadExtType ExtType = ISD::isNON_EXTLoad(LD)
1101 ? (TLI.isLoadExtLegal(ISD::ZEXTLOAD, MemVT) ? ISD::ZEXTLOAD
1102 : ISD::EXTLOAD)
1101 ? (TLI.isLoadExtLegal(ISD::ZEXTLOAD, PVT, MemVT) ? ISD::ZEXTLOAD
1102 : ISD::EXTLOAD)
11031103 : LD->getExtensionType();
11041104 SDValue NewLD = DAG.getExtLoad(ExtType, dl, PVT,
11051105 LD->getChain(), LD->getBasePtr(),
27992799 // actually legal and isn't going to get expanded, else this is a false
28002800 // optimisation.
28012801 bool CanZextLoadProfitably = TLI.isLoadExtLegal(ISD::ZEXTLOAD,
2802 Load->getValueType(0),
28022803 Load->getMemoryVT());
28032804
28042805 // Resize the constant to the same size as the original memory access before
29252926 if (DAG.MaskedValueIsZero(N1, APInt::getHighBitsSet(BitWidth,
29262927 BitWidth - MemVT.getScalarType().getSizeInBits())) &&
29272928 ((!LegalOperations && !LN0->isVolatile()) ||
2928 TLI.isLoadExtLegal(ISD::ZEXTLOAD, MemVT))) {
2929 TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, MemVT))) {
29292930 SDValue ExtLoad = DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(N0), VT,
29302931 LN0->getChain(), LN0->getBasePtr(),
29312932 MemVT, LN0->getMemOperand());
29452946 if (DAG.MaskedValueIsZero(N1, APInt::getHighBitsSet(BitWidth,
29462947 BitWidth - MemVT.getScalarType().getSizeInBits())) &&
29472948 ((!LegalOperations && !LN0->isVolatile()) ||
2948 TLI.isLoadExtLegal(ISD::ZEXTLOAD, MemVT))) {
2949 TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, MemVT))) {
29492950 SDValue ExtLoad = DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(N0), VT,
29502951 LN0->getChain(), LN0->getBasePtr(),
29512952 MemVT, LN0->getMemOperand());
29712972 if (ActiveBits > 0 && APIntOps::isMask(ActiveBits, N1C->getAPIntValue())){
29722973 EVT ExtVT = EVT::getIntegerVT(*DAG.getContext(), ActiveBits);
29732974 EVT LoadedVT = LN0->getMemoryVT();
2975 EVT LoadResultTy = HasAnyExt ? LN0->getValueType(0) : VT;
29742976
29752977 if (ExtVT == LoadedVT &&
2976 (!LegalOperations || TLI.isLoadExtLegal(ISD::ZEXTLOAD, ExtVT))) {
2977 EVT LoadResultTy = HasAnyExt ? LN0->getValueType(0) : VT;
2978 (!LegalOperations || TLI.isLoadExtLegal(ISD::ZEXTLOAD, LoadResultTy,
2979 ExtVT))) {
29782980
29792981 SDValue NewLoad =
29802982 DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(LN0), LoadResultTy,
29892991 // Do not generate loads of non-round integer types since these can
29902992 // be expensive (and would be wrong if the type is not byte sized).
29912993 if (!LN0->isVolatile() && LoadedVT.bitsGT(ExtVT) && ExtVT.isRound() &&
2992 (!LegalOperations || TLI.isLoadExtLegal(ISD::ZEXTLOAD, ExtVT))) {
2994 (!LegalOperations || TLI.isLoadExtLegal(ISD::ZEXTLOAD, LoadResultTy,
2995 ExtVT))) {
29932996 EVT PtrType = LN0->getOperand(1).getValueType();
29942997
29952998 unsigned Alignment = LN0->getAlignment();
30093012
30103013 AddToWorklist(NewPtr.getNode());
30113014
3012 EVT LoadResultTy = HasAnyExt ? LN0->getValueType(0) : VT;
30133015 SDValue Load =
30143016 DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(LN0), LoadResultTy,
30153017 LN0->getChain(), NewPtr,
52815283 if (ISD::isNON_EXTLoad(N0.getNode()) && !VT.isVector() &&
52825284 ISD::isUNINDEXEDLoad(N0.getNode()) &&
52835285 ((!LegalOperations && !cast<LoadSDNode>(N0)->isVolatile()) ||
5284 TLI.isLoadExtLegal(ISD::SEXTLOAD, N0.getValueType()))) {
5286 TLI.isLoadExtLegal(ISD::SEXTLOAD, VT, N0.getValueType()))) {
52855287 bool DoXform = true;
52865288 SmallVector<SDNode*, 4> SetCCs;
52875289 if (!N0.hasOneUse())
53095311 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
53105312 EVT MemVT = LN0->getMemoryVT();
53115313 if ((!LegalOperations && !LN0->isVolatile()) ||
5312 TLI.isLoadExtLegal(ISD::SEXTLOAD, MemVT)) {
5314 TLI.isLoadExtLegal(ISD::SEXTLOAD, VT, MemVT)) {
53135315 SDValue ExtLoad = DAG.getExtLoad(ISD::SEXTLOAD, SDLoc(N), VT,
53145316 LN0->getChain(),
53155317 LN0->getBasePtr(), MemVT,
53295331 N0.getOpcode() == ISD::XOR) &&
53305332 isa<LoadSDNode>(N0.getOperand(0)) &&
53315333 N0.getOperand(1).getOpcode() == ISD::Constant &&
5332 TLI.isLoadExtLegal(ISD::SEXTLOAD, N0.getValueType()) &&
5334 TLI.isLoadExtLegal(ISD::SEXTLOAD, VT, N0.getValueType()) &&
53335335 (!LegalOperations && TLI.isOperationLegal(N0.getOpcode(), VT))) {
53345336 LoadSDNode *LN0 = cast<LoadSDNode>(N0.getOperand(0));
53355337 if (LN0->getExtensionType() != ISD::ZEXTLOAD && LN0->isUnindexed()) {
55715573 if (ISD::isNON_EXTLoad(N0.getNode()) && !VT.isVector() &&
55725574 ISD::isUNINDEXEDLoad(N0.getNode()) &&
55735575 ((!LegalOperations && !cast<LoadSDNode>(N0)->isVolatile()) ||
5574 TLI.isLoadExtLegal(ISD::ZEXTLOAD, N0.getValueType()))) {
5576 TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, N0.getValueType()))) {
55755577 bool DoXform = true;
55765578 SmallVector<SDNode*, 4> SetCCs;
55775579 if (!N0.hasOneUse())
55995601 N0.getOpcode() == ISD::XOR) &&
56015603 isa<LoadSDNode>(N0.getOperand(0)) &&
56015603 N0.getOperand(1).getOpcode() == ISD::Constant &&
5602 TLI.isLoadExtLegal(ISD::ZEXTLOAD, N0.getValueType()) &&
5604 TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, N0.getValueType()) &&
56035605 (!LegalOperations && TLI.isOperationLegal(N0.getOpcode(), VT))) {
56045606 LoadSDNode *LN0 = cast<LoadSDNode>(N0.getOperand(0));
56055607 if (LN0->getExtensionType() != ISD::SEXTLOAD && LN0->isUnindexed()) {
56365638 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
56375639 EVT MemVT = LN0->getMemoryVT();
56385640 if ((!LegalOperations && !LN0->isVolatile()) ||
5639 TLI.isLoadExtLegal(ISD::ZEXTLOAD, MemVT)) {
5641 TLI.isLoadExtLegal(ISD::ZEXTLOAD, VT, MemVT)) {
56405642 SDValue ExtLoad = DAG.getExtLoad(ISD::ZEXTLOAD, SDLoc(N), VT,
56415643 LN0->getChain(),
56425644 LN0->getBasePtr(), MemVT,
57985800 // scalars.
57995801 if (ISD::isNON_EXTLoad(N0.getNode()) && !VT.isVector() &&
58005802 ISD::isUNINDEXEDLoad(N0.getNode()) &&
5801 TLI.isLoadExtLegal(ISD::EXTLOAD, N0.getValueType())) {
5803 TLI.isLoadExtLegal(ISD::EXTLOAD, VT, N0.getValueType())) {
58025804 bool DoXform = true;
58035805 SmallVector<SDNode*, 4> SetCCs;
58045806 if (!N0.hasOneUse())
58285830 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
58295831 ISD::LoadExtType ExtType = LN0->getExtensionType();
58305832 EVT MemVT = LN0->getMemoryVT();
5831 if (!LegalOperations || TLI.isLoadExtLegal(ExtType, MemVT)) {
5833 if (!LegalOperations || TLI.isLoadExtLegal(ExtType, VT, MemVT)) {
58325834 SDValue ExtLoad = DAG.getExtLoad(ExtType, SDLoc(N),
58335835 VT, LN0->getChain(), LN0->getBasePtr(),
58345836 MemVT, LN0->getMemOperand());
59575959 ExtVT = EVT::getIntegerVT(*DAG.getContext(),
59585960 VT.getSizeInBits() - N01->getZExtValue());
59595961 }
5960 if (LegalOperations && !TLI.isLoadExtLegal(ExtType, ExtVT))
5962 if (LegalOperations && !TLI.isLoadExtLegal(ExtType, VT, ExtVT))
59615963 return SDValue();
59625964
59635965 unsigned EVTBits = ExtVT.getSizeInBits();
61646166 ISD::isUNINDEXEDLoad(N0.getNode()) &&
61656167 EVT == cast<LoadSDNode>(N0)->getMemoryVT() &&
61666168 ((!LegalOperations && !cast<LoadSDNode>(N0)->isVolatile()) ||
6167 TLI.isLoadExtLegal(ISD::SEXTLOAD, EVT))) {
6169 TLI.isLoadExtLegal(ISD::SEXTLOAD, VT, EVT))) {
61686170 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
61696171 SDValue ExtLoad = DAG.getExtLoad(ISD::SEXTLOAD, SDLoc(N), VT,
61706172 LN0->getChain(),
61806182 N0.hasOneUse() &&
61816183 EVT == cast<LoadSDNode>(N0)->getMemoryVT() &&
61826184 ((!LegalOperations && !cast<LoadSDNode>(N0)->isVolatile()) ||
6183 TLI.isLoadExtLegal(ISD::SEXTLOAD, EVT))) {
6185 TLI.isLoadExtLegal(ISD::SEXTLOAD, VT, EVT))) {
61846186 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
61856187 SDValue ExtLoad = DAG.getExtLoad(ISD::SEXTLOAD, SDLoc(N), VT,
61866188 LN0->getChain(),
77257727
77267728 // fold (fpext (load x)) -> (fpext (fptrunc (extload x)))
77277729 if (ISD::isNormalLoad(N0.getNode()) && N0.hasOneUse() &&
7728 TLI.isLoadExtLegal(ISD::EXTLOAD, N0.getValueType())) {
7730 TLI.isLoadExtLegal(ISD::EXTLOAD, VT, N0.getValueType())) {
77297731 LoadSDNode *LN0 = cast<LoadSDNode>(N0);
77307732 SDValue ExtLoad = DAG.getExtLoad(ISD::EXTLOAD, SDLoc(N), VT,
77317733 LN0->getChain(),
1000210004 EVT LegalizedStoredValueTy =
1000310005 TLI.getTypeToTransformTo(*DAG.getContext(), StoreTy);
1000410006 if (TLI.isTruncStoreLegal(LegalizedStoredValueTy, StoreTy) &&
10005 TLI.isLoadExtLegal(ISD::ZEXTLOAD, StoreTy) &&
10006 TLI.isLoadExtLegal(ISD::SEXTLOAD, StoreTy) &&
10007 TLI.isLoadExtLegal(ISD::EXTLOAD, StoreTy))
10007 TLI.isLoadExtLegal(ISD::ZEXTLOAD, LegalizedStoredValueTy, StoreTy) &&
10008 TLI.isLoadExtLegal(ISD::SEXTLOAD, LegalizedStoredValueTy, StoreTy) &&
10009 TLI.isLoadExtLegal(ISD::EXTLOAD, LegalizedStoredValueTy, StoreTy))
1000810010 LastLegalIntegerType = i+1;
1000910011 }
1001010012 }
1044210444 if (ResultVT.bitsGT(VecEltVT)) {
1044310445 // If the result type of vextract is wider than the load, then issue an
1044410446 // extending load instead.
10445 ISD::LoadExtType ExtType = TLI.isLoadExtLegal(ISD::ZEXTLOAD, VecEltVT)
10447 ISD::LoadExtType ExtType = TLI.isLoadExtLegal(ISD::ZEXTLOAD, ResultVT,
10448 VecEltVT)
1044610449 ? ISD::ZEXTLOAD
1044710450 : ISD::EXTLOAD;
1044810451 Load = DAG.getExtLoad(
259259 if (ConstantFPSDNode::isValueValidForType(SVT, CFP->getValueAPF()) &&
260260 // Only do this if the target has a native EXTLOAD instruction from
261261 // smaller type.
262 TLI.isLoadExtLegal(ISD::EXTLOAD, SVT) &&
262 TLI.isLoadExtLegal(ISD::EXTLOAD, OrigVT, SVT) &&
263263 TLI.ShouldShrinkFPConstant(OrigVT)) {
264264 Type *SType = SVT.getTypeForEVT(*DAG.getContext());
265265 LLVMC = cast<ConstantFP>(ConstantExpr::getFPTrunc(LLVMC, SType));
943943 // nice to have an effective generic way of getting these benefits...
944944 // Until such a way is found, don't insist on promoting i1 here.
945945 (SrcVT != MVT::i1 ||
946 TLI.getLoadExtAction(ExtType, MVT::i1) == TargetLowering::Promote)) {
946 TLI.getLoadExtAction(ExtType, Node->getValueType(0), MVT::i1) ==
947 TargetLowering::Promote)) {
947948 // Promote to a byte-sized load if not loading an integral number of
948949 // bytes. For example, promote EXTLOAD:i20 -> EXTLOAD:i24.
949950 unsigned NewWidth = SrcVT.getStoreSizeInBits();
10551056 Chain = Ch;
10561057 } else {
10571058 bool isCustom = false;
1058 switch (TLI.getLoadExtAction(ExtType, SrcVT.getSimpleVT())) {
1059 switch (TLI.getLoadExtAction(ExtType, Node->getValueType(0),
1060 SrcVT.getSimpleVT())) {
10591061 default: llvm_unreachable("This action is not supported yet!");
10601062 case TargetLowering::Custom:
10611063 isCustom = true;
10871089 break;
10881090 }
10891091 case TargetLowering::Expand:
1090 if (!TLI.isLoadExtLegal(ISD::EXTLOAD, SrcVT) && TLI.isTypeLegal(SrcVT)) {
1092 if (!TLI.isLoadExtLegal(ISD::EXTLOAD, Node->getValueType(0),
1093 SrcVT) && TLI.isTypeLegal(SrcVT)) {
10911094 SDValue Load = DAG.getLoad(SrcVT, dl, Chain, Ptr, LD->getMemOperand());
10921095 unsigned ExtendOp;
10931096 switch (ExtType) {
199199 LoadSDNode *LD = cast<LoadSDNode>(Op.getNode());
200200 ISD::LoadExtType ExtType = LD->getExtensionType();
201201 if (LD->getMemoryVT().isVector() && ExtType != ISD::NON_EXTLOAD)
202 switch (TLI.getLoadExtAction(LD->getExtensionType(), LD->getMemoryVT())) {
202 switch (TLI.getLoadExtAction(LD->getExtensionType(), LD->getValueType(0),
203 LD->getMemoryVT())) {
203204 default: llvm_unreachable("This action is not supported yet!");
204205 case TargetLowering::Legal:
205206 return TranslateLegalizeResults(Op, Result);
395395
396396 // AArch64 does not have floating-point extending loads, i1 sign-extending
397397 // load, floating-point truncating stores, or v2i32->v2i16 truncating store.
398 setLoadExtAction(ISD::EXTLOAD, MVT::f16, Expand);
399 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
400 setLoadExtAction(ISD::EXTLOAD, MVT::f64, Expand);
401 setLoadExtAction(ISD::EXTLOAD, MVT::f80, Expand);
402 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Expand);
398 for (MVT VT : MVT::fp_valuetypes()) {
399 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f16, Expand);
400 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
401 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f64, Expand);
402 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f80, Expand);
403 }
404 for (MVT VT : MVT::integer_valuetypes())
405 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Expand);
406
403407 setTruncStoreAction(MVT::f32, MVT::f16, Expand);
404408 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
405409 setTruncStoreAction(MVT::f64, MVT::f16, Expand);
548552
549553 setOperationAction(ISD::BSWAP, VT, Expand);
550554
551 for (MVT InnerVT : MVT::vector_valuetypes())
555 for (MVT InnerVT : MVT::vector_valuetypes()) {
552556 setTruncStoreAction(VT, InnerVT, Expand);
553 setLoadExtAction(ISD::SEXTLOAD, VT, Expand);
554 setLoadExtAction(ISD::ZEXTLOAD, VT, Expand);
555 setLoadExtAction(ISD::EXTLOAD, VT, Expand);
557 setLoadExtAction(ISD::SEXTLOAD, VT, InnerVT, Expand);
558 setLoadExtAction(ISD::ZEXTLOAD, VT, InnerVT, Expand);
559 setLoadExtAction(ISD::EXTLOAD, VT, InnerVT, Expand);
560 }
556561 }
557562
558563 // AArch64 has implementations of a lot of rounding-like FP operations.
617622 setOperationAction(ISD::SELECT, VT.getSimpleVT(), Expand);
618623 setOperationAction(ISD::SELECT_CC, VT.getSimpleVT(), Expand);
619624 setOperationAction(ISD::VSELECT, VT.getSimpleVT(), Expand);
620 setLoadExtAction(ISD::EXTLOAD, VT.getSimpleVT(), Expand);
625 for (MVT InnerVT : MVT::all_valuetypes())
626 setLoadExtAction(ISD::EXTLOAD, InnerVT, VT.getSimpleVT(), Expand);
621627
622628 // CNT supports only B element sizes.
623629 if (VT != MVT::v8i8 && VT != MVT::v16i8)
404404 }
405405
406406 for (MVT VT : MVT::vector_valuetypes()) {
407 for (MVT InnerVT : MVT::vector_valuetypes())
407 for (MVT InnerVT : MVT::vector_valuetypes()) {
408408 setTruncStoreAction(VT, InnerVT, Expand);
409 setLoadExtAction(ISD::SEXTLOAD, VT, Expand);
410 setLoadExtAction(ISD::ZEXTLOAD, VT, Expand);
411 setLoadExtAction(ISD::EXTLOAD, VT, Expand);
409 setLoadExtAction(ISD::SEXTLOAD, VT, InnerVT, Expand);
410 setLoadExtAction(ISD::ZEXTLOAD, VT, InnerVT, Expand);
411 setLoadExtAction(ISD::EXTLOAD, VT, InnerVT, Expand);
412 }
412413
413414 setOperationAction(ISD::MULHS, VT, Expand);
414415 setOperationAction(ISD::SMUL_LOHI, VT, Expand);
570571 MVT::v4i16, MVT::v2i16,
571572 MVT::v2i32};
572573 for (unsigned i = 0; i < 6; ++i) {
573 setLoadExtAction(ISD::EXTLOAD, Tys[i], Legal);
574 setLoadExtAction(ISD::ZEXTLOAD, Tys[i], Legal);
575 setLoadExtAction(ISD::SEXTLOAD, Tys[i], Legal);
574 for (MVT VT : MVT::integer_vector_valuetypes()) {
575 setLoadExtAction(ISD::EXTLOAD, VT, Tys[i], Legal);
576 setLoadExtAction(ISD::ZEXTLOAD, VT, Tys[i], Legal);
577 setLoadExtAction(ISD::SEXTLOAD, VT, Tys[i], Legal);
578 }
576579 }
577580 }
578581
617620 computeRegisterProperties();
618621
619622 // ARM does not have floating-point extending loads.
620 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
621 setLoadExtAction(ISD::EXTLOAD, MVT::f16, Expand);
623 for (MVT VT : MVT::fp_valuetypes()) {
624 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
625 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f16, Expand);
626 }
622627
623628 // ... or truncating stores
624629 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
626631 setTruncStoreAction(MVT::f64, MVT::f16, Expand);
627632
628633 // ARM does not have i1 sign extending load.
629 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
634 for (MVT VT : MVT::integer_valuetypes())
635 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
630636
631637 // ARM supports all 4 flavors of integer indexed load / store.
632638 if (!Subtarget->isThumb1Only()) {
13001300 setOperationAction(ISD::BUILD_PAIR, MVT::i64, Expand);
13011301
13021302 // Turn FP extload into load/fextend.
1303 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
1303 for (MVT VT : MVT::fp_valuetypes())
1304 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
13041305 // Hexagon has a i1 sign extending load.
1305 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Expand);
1306 for (MVT VT : MVT::integer_valuetypes())
1307 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Expand);
13061308 // Turn FP truncstore into trunc + store.
13071309 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
13081310
7979 setIndexedLoadAction(ISD::POST_INC, MVT::i8, Legal);
8080 setIndexedLoadAction(ISD::POST_INC, MVT::i16, Legal);
8181
82 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
83 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
84 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
85 setLoadExtAction(ISD::SEXTLOAD, MVT::i8, Expand);
86 setLoadExtAction(ISD::SEXTLOAD, MVT::i16, Expand);
82 for (MVT VT : MVT::integer_valuetypes()) {
83 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
84 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
85 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
86 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i8, Expand);
87 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i16, Expand);
88 }
8789
8890 // We don't have any truncstores
8991 setTruncStoreAction(MVT::i16, MVT::i8, Expand);
214214 ZeroOrNegativeOneBooleanContent);
215215
216216 // Load extented operations for i1 types must be promoted
217 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
218 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
219 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
217 for (MVT VT : MVT::integer_valuetypes()) {
218 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
219 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
220 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
221 }
220222
221223 // MIPS doesn't have extending float->double load/store
222 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
224 for (MVT VT : MVT::fp_valuetypes())
225 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
223226 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
224227
225228 // Used by legalize types to correctly generate the setcc result.
367370 setOperationAction(ISD::BSWAP, MVT::i64, Expand);
368371
369372 if (Subtarget.isGP64bit()) {
370 setLoadExtAction(ISD::SEXTLOAD, MVT::i32, Custom);
371 setLoadExtAction(ISD::ZEXTLOAD, MVT::i32, Custom);
372 setLoadExtAction(ISD::EXTLOAD, MVT::i32, Custom);
373 setLoadExtAction(ISD::SEXTLOAD, MVT::i64, MVT::i32, Custom);
374 setLoadExtAction(ISD::ZEXTLOAD, MVT::i64, MVT::i32, Custom);
375 setLoadExtAction(ISD::EXTLOAD, MVT::i64, MVT::i32, Custom);
373376 setTruncStoreAction(MVT::i64, MVT::i32, Custom);
374377 }
375378
4646 if (Subtarget.hasDSP() || Subtarget.hasMSA()) {
4747 // Expand all truncating stores and extending loads.
4848 for (MVT VT0 : MVT::vector_valuetypes()) {
49 for (MVT VT1 : MVT::vector_valuetypes())
49 for (MVT VT1 : MVT::vector_valuetypes()) {
5050 setTruncStoreAction(VT0, VT1, Expand);
51
52 setLoadExtAction(ISD::SEXTLOAD, VT0, Expand);
53 setLoadExtAction(ISD::ZEXTLOAD, VT0, Expand);
54 setLoadExtAction(ISD::EXTLOAD, VT0, Expand);
51 setLoadExtAction(ISD::SEXTLOAD, VT0, VT1, Expand);
52 setLoadExtAction(ISD::ZEXTLOAD, VT0, VT1, Expand);
53 setLoadExtAction(ISD::EXTLOAD, VT0, VT1, Expand);
54 }
5555 }
5656 }
5757
202202 setOperationAction(ISD::INTRINSIC_W_CHAIN, MVT::Other, Custom);
203203
204204 // Turn FP extload into load/fextend
205 setLoadExtAction(ISD::EXTLOAD, MVT::f16, Expand);
206 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
205 setLoadExtAction(ISD::EXTLOAD, MVT::f32, MVT::f16, Expand);
206 setLoadExtAction(ISD::EXTLOAD, MVT::f64, MVT::f16, Expand);
207 setLoadExtAction(ISD::EXTLOAD, MVT::f64, MVT::f32, Expand);
207208 // Turn FP truncstore into trunc + store.
208209 setTruncStoreAction(MVT::f32, MVT::f16, Expand);
209210 setTruncStoreAction(MVT::f64, MVT::f16, Expand);
213214 setOperationAction(ISD::LOAD, MVT::i1, Custom);
214215 setOperationAction(ISD::STORE, MVT::i1, Custom);
215216
216 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
217 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
218 setTruncStoreAction(MVT::i64, MVT::i1, Expand);
219 setTruncStoreAction(MVT::i32, MVT::i1, Expand);
220 setTruncStoreAction(MVT::i16, MVT::i1, Expand);
221 setTruncStoreAction(MVT::i8, MVT::i1, Expand);
217 for (MVT VT : MVT::integer_valuetypes()) {
218 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
219 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
220 setTruncStoreAction(VT, MVT::i1, Expand);
221 }
222222
223223 // This is legal in NVPTX
224224 setOperationAction(ISD::ConstantFP, MVT::f64, Legal);
7373 addRegisterClass(MVT::f64, &PPC::F8RCRegClass);
7474
7575 // PowerPC has an i16 but no i8 (or i1) SEXTLOAD
76 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
77 setLoadExtAction(ISD::SEXTLOAD, MVT::i8, Expand);
76 for (MVT VT : MVT::integer_valuetypes()) {
77 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
78 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i8, Expand);
79 }
7880
7981 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
8082
113115 if (ANDIGlueBug)
114116 setOperationAction(ISD::TRUNCATE, MVT::i1, Custom);
115117
116 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
117 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
118 setTruncStoreAction(MVT::i64, MVT::i1, Expand);
119 setTruncStoreAction(MVT::i32, MVT::i1, Expand);
120 setTruncStoreAction(MVT::i16, MVT::i1, Expand);
121 setTruncStoreAction(MVT::i8, MVT::i1, Expand);
118 for (MVT VT : MVT::integer_valuetypes()) {
119 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
120 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
121 setTruncStoreAction(VT, MVT::i1, Expand);
122 }
122123
123124 addRegisterClass(MVT::i1, &PPC::CRBITRCRegClass);
124125 }
460461 setOperationAction(ISD::VSELECT, VT, Expand);
461462 setOperationAction(ISD::SIGN_EXTEND_INREG, VT, Expand);
462463
463 for (MVT InnerVT : MVT::vector_valuetypes())
464 for (MVT InnerVT : MVT::vector_valuetypes()) {
464465 setTruncStoreAction(VT, InnerVT, Expand);
465 setLoadExtAction(ISD::SEXTLOAD, VT, Expand);
466 setLoadExtAction(ISD::ZEXTLOAD, VT, Expand);
467 setLoadExtAction(ISD::EXTLOAD, VT, Expand);
466 setLoadExtAction(ISD::SEXTLOAD, VT, InnerVT, Expand);
467 setLoadExtAction(ISD::ZEXTLOAD, VT, InnerVT, Expand);
468 setLoadExtAction(ISD::EXTLOAD, VT, InnerVT, Expand);
469 }
468470 }
469471
470472 // We can custom expand all VECTOR_SHUFFLEs to VPERM, others we can handle
215215 setOperationAction(ISD::EXTRACT_SUBVECTOR, MVT::v8f32, Custom);
216216 setOperationAction(ISD::EXTRACT_SUBVECTOR, MVT::v8i32, Custom);
217217
218 setLoadExtAction(ISD::EXTLOAD, MVT::v2i8, Expand);
219 setLoadExtAction(ISD::SEXTLOAD, MVT::v2i8, Expand);
220 setLoadExtAction(ISD::ZEXTLOAD, MVT::v2i8, Expand);
221 setLoadExtAction(ISD::EXTLOAD, MVT::v4i8, Expand);
222 setLoadExtAction(ISD::SEXTLOAD, MVT::v4i8, Expand);
223 setLoadExtAction(ISD::ZEXTLOAD, MVT::v4i8, Expand);
224 setLoadExtAction(ISD::EXTLOAD, MVT::v2i16, Expand);
225 setLoadExtAction(ISD::SEXTLOAD, MVT::v2i16, Expand);
226 setLoadExtAction(ISD::ZEXTLOAD, MVT::v2i16, Expand);
227 setLoadExtAction(ISD::EXTLOAD, MVT::v4i16, Expand);
228 setLoadExtAction(ISD::SEXTLOAD, MVT::v4i16, Expand);
229 setLoadExtAction(ISD::ZEXTLOAD, MVT::v4i16, Expand);
218 for (MVT VT : MVT::integer_vector_valuetypes()) {
219 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2i8, Expand);
220 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v2i8, Expand);
221 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::v2i8, Expand);
222 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v4i8, Expand);
223 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v4i8, Expand);
224 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::v4i8, Expand);
225 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2i16, Expand);
226 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v2i16, Expand);
227 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::v2i16, Expand);
228 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v4i16, Expand);
229 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v4i16, Expand);
230 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::v4i16, Expand);
231 }
230232
231233 setOperationAction(ISD::BR_CC, MVT::i1, Expand);
232234
245247
246248 setOperationAction(ISD::FP16_TO_FP, MVT::f64, Expand);
247249
248 setLoadExtAction(ISD::EXTLOAD, MVT::f16, Expand);
250 setLoadExtAction(ISD::EXTLOAD, MVT::f32, MVT::f16, Expand);
251 setLoadExtAction(ISD::EXTLOAD, MVT::f64, MVT::f16, Expand);
249252 setTruncStoreAction(MVT::f32, MVT::f16, Expand);
250253 setTruncStoreAction(MVT::f64, MVT::f16, Expand);
251254
121121
122122 // EXTLOAD should be the same as ZEXTLOAD. It is legal for some address
123123 // spaces, so it is custom lowered to handle those where it isn't.
124 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
125 setLoadExtAction(ISD::SEXTLOAD, MVT::i8, Custom);
126 setLoadExtAction(ISD::SEXTLOAD, MVT::i16, Custom);
127
128 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
129 setLoadExtAction(ISD::ZEXTLOAD, MVT::i8, Custom);
130 setLoadExtAction(ISD::ZEXTLOAD, MVT::i16, Custom);
131
132 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
133 setLoadExtAction(ISD::EXTLOAD, MVT::i8, Custom);
134 setLoadExtAction(ISD::EXTLOAD, MVT::i16, Custom);
124 for (MVT VT : MVT::integer_valuetypes()) {
125 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
126 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i8, Custom);
127 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i16, Custom);
128
129 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
130 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i8, Custom);
131 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i16, Custom);
132
133 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
134 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i8, Custom);
135 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i16, Custom);
136 }
135137
136138 setOperationAction(ISD::STORE, MVT::i8, Custom);
137139 setOperationAction(ISD::STORE, MVT::i32, Custom);
129129 setOperationAction(ISD::INTRINSIC_VOID, MVT::Other, Custom);
130130 setOperationAction(ISD::BRCOND, MVT::Other, Custom);
131131
132 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
133 setLoadExtAction(ISD::SEXTLOAD, MVT::i8, Custom);
134 setLoadExtAction(ISD::SEXTLOAD, MVT::i16, Custom);
135 setLoadExtAction(ISD::SEXTLOAD, MVT::i32, Expand);
136 setLoadExtAction(ISD::SEXTLOAD, MVT::v8i16, Expand);
137 setLoadExtAction(ISD::SEXTLOAD, MVT::v16i16, Expand);
138
139 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
140 setLoadExtAction(ISD::ZEXTLOAD, MVT::i8, Custom);
141 setLoadExtAction(ISD::ZEXTLOAD, MVT::i16, Custom);
142 setLoadExtAction(ISD::ZEXTLOAD, MVT::i32, Expand);
143
144 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
145 setLoadExtAction(ISD::EXTLOAD, MVT::i8, Custom);
146 setLoadExtAction(ISD::EXTLOAD, MVT::i16, Custom);
147 setLoadExtAction(ISD::EXTLOAD, MVT::i32, Expand);
148 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
132 for (MVT VT : MVT::integer_valuetypes()) {
133 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
134 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i8, Custom);
135 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i16, Custom);
136 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i32, Expand);
137
138 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
139 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i8, Custom);
140 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i16, Custom);
141 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i32, Expand);
142
143 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
144 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i8, Custom);
145 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i16, Custom);
146 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i32, Expand);
147 }
148
149 for (MVT VT : MVT::integer_vector_valuetypes()) {
150 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v8i16, Expand);
151 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v16i16, Expand);
152 }
153
154 for (MVT VT : MVT::fp_valuetypes())
155 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
149156
150157 setTruncStoreAction(MVT::i32, MVT::i8, Custom);
151158 setTruncStoreAction(MVT::i32, MVT::i16, Custom);
13771377 addRegisterClass(MVT::i64, &SP::I64RegsRegClass);
13781378
13791379 // Turn FP extload into load/fextend
1380 setLoadExtAction(ISD::EXTLOAD, MVT::f32, Expand);
1381 setLoadExtAction(ISD::EXTLOAD, MVT::f64, Expand);
1380 for (MVT VT : MVT::fp_valuetypes()) {
1381 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f32, Expand);
1382 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f64, Expand);
1383 }
13821384
13831385 // Sparc doesn't have i1 sign extending load
1384 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
1386 for (MVT VT : MVT::integer_valuetypes())
1387 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
13851388
13861389 // Turn FP truncstore into trunc + store.
13871390 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
217217 setOperationAction(ISD::SRA_PARTS, MVT::i64, Expand);
218218
219219 // We have native instructions for i8, i16 and i32 extensions, but not i1.
220 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
221 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
222 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
223220 setOperationAction(ISD::SIGN_EXTEND_INREG, MVT::i1, Expand);
221 for (MVT VT : MVT::integer_valuetypes()) {
222 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
223 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
224 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
225 }
224226
225227 // Handle the various types of symbolic address.
226228 setOperationAction(ISD::ConstantPool, PtrVT, Custom);
274276 // Needed so that we don't try to implement f128 constant loads using
275277 // a load-and-extend of a f80 constant (in cases where the constant
276278 // would fit in an f80).
277 setLoadExtAction(ISD::EXTLOAD, MVT::f80, Expand);
279 for (MVT VT : MVT::fp_valuetypes())
280 setLoadExtAction(ISD::EXTLOAD, VT, MVT::f80, Expand);
278281
279282 // Floating-point truncation and stores need to be done separately.
280283 setTruncStoreAction(MVT::f64, MVT::f32, Expand);
291291 if (Subtarget->is64Bit())
292292 addRegisterClass(MVT::i64, &X86::GR64RegClass);
293293
294 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
294 for (MVT VT : MVT::integer_valuetypes())
295 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
295296
296297 // We don't accept any truncstore of integer registers.
297298 setTruncStoreAction(MVT::i64, MVT::i32, Expand);
516517 setOperationAction(ISD::FP_TO_FP16, MVT::f64, Expand);
517518 setOperationAction(ISD::FP_TO_FP16, MVT::f80, Expand);
518519
519 setLoadExtAction(ISD::EXTLOAD, MVT::f16, Expand);
520 setLoadExtAction(ISD::EXTLOAD, MVT::f32, MVT::f16, Expand);
521 setLoadExtAction(ISD::EXTLOAD, MVT::f64, MVT::f16, Expand);
522 setLoadExtAction(ISD::EXTLOAD, MVT::f80, MVT::f16, Expand);
520523 setTruncStoreAction(MVT::f32, MVT::f16, Expand);
521524 setTruncStoreAction(MVT::f64, MVT::f16, Expand);
522525 setTruncStoreAction(MVT::f80, MVT::f16, Expand);
869872 setOperationAction(ISD::ANY_EXTEND, VT, Expand);
870873 setOperationAction(ISD::VSELECT, VT, Expand);
871874 setOperationAction(ISD::SELECT_CC, VT, Expand);
872 for (MVT InnerVT : MVT::vector_valuetypes())
873 setTruncStoreAction(VT, InnerVT, Expand);
874 setLoadExtAction(ISD::SEXTLOAD, VT, Expand);
875 setLoadExtAction(ISD::ZEXTLOAD, VT, Expand);
876
877 // N.b. ISD::EXTLOAD legality is basically ignored except for i1-like types,
878 // we have to deal with them whether we ask for Expansion or not. Setting
879 // Expand causes its own optimisation problems though, so leave them legal.
880 if (VT.getVectorElementType() == MVT::i1)
881 setLoadExtAction(ISD::EXTLOAD, VT, Expand);
875 for (MVT InnerVT : MVT::vector_valuetypes()) {
876 setTruncStoreAction(InnerVT, VT, Expand);
877
878 setLoadExtAction(ISD::SEXTLOAD, InnerVT, VT, Expand);
879 setLoadExtAction(ISD::ZEXTLOAD, InnerVT, VT, Expand);
880
881 // N.b. ISD::EXTLOAD legality is basically ignored except for i1-like types,
882 // we have to deal with them whether we ask for Expansion or not. Setting
883 // Expand causes its own optimisation problems though, so leave them legal.
884 if (VT.getVectorElementType() == MVT::i1)
885 setLoadExtAction(ISD::EXTLOAD, InnerVT, VT, Expand);
886 }
882887 }
883888
884889 // FIXME: In order to prevent SSE instructions being expanded to MMX ones
10081013 // memory vector types which we can load as a scalar (or sequence of
10091014 // scalars) and extend in-register to a legal 128-bit vector type. For sext
10101015 // loads these must work with a single scalar load.
1011 setLoadExtAction(ISD::SEXTLOAD, MVT::v4i8, Custom);
1012 setLoadExtAction(ISD::SEXTLOAD, MVT::v4i16, Custom);
1013 setLoadExtAction(ISD::SEXTLOAD, MVT::v8i8, Custom);
1014 setLoadExtAction(ISD::EXTLOAD, MVT::v2i8, Custom);
1015 setLoadExtAction(ISD::EXTLOAD, MVT::v2i16, Custom);
1016 setLoadExtAction(ISD::EXTLOAD, MVT::v2i32, Custom);
1017 setLoadExtAction(ISD::EXTLOAD, MVT::v4i8, Custom);
1018 setLoadExtAction(ISD::EXTLOAD, MVT::v4i16, Custom);
1019 setLoadExtAction(ISD::EXTLOAD, MVT::v8i8, Custom);
1016 for (MVT VT : MVT::integer_vector_valuetypes()) {
1017 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v4i8, Custom);
1018 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v4i16, Custom);
1019 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v8i8, Custom);
1020 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2i8, Custom);
1021 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2i16, Custom);
1022 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2i32, Custom);
1023 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v4i8, Custom);
1024 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v4i16, Custom);
1025 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v8i8, Custom);
1026 }
10201027
10211028 setOperationAction(ISD::BUILD_VECTOR, MVT::v2f64, Custom);
10221029 setOperationAction(ISD::BUILD_VECTOR, MVT::v2i64, Custom);
10691076 setOperationAction(ISD::FP_EXTEND, MVT::v2f32, Custom);
10701077 setOperationAction(ISD::FP_ROUND, MVT::v2f32, Custom);
10711078
1072 setLoadExtAction(ISD::EXTLOAD, MVT::v2f32, Legal);
1079 for (MVT VT : MVT::fp_vector_valuetypes())
1080 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v2f32, Legal);
10731081
10741082 setOperationAction(ISD::BITCAST, MVT::v2i32, Custom);
10751083 setOperationAction(ISD::BITCAST, MVT::v4i16, Custom);
11131121
11141122 // SSE41 brings specific instructions for doing vector sign extend even in
11151123 // cases where we don't have SRA.
1116 setLoadExtAction(ISD::SEXTLOAD, MVT::v2i8, Custom);
1117 setLoadExtAction(ISD::SEXTLOAD, MVT::v2i16, Custom);
1118 setLoadExtAction(ISD::SEXTLOAD, MVT::v2i32, Custom);
1124 for (MVT VT : MVT::integer_vector_valuetypes()) {
1125 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v2i8, Custom);
1126 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v2i16, Custom);
1127 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::v2i32, Custom);
1128 }
11191129
11201130 // i8 and i16 vectors are custom because the source register and source
11211131 // source memory operand types are not the same width. f32 vectors are
12111221 setOperationAction(ISD::UINT_TO_FP, MVT::v8i8, Custom);
12121222 setOperationAction(ISD::UINT_TO_FP, MVT::v8i16, Custom);
12131223
1214 setLoadExtAction(ISD::EXTLOAD, MVT::v4f32, Legal);
1224 for (MVT VT : MVT::fp_vector_valuetypes())
1225 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v4f32, Legal);
12151226
12161227 setOperationAction(ISD::SRL, MVT::v16i16, Custom);
12171228 setOperationAction(ISD::SRL, MVT::v32i8, Custom);
13771388 addRegisterClass(MVT::v8i1, &X86::VK8RegClass);
13781389 addRegisterClass(MVT::v16i1, &X86::VK16RegClass);
13791390
1391 for (MVT VT : MVT::fp_vector_valuetypes())
1392 setLoadExtAction(ISD::EXTLOAD, VT, MVT::v8f32, Legal);
1393
13801394 setOperationAction(ISD::BR_CC, MVT::i1, Expand);
13811395 setOperationAction(ISD::SETCC, MVT::i1, Custom);
13821396 setOperationAction(ISD::XOR, MVT::i1, Legal);
13831397 setOperationAction(ISD::OR, MVT::i1, Legal);
13841398 setOperationAction(ISD::AND, MVT::i1, Legal);
1385 setLoadExtAction(ISD::EXTLOAD, MVT::v8f32, Legal);
13861399 setOperationAction(ISD::LOAD, MVT::v16f32, Legal);
13871400 setOperationAction(ISD::LOAD, MVT::v8f64, Legal);
13881401 setOperationAction(ISD::LOAD, MVT::v8i64, Legal);
126126 setOperationAction(ISD::ConstantPool, MVT::i32, Custom);
127127
128128 // Loads
129 setLoadExtAction(ISD::EXTLOAD, MVT::i1, Promote);
130 setLoadExtAction(ISD::ZEXTLOAD, MVT::i1, Promote);
131 setLoadExtAction(ISD::SEXTLOAD, MVT::i1, Promote);
132
133 setLoadExtAction(ISD::SEXTLOAD, MVT::i8, Expand);
134 setLoadExtAction(ISD::ZEXTLOAD, MVT::i16, Expand);
129 for (MVT VT : MVT::integer_valuetypes()) {
130 setLoadExtAction(ISD::EXTLOAD, VT, MVT::i1, Promote);
131 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i1, Promote);
132 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i1, Promote);
133
134 setLoadExtAction(ISD::SEXTLOAD, VT, MVT::i8, Expand);
135 setLoadExtAction(ISD::ZEXTLOAD, VT, MVT::i16, Expand);
136 }
135137
136138 // Custom expand misaligned loads / stores.
137139 setOperationAction(ISD::LOAD, MVT::i32, Custom);