; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,AVX512F
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512vl | FileCheck %s --check-prefixes=AVX512,AVX512VL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512bw | FileCheck %s --check-prefixes=AVX512,AVX512BW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512vbmi2 | FileCheck %s --check-prefixes=AVX512,AVX512VBMI2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512bw,+avx512vl | FileCheck %s --check-prefixes=AVX512,AVX512VLBW
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f,+avx512vbmi2,+avx512vl | FileCheck %s --check-prefixes=AVX512,AVX512VLVBMI2

declare <8 x i64> @llvm.fshr.v8i64(<8 x i64>, <8 x i64>, <8 x i64>)
declare <16 x i32> @llvm.fshr.v16i32(<16 x i32>, <16 x i32>, <16 x i32>)
declare <32 x i16> @llvm.fshr.v32i16(<32 x i16>, <32 x i16>, <32 x i16>)
declare <64 x i8> @llvm.fshr.v64i8(<64 x i8>, <64 x i8>, <64 x i8>)
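;
; llvm.fshr(x, y, amt) treats the pair x:y as a double-width value (x in the
; high half, y in the low half), shifts it right by amt modulo the element
; bit width, and returns the low half. A zero (modulo) amount returns y
; unchanged, which the expanded lowerings below handle with an explicit
; compare/select on the masked amount.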

;
; Variable Shifts
;
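; Each lane takes an independent shift amount from %amt.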

define <8 x i64> @var_funnnel_v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> %amt) nounwind {
; AVX512F-LABEL: var_funnnel_v8i64:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512F-NEXT:    vpsrlvq %zmm2, %zmm1, %zmm3
; AVX512F-NEXT:    vpbroadcastq {{.*#+}} zmm4 = [64,64,64,64,64,64,64,64]
; AVX512F-NEXT:    vpsubq %zmm2, %zmm4, %zmm4
; AVX512F-NEXT:    vpsllvq %zmm4, %zmm0, %zmm0
; AVX512F-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512F-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: var_funnnel_v8i64:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512VL-NEXT:    vpsrlvq %zmm2, %zmm1, %zmm3
; AVX512VL-NEXT:    vpbroadcastq {{.*#+}} zmm4 = [64,64,64,64,64,64,64,64]
; AVX512VL-NEXT:    vpsubq %zmm2, %zmm4, %zmm4
; AVX512VL-NEXT:    vpsllvq %zmm4, %zmm0, %zmm0
; AVX512VL-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VL-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512VL-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: var_funnnel_v8i64:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512BW-NEXT:    vpsrlvq %zmm2, %zmm1, %zmm3
; AVX512BW-NEXT:    vpbroadcastq {{.*#+}} zmm4 = [64,64,64,64,64,64,64,64]
; AVX512BW-NEXT:    vpsubq %zmm2, %zmm4, %zmm4
; AVX512BW-NEXT:    vpsllvq %zmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: var_funnnel_v8i64:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvq %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: var_funnnel_v8i64:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsrlvq %zmm2, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vpbroadcastq {{.*#+}} zmm4 = [64,64,64,64,64,64,64,64]
; AVX512VLBW-NEXT:    vpsubq %zmm2, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpsllvq %zmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: var_funnnel_v8i64:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvq %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <8 x i64> @llvm.fshr.v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> %amt)
  ret <8 x i64> %res
}

define <16 x i32> @var_funnnel_v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> %amt) nounwind {
; AVX512F-LABEL: var_funnnel_v16i32:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512F-NEXT:    vpsrlvd %zmm2, %zmm1, %zmm3
; AVX512F-NEXT:    vpbroadcastd {{.*#+}} zmm4 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
; AVX512F-NEXT:    vpsubd %zmm2, %zmm4, %zmm4
; AVX512F-NEXT:    vpsllvd %zmm4, %zmm0, %zmm0
; AVX512F-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512F-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512F-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: var_funnnel_v16i32:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512VL-NEXT:    vpsrlvd %zmm2, %zmm1, %zmm3
; AVX512VL-NEXT:    vpbroadcastd {{.*#+}} zmm4 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
; AVX512VL-NEXT:    vpsubd %zmm2, %zmm4, %zmm4
; AVX512VL-NEXT:    vpsllvd %zmm4, %zmm0, %zmm0
; AVX512VL-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512VL-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512VL-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: var_funnnel_v16i32:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512BW-NEXT:    vpsrlvd %zmm2, %zmm1, %zmm3
; AVX512BW-NEXT:    vpbroadcastd {{.*#+}} zmm4 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
; AVX512BW-NEXT:    vpsubd %zmm2, %zmm4, %zmm4
; AVX512BW-NEXT:    vpsllvd %zmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: var_funnnel_v16i32:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvd %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: var_funnnel_v16i32:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsrlvd %zmm2, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vpbroadcastd {{.*#+}} zmm4 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
; AVX512VLBW-NEXT:    vpsubd %zmm2, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpsllvd %zmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: var_funnnel_v16i32:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvd %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <16 x i32> @llvm.fshr.v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> %amt)
  ret <16 x i32> %res
}

define <32 x i16> @var_funnnel_v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> %amt) nounwind {
; AVX512F-LABEL: var_funnnel_v32i16:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512F-NEXT:    vextracti64x4 $1, %zmm2, %ymm5
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm6 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm6, %ymm5, %ymm5
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm7 = ymm5[0],zero,ymm5[1],zero,ymm5[2],zero,ymm5[3],zero,ymm5[4],zero,ymm5[5],zero,ymm5[6],zero,ymm5[7],zero,ymm5[8],zero,ymm5[9],zero,ymm5[10],zero,ymm5[11],zero,ymm5[12],zero,ymm5[13],zero,ymm5[14],zero,ymm5[15],zero
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm8 = ymm4[0],zero,ymm4[1],zero,ymm4[2],zero,ymm4[3],zero,ymm4[4],zero,ymm4[5],zero,ymm4[6],zero,ymm4[7],zero,ymm4[8],zero,ymm4[9],zero,ymm4[10],zero,ymm4[11],zero,ymm4[12],zero,ymm4[13],zero,ymm4[14],zero,ymm4[15],zero
; AVX512F-NEXT:    vpsrlvd %zmm7, %zmm8, %zmm7
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm8 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX512F-NEXT:    vpsubw %ymm5, %ymm8, %ymm9
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm9 = ymm9[0],zero,ymm9[1],zero,ymm9[2],zero,ymm9[3],zero,ymm9[4],zero,ymm9[5],zero,ymm9[6],zero,ymm9[7],zero,ymm9[8],zero,ymm9[9],zero,ymm9[10],zero,ymm9[11],zero,ymm9[12],zero,ymm9[13],zero,ymm9[14],zero,ymm9[15],zero
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm3 = ymm3[0],zero,ymm3[1],zero,ymm3[2],zero,ymm3[3],zero,ymm3[4],zero,ymm3[5],zero,ymm3[6],zero,ymm3[7],zero,ymm3[8],zero,ymm3[9],zero,ymm3[10],zero,ymm3[11],zero,ymm3[12],zero,ymm3[13],zero,ymm3[14],zero,ymm3[15],zero
; AVX512F-NEXT:    vpsllvd %zmm9, %zmm3, %zmm3
; AVX512F-NEXT:    vpord %zmm7, %zmm3, %zmm3
; AVX512F-NEXT:    vpmovdw %zmm3, %ymm3
; AVX512F-NEXT:    vpxor %xmm7, %xmm7, %xmm7
; AVX512F-NEXT:    vpcmpeqw %ymm7, %ymm5, %ymm5
; AVX512F-NEXT:    vpblendvb %ymm5, %ymm4, %ymm3, %ymm3
; AVX512F-NEXT:    vpand %ymm6, %ymm2, %ymm2
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm4 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm5 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
; AVX512F-NEXT:    vpsrlvd %zmm4, %zmm5, %zmm4
; AVX512F-NEXT:    vpsubw %ymm2, %ymm8, %ymm5
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm5 = ymm5[0],zero,ymm5[1],zero,ymm5[2],zero,ymm5[3],zero,ymm5[4],zero,ymm5[5],zero,ymm5[6],zero,ymm5[7],zero,ymm5[8],zero,ymm5[9],zero,ymm5[10],zero,ymm5[11],zero,ymm5[12],zero,ymm5[13],zero,ymm5[14],zero,ymm5[15],zero
; AVX512F-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512F-NEXT:    vpsllvd %zmm5, %zmm0, %zmm0
; AVX512F-NEXT:    vpord %zmm4, %zmm0, %zmm0
; AVX512F-NEXT:    vpmovdw %zmm0, %ymm0
; AVX512F-NEXT:    vpcmpeqw %ymm7, %ymm2, %ymm2
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: var_funnnel_v32i16:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm2, %ymm5
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm6 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512VL-NEXT:    vpand %ymm6, %ymm5, %ymm5
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm7 = ymm5[0],zero,ymm5[1],zero,ymm5[2],zero,ymm5[3],zero,ymm5[4],zero,ymm5[5],zero,ymm5[6],zero,ymm5[7],zero,ymm5[8],zero,ymm5[9],zero,ymm5[10],zero,ymm5[11],zero,ymm5[12],zero,ymm5[13],zero,ymm5[14],zero,ymm5[15],zero
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm8 = ymm4[0],zero,ymm4[1],zero,ymm4[2],zero,ymm4[3],zero,ymm4[4],zero,ymm4[5],zero,ymm4[6],zero,ymm4[7],zero,ymm4[8],zero,ymm4[9],zero,ymm4[10],zero,ymm4[11],zero,ymm4[12],zero,ymm4[13],zero,ymm4[14],zero,ymm4[15],zero
; AVX512VL-NEXT:    vpsrlvd %zmm7, %zmm8, %zmm7
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm8 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX512VL-NEXT:    vpsubw %ymm5, %ymm8, %ymm9
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm9 = ymm9[0],zero,ymm9[1],zero,ymm9[2],zero,ymm9[3],zero,ymm9[4],zero,ymm9[5],zero,ymm9[6],zero,ymm9[7],zero,ymm9[8],zero,ymm9[9],zero,ymm9[10],zero,ymm9[11],zero,ymm9[12],zero,ymm9[13],zero,ymm9[14],zero,ymm9[15],zero
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm3 = ymm3[0],zero,ymm3[1],zero,ymm3[2],zero,ymm3[3],zero,ymm3[4],zero,ymm3[5],zero,ymm3[6],zero,ymm3[7],zero,ymm3[8],zero,ymm3[9],zero,ymm3[10],zero,ymm3[11],zero,ymm3[12],zero,ymm3[13],zero,ymm3[14],zero,ymm3[15],zero
; AVX512VL-NEXT:    vpsllvd %zmm9, %zmm3, %zmm3
; AVX512VL-NEXT:    vpord %zmm7, %zmm3, %zmm3
; AVX512VL-NEXT:    vpmovdw %zmm3, %ymm3
; AVX512VL-NEXT:    vpxor %xmm7, %xmm7, %xmm7
; AVX512VL-NEXT:    vpcmpeqw %ymm7, %ymm5, %ymm5
; AVX512VL-NEXT:    vpblendvb %ymm5, %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpand %ymm6, %ymm2, %ymm2
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm4 = ymm2[0],zero,ymm2[1],zero,ymm2[2],zero,ymm2[3],zero,ymm2[4],zero,ymm2[5],zero,ymm2[6],zero,ymm2[7],zero,ymm2[8],zero,ymm2[9],zero,ymm2[10],zero,ymm2[11],zero,ymm2[12],zero,ymm2[13],zero,ymm2[14],zero,ymm2[15],zero
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm5 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
; AVX512VL-NEXT:    vpsrlvd %zmm4, %zmm5, %zmm4
; AVX512VL-NEXT:    vpsubw %ymm2, %ymm8, %ymm5
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm5 = ymm5[0],zero,ymm5[1],zero,ymm5[2],zero,ymm5[3],zero,ymm5[4],zero,ymm5[5],zero,ymm5[6],zero,ymm5[7],zero,ymm5[8],zero,ymm5[9],zero,ymm5[10],zero,ymm5[11],zero,ymm5[12],zero,ymm5[13],zero,ymm5[14],zero,ymm5[15],zero
; AVX512VL-NEXT:    vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512VL-NEXT:    vpsllvd %zmm5, %zmm0, %zmm0
; AVX512VL-NEXT:    vpord %zmm4, %zmm0, %zmm0
; AVX512VL-NEXT:    vpmovdw %zmm0, %ymm0
; AVX512VL-NEXT:    vpcmpeqw %ymm7, %ymm2, %ymm2
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: var_funnnel_v32i16:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512BW-NEXT:    vpsrlvw %zmm2, %zmm1, %zmm3
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX512BW-NEXT:    vpsubw %zmm2, %zmm4, %zmm4
; AVX512BW-NEXT:    vpsllvw %zmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmw %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: var_funnnel_v32i16:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvw %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: var_funnnel_v32i16:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsrlvw %zmm2, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
; AVX512VLBW-NEXT:    vpsubw %zmm2, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpsllvw %zmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmw %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: var_funnnel_v32i16:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvw %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <32 x i16> @llvm.fshr.v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> %amt)
  ret <32 x i16> %res
}

define <64 x i8> @var_funnnel_v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> %amt) nounwind {
; AVX512F-LABEL: var_funnnel_v64i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm8
; AVX512F-NEXT:    vextracti64x4 $1, %zmm2, %ymm6
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm3, %ymm5
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512F-NEXT:    vpand %ymm4, %ymm5, %ymm7
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm5 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
; AVX512F-NEXT:    vpand %ymm5, %ymm6, %ymm9
; AVX512F-NEXT:    vpsllw $5, %ymm9, %ymm10
; AVX512F-NEXT:    vpblendvb %ymm10, %ymm7, %ymm3, %ymm7
; AVX512F-NEXT:    vpsrlw $2, %ymm7, %ymm11
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm6 = [63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63]
; AVX512F-NEXT:    vpand %ymm6, %ymm11, %ymm11
; AVX512F-NEXT:    vpaddb %ymm10, %ymm10, %ymm10
; AVX512F-NEXT:    vpblendvb %ymm10, %ymm11, %ymm7, %ymm11
; AVX512F-NEXT:    vpsrlw $1, %ymm11, %ymm12
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm7 = [127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127]
; AVX512F-NEXT:    vpand %ymm7, %ymm12, %ymm12
; AVX512F-NEXT:    vpaddb %ymm10, %ymm10, %ymm10
; AVX512F-NEXT:    vpblendvb %ymm10, %ymm12, %ymm11, %ymm10
; AVX512F-NEXT:    vpsllw $4, %ymm8, %ymm11
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm12 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512F-NEXT:    vpand %ymm12, %ymm11, %ymm11
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm13 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512F-NEXT:    vpsubb %ymm9, %ymm13, %ymm14
; AVX512F-NEXT:    vpsllw $5, %ymm14, %ymm14
; AVX512F-NEXT:    vpblendvb %ymm14, %ymm11, %ymm8, %ymm8
; AVX512F-NEXT:    vpsllw $2, %ymm8, %ymm11
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm15 = [252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252]
; AVX512F-NEXT:    vpand %ymm15, %ymm11, %ymm11
; AVX512F-NEXT:    vpaddb %ymm14, %ymm14, %ymm14
; AVX512F-NEXT:    vpblendvb %ymm14, %ymm11, %ymm8, %ymm8
; AVX512F-NEXT:    vpaddb %ymm8, %ymm8, %ymm11
; AVX512F-NEXT:    vpaddb %ymm14, %ymm14, %ymm14
; AVX512F-NEXT:    vpblendvb %ymm14, %ymm11, %ymm8, %ymm8
; AVX512F-NEXT:    vpor %ymm10, %ymm8, %ymm8
; AVX512F-NEXT:    vpxor %xmm10, %xmm10, %xmm10
; AVX512F-NEXT:    vpcmpeqb %ymm10, %ymm9, %ymm9
; AVX512F-NEXT:    vpblendvb %ymm9, %ymm3, %ymm8, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm8
; AVX512F-NEXT:    vpand %ymm4, %ymm8, %ymm4
; AVX512F-NEXT:    vpand %ymm5, %ymm2, %ymm2
; AVX512F-NEXT:    vpsllw $5, %ymm2, %ymm5
; AVX512F-NEXT:    vpblendvb %ymm5, %ymm4, %ymm1, %ymm4
; AVX512F-NEXT:    vpsrlw $2, %ymm4, %ymm8
; AVX512F-NEXT:    vpand %ymm6, %ymm8, %ymm6
; AVX512F-NEXT:    vpaddb %ymm5, %ymm5, %ymm5
; AVX512F-NEXT:    vpblendvb %ymm5, %ymm6, %ymm4, %ymm4
; AVX512F-NEXT:    vpsrlw $1, %ymm4, %ymm6
; AVX512F-NEXT:    vpand %ymm7, %ymm6, %ymm6
; AVX512F-NEXT:    vpaddb %ymm5, %ymm5, %ymm5
; AVX512F-NEXT:    vpblendvb %ymm5, %ymm6, %ymm4, %ymm4
; AVX512F-NEXT:    vpsllw $4, %ymm0, %ymm5
; AVX512F-NEXT:    vpand %ymm12, %ymm5, %ymm5
; AVX512F-NEXT:    vpsubb %ymm2, %ymm13, %ymm6
; AVX512F-NEXT:    vpsllw $5, %ymm6, %ymm6
; AVX512F-NEXT:    vpblendvb %ymm6, %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpsllw $2, %ymm0, %ymm5
; AVX512F-NEXT:    vpand %ymm15, %ymm5, %ymm5
; AVX512F-NEXT:    vpaddb %ymm6, %ymm6, %ymm6
; AVX512F-NEXT:    vpblendvb %ymm6, %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpaddb %ymm0, %ymm0, %ymm5
; AVX512F-NEXT:    vpaddb %ymm6, %ymm6, %ymm6
; AVX512F-NEXT:    vpblendvb %ymm6, %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm4, %ymm0, %ymm0
; AVX512F-NEXT:    vpcmpeqb %ymm10, %ymm2, %ymm2
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: var_funnnel_v64i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm4
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm2, %ymm5
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512VL-NEXT:    vpsrlw $4, %ymm3, %ymm6
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm7 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
; AVX512VL-NEXT:    vpand %ymm7, %ymm6, %ymm6
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm8 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
; AVX512VL-NEXT:    vpand %ymm8, %ymm5, %ymm5
; AVX512VL-NEXT:    vpsllw $5, %ymm5, %ymm9
; AVX512VL-NEXT:    vpblendvb %ymm9, %ymm6, %ymm3, %ymm6
; AVX512VL-NEXT:    vpsrlw $2, %ymm6, %ymm10
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm11 = [63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63]
; AVX512VL-NEXT:    vpand %ymm11, %ymm10, %ymm10
; AVX512VL-NEXT:    vpaddb %ymm9, %ymm9, %ymm9
; AVX512VL-NEXT:    vpblendvb %ymm9, %ymm10, %ymm6, %ymm6
; AVX512VL-NEXT:    vpsrlw $1, %ymm6, %ymm10
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm12 = [127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127]
; AVX512VL-NEXT:    vpand %ymm12, %ymm10, %ymm10
; AVX512VL-NEXT:    vpaddb %ymm9, %ymm9, %ymm9
; AVX512VL-NEXT:    vpblendvb %ymm9, %ymm10, %ymm6, %ymm6
; AVX512VL-NEXT:    vpsllw $4, %ymm4, %ymm9
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm10 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512VL-NEXT:    vpand %ymm10, %ymm9, %ymm9
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm13 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VL-NEXT:    vpsubb %ymm5, %ymm13, %ymm14
; AVX512VL-NEXT:    vpsllw $5, %ymm14, %ymm14
; AVX512VL-NEXT:    vpblendvb %ymm14, %ymm9, %ymm4, %ymm4
; AVX512VL-NEXT:    vpsllw $2, %ymm4, %ymm9
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm15 = [252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252]
; AVX512VL-NEXT:    vpand %ymm15, %ymm9, %ymm9
; AVX512VL-NEXT:    vpaddb %ymm14, %ymm14, %ymm14
; AVX512VL-NEXT:    vpblendvb %ymm14, %ymm9, %ymm4, %ymm4
; AVX512VL-NEXT:    vpaddb %ymm4, %ymm4, %ymm9
; AVX512VL-NEXT:    vpaddb %ymm14, %ymm14, %ymm14
; AVX512VL-NEXT:    vpblendvb %ymm14, %ymm9, %ymm4, %ymm4
; AVX512VL-NEXT:    vpor %ymm6, %ymm4, %ymm4
; AVX512VL-NEXT:    vpxor %xmm6, %xmm6, %xmm6
; AVX512VL-NEXT:    vpcmpeqb %ymm6, %ymm5, %ymm5
; AVX512VL-NEXT:    vpblendvb %ymm5, %ymm3, %ymm4, %ymm3
; AVX512VL-NEXT:    vpsrlw $4, %ymm1, %ymm4
; AVX512VL-NEXT:    vpand %ymm7, %ymm4, %ymm4
; AVX512VL-NEXT:    vpand %ymm8, %ymm2, %ymm2
; AVX512VL-NEXT:    vpsllw $5, %ymm2, %ymm5
; AVX512VL-NEXT:    vpblendvb %ymm5, %ymm4, %ymm1, %ymm4
; AVX512VL-NEXT:    vpsrlw $2, %ymm4, %ymm7
; AVX512VL-NEXT:    vpand %ymm11, %ymm7, %ymm7
; AVX512VL-NEXT:    vpaddb %ymm5, %ymm5, %ymm5
; AVX512VL-NEXT:    vpblendvb %ymm5, %ymm7, %ymm4, %ymm4
; AVX512VL-NEXT:    vpsrlw $1, %ymm4, %ymm7
; AVX512VL-NEXT:    vpand %ymm12, %ymm7, %ymm7
; AVX512VL-NEXT:    vpaddb %ymm5, %ymm5, %ymm5
; AVX512VL-NEXT:    vpblendvb %ymm5, %ymm7, %ymm4, %ymm4
; AVX512VL-NEXT:    vpsllw $4, %ymm0, %ymm5
; AVX512VL-NEXT:    vpand %ymm10, %ymm5, %ymm5
; AVX512VL-NEXT:    vpsubb %ymm2, %ymm13, %ymm7
; AVX512VL-NEXT:    vpsllw $5, %ymm7, %ymm7
; AVX512VL-NEXT:    vpblendvb %ymm7, %ymm5, %ymm0, %ymm0
; AVX512VL-NEXT:    vpsllw $2, %ymm0, %ymm5
; AVX512VL-NEXT:    vpand %ymm15, %ymm5, %ymm5
; AVX512VL-NEXT:    vpaddb %ymm7, %ymm7, %ymm7
; AVX512VL-NEXT:    vpblendvb %ymm7, %ymm5, %ymm0, %ymm0
; AVX512VL-NEXT:    vpaddb %ymm0, %ymm0, %ymm5
; AVX512VL-NEXT:    vpaddb %ymm7, %ymm7, %ymm7
; AVX512VL-NEXT:    vpblendvb %ymm7, %ymm5, %ymm0, %ymm0
; AVX512VL-NEXT:    vpor %ymm4, %ymm0, %ymm0
; AVX512VL-NEXT:    vpcmpeqb %ymm6, %ymm2, %ymm2
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: var_funnnel_v64i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlw $4, %zmm1, %zmm3
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512BW-NEXT:    vpsllw $5, %zmm2, %zmm4
; AVX512BW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512BW-NEXT:    vpblendmb %zmm3, %zmm1, %zmm3 {%k1}
; AVX512BW-NEXT:    vpsrlw $2, %zmm3, %zmm5
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512BW-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512BW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512BW-NEXT:    vpsrlw $1, %zmm3, %zmm5
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512BW-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512BW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512BW-NEXT:    vpsubb %zmm2, %zmm4, %zmm4
; AVX512BW-NEXT:    vpsllw $5, %zmm4, %zmm4
; AVX512BW-NEXT:    vpaddb %zmm4, %zmm4, %zmm5
; AVX512BW-NEXT:    vpmovb2m %zmm5, %k1
; AVX512BW-NEXT:    vpmovb2m %zmm4, %k2
; AVX512BW-NEXT:    vpsllw $4, %zmm0, %zmm4
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512BW-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
; AVX512BW-NEXT:    vpsllw $2, %zmm0, %zmm4
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512BW-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k1}
; AVX512BW-NEXT:    vpaddb %zmm5, %zmm5, %zmm4
; AVX512BW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512BW-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: var_funnnel_v64i8:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpsrlw $4, %zmm1, %zmm3
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpsllw $5, %zmm2, %zmm4
; AVX512VBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VBMI2-NEXT:    vpblendmb %zmm3, %zmm1, %zmm3 {%k1}
; AVX512VBMI2-NEXT:    vpsrlw $2, %zmm3, %zmm5
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VBMI2-NEXT:    vpsrlw $1, %zmm3, %zmm5
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VBMI2-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VBMI2-NEXT:    vpsubb %zmm2, %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vpsllw $5, %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm5
; AVX512VBMI2-NEXT:    vpmovb2m %zmm5, %k1
; AVX512VBMI2-NEXT:    vpmovb2m %zmm4, %k2
; AVX512VBMI2-NEXT:    vpsllw $4, %zmm0, %zmm4
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
; AVX512VBMI2-NEXT:    vpsllw $2, %zmm0, %zmm4
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    vpaddb %zmm5, %zmm5, %zmm4
; AVX512VBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VBMI2-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VBMI2-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: var_funnnel_v64i8:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlw $4, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsllw $5, %zmm2, %zmm4
; AVX512VLBW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLBW-NEXT:    vpblendmb %zmm3, %zmm1, %zmm3 {%k1}
; AVX512VLBW-NEXT:    vpsrlw $2, %zmm3, %zmm5
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VLBW-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VLBW-NEXT:    vpsrlw $1, %zmm3, %zmm5
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VLBW-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VLBW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VLBW-NEXT:    vpsubb %zmm2, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpsllw $5, %zmm4, %zmm4
; AVX512VLBW-NEXT:    vpaddb %zmm4, %zmm4, %zmm5
; AVX512VLBW-NEXT:    vpmovb2m %zmm5, %k1
; AVX512VLBW-NEXT:    vpmovb2m %zmm4, %k2
; AVX512VLBW-NEXT:    vpsllw $4, %zmm0, %zmm4
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VLBW-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
; AVX512VLBW-NEXT:    vpsllw $2, %zmm0, %zmm4
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VLBW-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k1}
; AVX512VLBW-NEXT:    vpaddb %zmm5, %zmm5, %zmm4
; AVX512VLBW-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLBW-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: var_funnnel_v64i8:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpsrlw $4, %zmm1, %zmm3
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpsllw $5, %zmm2, %zmm4
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLVBMI2-NEXT:    vpblendmb %zmm3, %zmm1, %zmm3 {%k1}
; AVX512VLVBMI2-NEXT:    vpsrlw $2, %zmm3, %zmm5
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VLVBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VLVBMI2-NEXT:    vpsrlw $1, %zmm3, %zmm5
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm5, %zmm5
; AVX512VLVBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm5, %zmm3 {%k1}
; AVX512VLVBMI2-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VLVBMI2-NEXT:    vpsubb %zmm2, %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vpsllw $5, %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vpaddb %zmm4, %zmm4, %zmm5
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm5, %k1
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm4, %k2
; AVX512VLVBMI2-NEXT:    vpsllw $4, %zmm0, %zmm4
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
; AVX512VLVBMI2-NEXT:    vpsllw $2, %zmm0, %zmm4
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    vpaddb %zmm5, %zmm5, %zmm4
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm4, %k1
; AVX512VLVBMI2-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLVBMI2-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    retq
  %res = call <64 x i8> @llvm.fshr.v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> %amt)
  ret <64 x i8> %res
}

;
; Uniform Variable Shifts
;
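; The shift amount is a splat of element 0, so the non-VBMI2 lowerings can
; use the scalar-count shift forms (vpsrlq/vpsllq etc. with an xmm count).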

define <8 x i64> @splatvar_funnnel_v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> %amt) nounwind {
; AVX512F-LABEL: splatvar_funnnel_v8i64:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512F-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512F-NEXT:    vpsrlq %xmm2, %zmm1, %zmm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm4 = [64,64]
; AVX512F-NEXT:    vpsubq %xmm2, %xmm4, %xmm4
; AVX512F-NEXT:    vpsllq %xmm4, %zmm0, %zmm0
; AVX512F-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512F-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatvar_funnnel_v8i64:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512VL-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512VL-NEXT:    vpsrlq %xmm2, %zmm1, %zmm3
; AVX512VL-NEXT:    vmovdqa {{.*#+}} xmm4 = [64,64]
; AVX512VL-NEXT:    vpsubq %xmm2, %xmm4, %xmm4
; AVX512VL-NEXT:    vpsllq %xmm4, %zmm0, %zmm0
; AVX512VL-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VL-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512VL-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatvar_funnnel_v8i64:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512BW-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512BW-NEXT:    vpsrlq %xmm2, %zmm1, %zmm3
; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm4 = [64,64]
; AVX512BW-NEXT:    vpsubq %xmm2, %xmm4, %xmm4
; AVX512BW-NEXT:    vpsllq %xmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatvar_funnnel_v8i64:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512VBMI2-NEXT:    vpshrdvq %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatvar_funnnel_v8i64:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip){1to8}, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsrlq %xmm2, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vmovdqa {{.*#+}} xmm4 = [64,64]
; AVX512VLBW-NEXT:    vpsubq %xmm2, %xmm4, %xmm4
; AVX512VLBW-NEXT:    vpsllq %xmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmq %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqa64 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatvar_funnnel_v8i64:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpbroadcastq %xmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpshrdvq %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %splat = shufflevector <8 x i64> %amt, <8 x i64> undef, <8 x i32> zeroinitializer
  %res = call <8 x i64> @llvm.fshr.v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> %splat)
  ret <8 x i64> %res
}

define <16 x i32> @splatvar_funnnel_v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> %amt) nounwind {
; AVX512F-LABEL: splatvar_funnnel_v16i32:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512F-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512F-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
; AVX512F-NEXT:    vpsrld %xmm3, %zmm1, %zmm3
; AVX512F-NEXT:    vpbroadcastd {{.*#+}} xmm4 = [32,32,32,32]
; AVX512F-NEXT:    vpsubd %xmm2, %xmm4, %xmm4
; AVX512F-NEXT:    vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
; AVX512F-NEXT:    vpslld %xmm4, %zmm0, %zmm0
; AVX512F-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512F-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512F-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatvar_funnnel_v16i32:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512VL-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512VL-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
; AVX512VL-NEXT:    vpsrld %xmm3, %zmm1, %zmm3
; AVX512VL-NEXT:    vpbroadcastd {{.*#+}} xmm4 = [32,32,32,32]
; AVX512VL-NEXT:    vpsubd %xmm2, %xmm4, %xmm4
; AVX512VL-NEXT:    vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
; AVX512VL-NEXT:    vpslld %xmm4, %zmm0, %zmm0
; AVX512VL-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512VL-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512VL-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatvar_funnnel_v16i32:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512BW-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512BW-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
; AVX512BW-NEXT:    vpsrld %xmm3, %zmm1, %zmm3
; AVX512BW-NEXT:    vpbroadcastd {{.*#+}} xmm4 = [32,32,32,32]
; AVX512BW-NEXT:    vpsubd %xmm2, %xmm4, %xmm4
; AVX512BW-NEXT:    vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
; AVX512BW-NEXT:    vpslld %xmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatvar_funnnel_v16i32:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512VBMI2-NEXT:    vpshrdvd %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatvar_funnnel_v16i32:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512VLBW-NEXT:    vpandd {{.*}}(%rip){1to16}, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
; AVX512VLBW-NEXT:    vpsrld %xmm3, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vpbroadcastd {{.*#+}} xmm4 = [32,32,32,32]
; AVX512VLBW-NEXT:    vpsubd %xmm2, %xmm4, %xmm4
; AVX512VLBW-NEXT:    vpmovzxdq {{.*#+}} xmm4 = xmm4[0],zero,xmm4[1],zero
; AVX512VLBW-NEXT:    vpslld %xmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vpord %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmd %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqa32 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatvar_funnnel_v16i32:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpbroadcastd %xmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpshrdvd %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %splat = shufflevector <16 x i32> %amt, <16 x i32> undef, <16 x i32> zeroinitializer
  %res = call <16 x i32> @llvm.fshr.v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> %splat)
  ret <16 x i32> %res
}

define <32 x i16> @splatvar_funnnel_v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> %amt) nounwind {
; AVX512F-LABEL: splatvar_funnnel_v32i16:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512F-NEXT:    vpbroadcastw %xmm2, %ymm2
; AVX512F-NEXT:    vpand {{.*}}(%rip), %ymm2, %ymm2
; AVX512F-NEXT:    vpmovzxwq {{.*#+}} xmm5 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
; AVX512F-NEXT:    vpsrlw %xmm5, %ymm4, %ymm6
; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm7 = [16,16,16,16,16,16,16,16]
; AVX512F-NEXT:    vpsubw %xmm2, %xmm7, %xmm7
; AVX512F-NEXT:    vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
; AVX512F-NEXT:    vpsllw %xmm7, %ymm3, %ymm3
; AVX512F-NEXT:    vpor %ymm6, %ymm3, %ymm3
; AVX512F-NEXT:    vpxor %xmm6, %xmm6, %xmm6
; AVX512F-NEXT:    vpcmpeqw %ymm6, %ymm2, %ymm2
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm4, %ymm3, %ymm3
; AVX512F-NEXT:    vpsrlw %xmm5, %ymm1, %ymm4
; AVX512F-NEXT:    vpsllw %xmm7, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm4, %ymm0, %ymm0
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatvar_funnnel_v32i16:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512VL-NEXT:    vpbroadcastw %xmm2, %ymm2
; AVX512VL-NEXT:    vpand {{.*}}(%rip), %ymm2, %ymm2
; AVX512VL-NEXT:    vpmovzxwq {{.*#+}} xmm5 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
; AVX512VL-NEXT:    vpsrlw %xmm5, %ymm4, %ymm6
; AVX512VL-NEXT:    vmovdqa {{.*#+}} xmm7 = [16,16,16,16,16,16,16,16]
; AVX512VL-NEXT:    vpsubw %xmm2, %xmm7, %xmm7
; AVX512VL-NEXT:    vpmovzxwq {{.*#+}} xmm7 = xmm7[0],zero,zero,zero,xmm7[1],zero,zero,zero
; AVX512VL-NEXT:    vpsllw %xmm7, %ymm3, %ymm3
; AVX512VL-NEXT:    vpor %ymm6, %ymm3, %ymm3
; AVX512VL-NEXT:    vpxor %xmm6, %xmm6, %xmm6
; AVX512VL-NEXT:    vpcmpeqw %ymm6, %ymm2, %ymm2
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpsrlw %xmm5, %ymm1, %ymm4
; AVX512VL-NEXT:    vpsllw %xmm7, %ymm0, %ymm0
; AVX512VL-NEXT:    vpor %ymm4, %ymm0, %ymm0
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatvar_funnnel_v32i16:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpbroadcastw %xmm2, %zmm2
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512BW-NEXT:    vpmovzxwq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
; AVX512BW-NEXT:    vpsrlw %xmm3, %zmm1, %zmm3
; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm4 = [16,16,16,16,16,16,16,16]
; AVX512BW-NEXT:    vpsubw %xmm2, %xmm4, %xmm4
; AVX512BW-NEXT:    vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
; AVX512BW-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmw %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatvar_funnnel_v32i16:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpbroadcastw %xmm2, %zmm2
; AVX512VBMI2-NEXT:    vpshrdvw %zmm2, %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatvar_funnnel_v32i16:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpbroadcastw %xmm2, %zmm2
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpmovzxwq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
; AVX512VLBW-NEXT:    vpsrlw %xmm3, %zmm1, %zmm3
; AVX512VLBW-NEXT:    vmovdqa {{.*#+}} xmm4 = [16,16,16,16,16,16,16,16]
; AVX512VLBW-NEXT:    vpsubw %xmm2, %xmm4, %xmm4
; AVX512VLBW-NEXT:    vpmovzxwq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,xmm4[1],zero,zero,zero
; AVX512VLBW-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmw %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatvar_funnnel_v32i16:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpbroadcastw %xmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpshrdvw %zmm2, %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %splat = shufflevector <32 x i16> %amt, <32 x i16> undef, <32 x i32> zeroinitializer
  %res = call <32 x i16> @llvm.fshr.v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> %splat)
  ret <32 x i16> %res
}

define <64 x i8> @splatvar_funnnel_v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> %amt) nounwind {
; AVX512F-LABEL: splatvar_funnnel_v64i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm9
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512F-NEXT:    vpbroadcastb %xmm2, %ymm2
; AVX512F-NEXT:    vpand {{.*}}(%rip), %ymm2, %ymm2
; AVX512F-NEXT:    vpmovzxbq {{.*#+}} xmm5 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512F-NEXT:    vpsrlw %xmm5, %ymm4, %ymm6
; AVX512F-NEXT:    vpcmpeqd %xmm8, %xmm8, %xmm8
; AVX512F-NEXT:    vpsrlw %xmm5, %xmm8, %xmm7
; AVX512F-NEXT:    vpsrlw $8, %xmm7, %xmm7
; AVX512F-NEXT:    vpbroadcastb %xmm7, %ymm7
; AVX512F-NEXT:    vpand %ymm7, %ymm6, %ymm10
; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm3 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512F-NEXT:    vpsubb %xmm2, %xmm3, %xmm3
; AVX512F-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,zero,zero,zero,zero,xmm3[1],zero,zero,zero,zero,zero,zero,zero
; AVX512F-NEXT:    vpsllw %xmm3, %ymm9, %ymm9
; AVX512F-NEXT:    vpsllw %xmm3, %xmm8, %xmm6
; AVX512F-NEXT:    vpbroadcastb %xmm6, %ymm6
; AVX512F-NEXT:    vpand %ymm6, %ymm9, %ymm8
; AVX512F-NEXT:    vpor %ymm10, %ymm8, %ymm8
; AVX512F-NEXT:    vpxor %xmm9, %xmm9, %xmm9
; AVX512F-NEXT:    vpcmpeqb %ymm9, %ymm2, %ymm2
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm4, %ymm8, %ymm4
; AVX512F-NEXT:    vpsrlw %xmm5, %ymm1, %ymm5
; AVX512F-NEXT:    vpand %ymm7, %ymm5, %ymm5
; AVX512F-NEXT:    vpsllw %xmm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm6, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm5, %ymm0, %ymm0
; AVX512F-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatvar_funnnel_v64i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm9
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm4
; AVX512VL-NEXT:    vpbroadcastb %xmm2, %ymm2
; AVX512VL-NEXT:    vpand {{.*}}(%rip), %ymm2, %ymm2
; AVX512VL-NEXT:    vpmovzxbq {{.*#+}} xmm5 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VL-NEXT:    vpsrlw %xmm5, %ymm4, %ymm6
; AVX512VL-NEXT:    vpcmpeqd %xmm8, %xmm8, %xmm8
; AVX512VL-NEXT:    vpsrlw %xmm5, %xmm8, %xmm7
; AVX512VL-NEXT:    vpsrlw $8, %xmm7, %xmm7
; AVX512VL-NEXT:    vpbroadcastb %xmm7, %ymm7
; AVX512VL-NEXT:    vpand %ymm7, %ymm6, %ymm10
; AVX512VL-NEXT:    vmovdqa {{.*#+}} xmm3 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VL-NEXT:    vpsubb %xmm2, %xmm3, %xmm3
; AVX512VL-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm3[0],zero,zero,zero,zero,zero,zero,zero,xmm3[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VL-NEXT:    vpsllw %xmm3, %ymm9, %ymm9
; AVX512VL-NEXT:    vpsllw %xmm3, %xmm8, %xmm6
; AVX512VL-NEXT:    vpbroadcastb %xmm6, %ymm6
; AVX512VL-NEXT:    vpand %ymm6, %ymm9, %ymm8
; AVX512VL-NEXT:    vpor %ymm10, %ymm8, %ymm8
; AVX512VL-NEXT:    vpxor %xmm9, %xmm9, %xmm9
; AVX512VL-NEXT:    vpcmpeqb %ymm9, %ymm2, %ymm2
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm4, %ymm8, %ymm4
; AVX512VL-NEXT:    vpsrlw %xmm5, %ymm1, %ymm5
; AVX512VL-NEXT:    vpand %ymm7, %ymm5, %ymm5
; AVX512VL-NEXT:    vpsllw %xmm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpand %ymm6, %ymm0, %ymm0
; AVX512VL-NEXT:    vpor %ymm5, %ymm0, %ymm0
; AVX512VL-NEXT:    vpblendvb %ymm2, %ymm1, %ymm0, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatvar_funnnel_v64i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpbroadcastb %xmm2, %zmm2
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512BW-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512BW-NEXT:    vpsrlw %xmm3, %zmm1, %zmm4
; AVX512BW-NEXT:    vpcmpeqd %xmm5, %xmm5, %xmm5
; AVX512BW-NEXT:    vpsrlw %xmm3, %xmm5, %xmm3
; AVX512BW-NEXT:    vpsrlw $8, %xmm3, %xmm3
; AVX512BW-NEXT:    vpbroadcastb %xmm3, %zmm3
; AVX512BW-NEXT:    vpandq %zmm3, %zmm4, %zmm3
; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512BW-NEXT:    vpsubb %xmm2, %xmm4, %xmm4
; AVX512BW-NEXT:    vpmovzxbq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,zero,zero,zero,zero,xmm4[1],zero,zero,zero,zero,zero,zero,zero
; AVX512BW-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vpsllw %xmm4, %xmm5, %xmm4
; AVX512BW-NEXT:    vpbroadcastb %xmm4, %zmm4
; AVX512BW-NEXT:    vpandq %zmm4, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512BW-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatvar_funnnel_v64i8:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpbroadcastb %xmm2, %zmm2
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VBMI2-NEXT:    vpsrlw %xmm3, %zmm1, %zmm4
; AVX512VBMI2-NEXT:    vpcmpeqd %xmm5, %xmm5, %xmm5
; AVX512VBMI2-NEXT:    vpsrlw %xmm3, %xmm5, %xmm3
; AVX512VBMI2-NEXT:    vpsrlw $8, %xmm3, %xmm3
; AVX512VBMI2-NEXT:    vpbroadcastb %xmm3, %zmm3
; AVX512VBMI2-NEXT:    vpandq %zmm3, %zmm4, %zmm3
; AVX512VBMI2-NEXT:    vmovdqa {{.*#+}} xmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VBMI2-NEXT:    vpsubb %xmm2, %xmm4, %xmm4
; AVX512VBMI2-NEXT:    vpmovzxbq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,zero,zero,zero,zero,xmm4[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VBMI2-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512VBMI2-NEXT:    vpsllw %xmm4, %xmm5, %xmm4
; AVX512VBMI2-NEXT:    vpbroadcastb %xmm4, %zmm4
; AVX512VBMI2-NEXT:    vpandq %zmm4, %zmm0, %zmm0
; AVX512VBMI2-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VBMI2-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatvar_funnnel_v64i8:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpbroadcastb %xmm2, %zmm2
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VLBW-NEXT:    vpsrlw %xmm3, %zmm1, %zmm4
; AVX512VLBW-NEXT:    vpcmpeqd %xmm5, %xmm5, %xmm5
; AVX512VLBW-NEXT:    vpsrlw %xmm3, %xmm5, %xmm3
; AVX512VLBW-NEXT:    vpsrlw $8, %xmm3, %xmm3
; AVX512VLBW-NEXT:    vpbroadcastb %xmm3, %zmm3
; AVX512VLBW-NEXT:    vpandq %zmm3, %zmm4, %zmm3
; AVX512VLBW-NEXT:    vmovdqa {{.*#+}} xmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VLBW-NEXT:    vpsubb %xmm2, %xmm4, %xmm4
; AVX512VLBW-NEXT:    vpmovzxbq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,zero,zero,zero,zero,xmm4[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VLBW-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vpsllw %xmm4, %xmm5, %xmm4
; AVX512VLBW-NEXT:    vpbroadcastb %xmm4, %zmm4
; AVX512VLBW-NEXT:    vpandq %zmm4, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatvar_funnnel_v64i8:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpbroadcastb %xmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpmovzxbq {{.*#+}} xmm3 = xmm2[0],zero,zero,zero,zero,zero,zero,zero,xmm2[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VLVBMI2-NEXT:    vpsrlw %xmm3, %zmm1, %zmm4
; AVX512VLVBMI2-NEXT:    vpcmpeqd %xmm5, %xmm5, %xmm5
; AVX512VLVBMI2-NEXT:    vpsrlw %xmm3, %xmm5, %xmm3
; AVX512VLVBMI2-NEXT:    vpsrlw $8, %xmm3, %xmm3
; AVX512VLVBMI2-NEXT:    vpbroadcastb %xmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpandq %zmm3, %zmm4, %zmm3
; AVX512VLVBMI2-NEXT:    vmovdqa {{.*#+}} xmm4 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8]
; AVX512VLVBMI2-NEXT:    vpsubb %xmm2, %xmm4, %xmm4
; AVX512VLVBMI2-NEXT:    vpmovzxbq {{.*#+}} xmm4 = xmm4[0],zero,zero,zero,zero,zero,zero,zero,xmm4[1],zero,zero,zero,zero,zero,zero,zero
; AVX512VLVBMI2-NEXT:    vpsllw %xmm4, %zmm0, %zmm0
; AVX512VLVBMI2-NEXT:    vpsllw %xmm4, %xmm5, %xmm4
; AVX512VLVBMI2-NEXT:    vpbroadcastb %xmm4, %zmm4
; AVX512VLVBMI2-NEXT:    vpandq %zmm4, %zmm0, %zmm0
; AVX512VLVBMI2-NEXT:    vporq %zmm3, %zmm0, %zmm0
; AVX512VLVBMI2-NEXT:    vptestnmb %zmm2, %zmm2, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    retq
  %splat = shufflevector <64 x i8> %amt, <64 x i8> undef, <64 x i32> zeroinitializer
  %res = call <64 x i8> @llvm.fshr.v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> %splat)
  ret <64 x i8> %res
}
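; Note (informal): there is no native per-byte variable shift, so the AVX512BW
; lowering above roughly does the following: it broadcasts the amount, masks it
; (presumably to the [0,7] range implied by fshr's modulo semantics), shifts %y
; right and %x left by (8 - amt) with word-granularity vpsrlw/vpsllw, clears the
; bits that bled across byte boundaries using masks built by shifting an all-ones
; register, ORs the two halves, and finally re-selects %y where the amount is zero
; (vptestnmb + masked vmovdqu8). The non-BW paths do the same work per 256-bit
; half, using vpblendvb for the zero-amount select.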

;
; Constant Shifts
;
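; Conceptually, for element width W and %a = %amt urem W, each lane computes
;   %res = (%a == 0) ? %y : ((%y lshr %a) | (%x shl (W - %a)))
; so the expansions below pair a logical right shift of %y with a left shift of %x,
; while VBMI2 targets can instead use the concat-shift instructions (vpshrd*).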

define <8 x i64> @constant_funnnel_v8i64(<8 x i64> %x, <8 x i64> %y) nounwind {
; AVX512F-LABEL: constant_funnnel_v8i64:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpsrlvq {{.*}}(%rip), %zmm1, %zmm1
; AVX512F-NEXT:    vpsllvq {{.*}}(%rip), %zmm0, %zmm0
; AVX512F-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: constant_funnnel_v8i64:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpsrlvq {{.*}}(%rip), %zmm1, %zmm1
; AVX512VL-NEXT:    vpsllvq {{.*}}(%rip), %zmm0, %zmm0
; AVX512VL-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: constant_funnnel_v8i64:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlvq {{.*}}(%rip), %zmm1, %zmm1
; AVX512BW-NEXT:    vpsllvq {{.*}}(%rip), %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: constant_funnnel_v8i64:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvq {{.*}}(%rip), %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: constant_funnnel_v8i64:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlvq {{.*}}(%rip), %zmm1, %zmm1
; AVX512VLBW-NEXT:    vpsllvq {{.*}}(%rip), %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: constant_funnnel_v8i64:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvq {{.*}}(%rip), %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <8 x i64> @llvm.fshr.v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> <i64 4, i64 14, i64 50, i64 60, i64 4, i64 14, i64 50, i64 60>)
  ret <8 x i64> %res
}
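; With all-nonzero constant amounts no zero-amount blend is needed: the non-VBMI2
; paths are just vpsrlvq on %y and vpsllvq on %x by the complementary amounts from
; the constant pool, followed by vporq, and VBMI2 folds the whole sequence into a
; single vpshrdvq. The v16i32 case below is the dword analogue of this pattern.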

define <16 x i32> @constant_funnnel_v16i32(<16 x i32> %x, <16 x i32> %y) nounwind {
; AVX512F-LABEL: constant_funnnel_v16i32:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpsrlvd {{.*}}(%rip), %zmm1, %zmm1
; AVX512F-NEXT:    vpsllvd {{.*}}(%rip), %zmm0, %zmm0
; AVX512F-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: constant_funnnel_v16i32:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpsrlvd {{.*}}(%rip), %zmm1, %zmm1
; AVX512VL-NEXT:    vpsllvd {{.*}}(%rip), %zmm0, %zmm0
; AVX512VL-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: constant_funnnel_v16i32:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlvd {{.*}}(%rip), %zmm1, %zmm1
; AVX512BW-NEXT:    vpsllvd {{.*}}(%rip), %zmm0, %zmm0
; AVX512BW-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: constant_funnnel_v16i32:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvd {{.*}}(%rip), %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: constant_funnnel_v16i32:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlvd {{.*}}(%rip), %zmm1, %zmm1
; AVX512VLBW-NEXT:    vpsllvd {{.*}}(%rip), %zmm0, %zmm0
; AVX512VLBW-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: constant_funnnel_v16i32:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvd {{.*}}(%rip), %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <16 x i32> @llvm.fshr.v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> <i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11>)
  ret <16 x i32> %res
}

define <32 x i16> @constant_funnnel_v32i16(<32 x i16> %x, <32 x i16> %y) nounwind {
; AVX512F-LABEL: constant_funnnel_v32i16:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = <u,32768,16384,8192,4096,2048,1024,512,256,128,64,32,16,8,4,2>
; AVX512F-NEXT:    vpmulhuw %ymm4, %ymm3, %ymm5
; AVX512F-NEXT:    vpmullw %ymm4, %ymm2, %ymm2
; AVX512F-NEXT:    vpor %ymm5, %ymm2, %ymm2
; AVX512F-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0],ymm2[1,2,3,4,5,6,7],ymm3[8],ymm2[9,10,11,12,13,14,15]
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
; AVX512F-NEXT:    vpmulhuw %ymm4, %ymm1, %ymm3
; AVX512F-NEXT:    vpmullw %ymm4, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1,2,3,4,5,6,7],ymm1[8],ymm0[9,10,11,12,13,14,15]
; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; AVX512F-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: constant_funnnel_v32i16:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm4 = <u,32768,16384,8192,4096,2048,1024,512,256,128,64,32,16,8,4,2>
; AVX512VL-NEXT:    vpmulhuw %ymm4, %ymm3, %ymm5
; AVX512VL-NEXT:    vpmullw %ymm4, %ymm2, %ymm2
; AVX512VL-NEXT:    vpor %ymm5, %ymm2, %ymm2
; AVX512VL-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0],ymm2[1,2,3,4,5,6,7],ymm3[8],ymm2[9,10,11,12,13,14,15]
; AVX512VL-NEXT:    vpblendd {{.*#+}} ymm2 = ymm3[0,1,2,3],ymm2[4,5,6,7]
; AVX512VL-NEXT:    vpmulhuw %ymm4, %ymm1, %ymm3
; AVX512VL-NEXT:    vpmullw %ymm4, %ymm0, %ymm0
; AVX512VL-NEXT:    vpor %ymm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1,2,3,4,5,6,7],ymm1[8],ymm0[9,10,11,12,13,14,15]
; AVX512VL-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: constant_funnnel_v32i16:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlvw {{.*}}(%rip), %zmm1, %zmm2
; AVX512BW-NEXT:    vpsllvw {{.*}}(%rip), %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    movl $65537, %eax # imm = 0x10001
; AVX512BW-NEXT:    kmovd %eax, %k1
; AVX512BW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: constant_funnnel_v32i16:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdvw {{.*}}(%rip), %zmm0, %zmm1
; AVX512VBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: constant_funnnel_v32i16:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlvw {{.*}}(%rip), %zmm1, %zmm2
; AVX512VLBW-NEXT:    vpsllvw {{.*}}(%rip), %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512VLBW-NEXT:    movl $65537, %eax # imm = 0x10001
; AVX512VLBW-NEXT:    kmovd %eax, %k1
; AVX512VLBW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: constant_funnnel_v32i16:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdvw {{.*}}(%rip), %zmm0, %zmm1
; AVX512VLVBMI2-NEXT:    vmovdqa64 %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <32 x i16> @llvm.fshr.v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> <i16 0, i16 1, i16 2, i16 3, i16 4, i16 5, i16 6, i16 7, i16 8, i16 9, i16 10, i16 11, i16 12, i16 13, i16 14, i16 15, i16 0, i16 1, i16 2, i16 3, i16 4, i16 5, i16 6, i16 7, i16 8, i16 9, i16 10, i16 11, i16 12, i16 13, i16 14, i16 15>)
  ret <32 x i16> %res
}
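; Without per-element i16 shifts (no AVX512BW), each 256-bit half emulates the
; right shift of %y with vpmulhuw by 2^(16-amt) and the left shift of %x with
; vpmullw by the same constants; lane 0 of each half (the only lane with amount 0)
; is then re-selected from %y via the vpblendw/vpblendd pair. BW targets use
; vpsrlvw/vpsllvw plus a mask blend (0x10001 selects elements 0 and 16), and
; VBMI2 targets use vpshrdvw directly.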

define <64 x i8> @constant_funnnel_v64i8(<64 x i8> %x, <64 x i8> %y) nounwind {
; AVX512F-LABEL: constant_funnnel_v64i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512F-NEXT:    vpsllw $4, %ymm3, %ymm4
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm5 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512F-NEXT:    vpand %ymm5, %ymm4, %ymm4
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512F-NEXT:    # ymm6 = mem[0,1,0,1]
; AVX512F-NEXT:    vpblendvb %ymm6, %ymm4, %ymm3, %ymm3
; AVX512F-NEXT:    vpsllw $2, %ymm3, %ymm4
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm7 = [252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252]
; AVX512F-NEXT:    vpand %ymm7, %ymm4, %ymm4
; AVX512F-NEXT:    vpaddb %ymm6, %ymm6, %ymm8
; AVX512F-NEXT:    vpblendvb %ymm8, %ymm4, %ymm3, %ymm3
; AVX512F-NEXT:    vpaddb %ymm3, %ymm3, %ymm4
; AVX512F-NEXT:    vpaddb %ymm8, %ymm8, %ymm9
; AVX512F-NEXT:    vpblendvb %ymm9, %ymm4, %ymm3, %ymm3
; AVX512F-NEXT:    vpxor %xmm4, %xmm4, %xmm4
; AVX512F-NEXT:    vpunpckhbw {{.*#+}} ymm10 = ymm2[8],ymm4[8],ymm2[9],ymm4[9],ymm2[10],ymm4[10],ymm2[11],ymm4[11],ymm2[12],ymm4[12],ymm2[13],ymm4[13],ymm2[14],ymm4[14],ymm2[15],ymm4[15],ymm2[24],ymm4[24],ymm2[25],ymm4[25],ymm2[26],ymm4[26],ymm2[27],ymm4[27],ymm2[28],ymm4[28],ymm2[29],ymm4[29],ymm2[30],ymm4[30],ymm2[31],ymm4[31]
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [256,2,4,8,16,32,64,128,256,2,4,8,16,32,64,128]
; AVX512F-NEXT:    # ymm11 = mem[0,1,0,1]
; AVX512F-NEXT:    vpmullw %ymm11, %ymm10, %ymm10
; AVX512F-NEXT:    vpsrlw $8, %ymm10, %ymm10
; AVX512F-NEXT:    vpunpcklbw {{.*#+}} ymm12 = ymm2[0],ymm4[0],ymm2[1],ymm4[1],ymm2[2],ymm4[2],ymm2[3],ymm4[3],ymm2[4],ymm4[4],ymm2[5],ymm4[5],ymm2[6],ymm4[6],ymm2[7],ymm4[7],ymm2[16],ymm4[16],ymm2[17],ymm4[17],ymm2[18],ymm4[18],ymm2[19],ymm4[19],ymm2[20],ymm4[20],ymm2[21],ymm4[21],ymm2[22],ymm4[22],ymm2[23],ymm4[23]
; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm13 = [256,128,64,32,16,8,4,2,256,128,64,32,16,8,4,2]
; AVX512F-NEXT:    # ymm13 = mem[0,1,0,1]
; AVX512F-NEXT:    vpmullw %ymm13, %ymm12, %ymm12
; AVX512F-NEXT:    vpsrlw $8, %ymm12, %ymm12
; AVX512F-NEXT:    vpackuswb %ymm10, %ymm12, %ymm10
; AVX512F-NEXT:    vpor %ymm10, %ymm3, %ymm3
; AVX512F-NEXT:    vpbroadcastq {{.*#+}} ymm10 = [18446744073709551360,18446744073709551360,18446744073709551360,18446744073709551360]
; AVX512F-NEXT:    vpblendvb %ymm10, %ymm3, %ymm2, %ymm2
; AVX512F-NEXT:    vpsllw $4, %ymm0, %ymm3
; AVX512F-NEXT:    vpand %ymm5, %ymm3, %ymm3
; AVX512F-NEXT:    vpblendvb %ymm6, %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpsllw $2, %ymm0, %ymm3
; AVX512F-NEXT:    vpand %ymm7, %ymm3, %ymm3
; AVX512F-NEXT:    vpblendvb %ymm8, %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpaddb %ymm0, %ymm0, %ymm3
; AVX512F-NEXT:    vpblendvb %ymm9, %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpunpckhbw {{.*#+}} ymm3 = ymm1[8],ymm4[8],ymm1[9],ymm4[9],ymm1[10],ymm4[10],ymm1[11],ymm4[11],ymm1[12],ymm4[12],ymm1[13],ymm4[13],ymm1[14],ymm4[14],ymm1[15],ymm4[15],ymm1[24],ymm4[24],ymm1[25],ymm4[25],ymm1[26],ymm4[26],ymm1[27],ymm4[27],ymm1[28],ymm4[28],ymm1[29],ymm4[29],ymm1[30],ymm4[30],ymm1[31],ymm4[31]
; AVX512F-NEXT:    vpmullw %ymm11, %ymm3, %ymm3
; AVX512F-NEXT:    vpsrlw $8, %ymm3, %ymm3
; AVX512F-NEXT:    vpunpcklbw {{.*#+}} ymm4 = ymm1[0],ymm4[0],ymm1[1],ymm4[1],ymm1[2],ymm4[2],ymm1[3],ymm4[3],ymm1[4],ymm4[4],ymm1[5],ymm4[5],ymm1[6],ymm4[6],ymm1[7],ymm4[7],ymm1[16],ymm4[16],ymm1[17],ymm4[17],ymm1[18],ymm4[18],ymm1[19],ymm4[19],ymm1[20],ymm4[20],ymm1[21],ymm4[21],ymm1[22],ymm4[22],ymm1[23],ymm4[23]
; AVX512F-NEXT:    vpmullw %ymm13, %ymm4, %ymm4
; AVX512F-NEXT:    vpsrlw $8, %ymm4, %ymm4
; AVX512F-NEXT:    vpackuswb %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpor %ymm3, %ymm0, %ymm0
; AVX512F-NEXT:    vpblendvb %ymm10, %ymm0, %ymm1, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: constant_funnnel_v64i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm3
; AVX512VL-NEXT:    vpsllw $4, %ymm3, %ymm4
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm5 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512VL-NEXT:    vpand %ymm5, %ymm4, %ymm4
; AVX512VL-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512VL-NEXT:    # ymm6 = mem[0,1,0,1]
; AVX512VL-NEXT:    vpblendvb %ymm6, %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpsllw $2, %ymm3, %ymm4
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm7 = [252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252,252]
; AVX512VL-NEXT:    vpand %ymm7, %ymm4, %ymm4
; AVX512VL-NEXT:    vpaddb %ymm6, %ymm6, %ymm8
; AVX512VL-NEXT:    vpblendvb %ymm8, %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpaddb %ymm3, %ymm3, %ymm4
; AVX512VL-NEXT:    vpaddb %ymm8, %ymm8, %ymm9
; AVX512VL-NEXT:    vpblendvb %ymm9, %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpunpckhbw {{.*#+}} ymm4 = ymm2[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
; AVX512VL-NEXT:    vpsrlw $8, %ymm4, %ymm4
; AVX512VL-NEXT:    vbroadcasti128 {{.*#+}} ymm10 = [256,2,4,8,16,32,64,128,256,2,4,8,16,32,64,128]
; AVX512VL-NEXT:    # ymm10 = mem[0,1,0,1]
; AVX512VL-NEXT:    vpmullw %ymm10, %ymm4, %ymm4
; AVX512VL-NEXT:    vpsrlw $8, %ymm4, %ymm4
; AVX512VL-NEXT:    vpunpcklbw {{.*#+}} ymm11 = ymm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
; AVX512VL-NEXT:    vpsrlw $8, %ymm11, %ymm11
; AVX512VL-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [256,128,64,32,16,8,4,2,256,128,64,32,16,8,4,2]
; AVX512VL-NEXT:    # ymm12 = mem[0,1,0,1]
; AVX512VL-NEXT:    vpmullw %ymm12, %ymm11, %ymm11
; AVX512VL-NEXT:    vpsrlw $8, %ymm11, %ymm11
; AVX512VL-NEXT:    vpackuswb %ymm4, %ymm11, %ymm4
; AVX512VL-NEXT:    vpor %ymm4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpbroadcastq {{.*#+}} ymm4 = [18446744073709551360,18446744073709551360,18446744073709551360,18446744073709551360]
; AVX512VL-NEXT:    vpblendvb %ymm4, %ymm3, %ymm2, %ymm2
; AVX512VL-NEXT:    vpsllw $4, %ymm0, %ymm3
; AVX512VL-NEXT:    vpand %ymm5, %ymm3, %ymm3
; AVX512VL-NEXT:    vpblendvb %ymm6, %ymm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpsllw $2, %ymm0, %ymm3
; AVX512VL-NEXT:    vpand %ymm7, %ymm3, %ymm3
; AVX512VL-NEXT:    vpblendvb %ymm8, %ymm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpaddb %ymm0, %ymm0, %ymm3
; AVX512VL-NEXT:    vpblendvb %ymm9, %ymm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpunpckhbw {{.*#+}} ymm3 = ymm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
; AVX512VL-NEXT:    vpsrlw $8, %ymm3, %ymm3
; AVX512VL-NEXT:    vpmullw %ymm10, %ymm3, %ymm3
; AVX512VL-NEXT:    vpsrlw $8, %ymm3, %ymm3
; AVX512VL-NEXT:    vpunpcklbw {{.*#+}} ymm5 = ymm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
; AVX512VL-NEXT:    vpsrlw $8, %ymm5, %ymm5
; AVX512VL-NEXT:    vpmullw %ymm12, %ymm5, %ymm5
; AVX512VL-NEXT:    vpsrlw $8, %ymm5, %ymm5
; AVX512VL-NEXT:    vpackuswb %ymm3, %ymm5, %ymm3
; AVX512VL-NEXT:    vpor %ymm3, %ymm0, %ymm0
; AVX512VL-NEXT:    vpblendvb %ymm4, %ymm0, %ymm1, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: constant_funnnel_v64i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512BW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512BW-NEXT:    vpsllw $4, %zmm0, %zmm3
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512BW-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512BW-NEXT:    vpsllw $2, %zmm0, %zmm3
; AVX512BW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512BW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512BW-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512BW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512BW-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512BW-NEXT:    vpunpckhbw {{.*#+}} zmm2 = zmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512BW-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512BW-NEXT:    vpsllvw {{.*}}(%rip), %zmm2, %zmm2
; AVX512BW-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512BW-NEXT:    vpunpcklbw {{.*#+}} zmm3 = zmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
; AVX512BW-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512BW-NEXT:    vpsllvw {{.*}}(%rip), %zmm3, %zmm3
; AVX512BW-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512BW-NEXT:    vpackuswb %zmm2, %zmm3, %zmm2
; AVX512BW-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512BW-NEXT:    movabsq $72340172838076673, %rax # imm = 0x101010101010101
; AVX512BW-NEXT:    kmovq %rax, %k1
; AVX512BW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: constant_funnnel_v64i8:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512VBMI2-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512VBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VBMI2-NEXT:    vpsllw $4, %zmm0, %zmm3
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    vpsllw $2, %zmm0, %zmm3
; AVX512VBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VBMI2-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    vpunpckhbw {{.*#+}} zmm2 = zmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512VBMI2-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpsllvw {{.*}}(%rip), %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VBMI2-NEXT:    vpunpcklbw {{.*#+}} zmm3 = zmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
; AVX512VBMI2-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vpsllvw {{.*}}(%rip), %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VBMI2-NEXT:    vpackuswb %zmm2, %zmm3, %zmm2
; AVX512VBMI2-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512VBMI2-NEXT:    movabsq $72340172838076673, %rax # imm = 0x101010101010101
; AVX512VBMI2-NEXT:    kmovq %rax, %k1
; AVX512VBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: constant_funnnel_v64i8:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512VLBW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512VLBW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLBW-NEXT:    vpsllw $4, %zmm0, %zmm3
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLBW-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VLBW-NEXT:    vpsllw $2, %zmm0, %zmm3
; AVX512VLBW-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLBW-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VLBW-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLBW-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VLBW-NEXT:    vpunpckhbw {{.*#+}} zmm2 = zmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512VLBW-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsllvw {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VLBW-NEXT:    vpunpcklbw {{.*#+}} zmm3 = zmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
; AVX512VLBW-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VLBW-NEXT:    vpsllvw {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLBW-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VLBW-NEXT:    vpackuswb %zmm2, %zmm3, %zmm2
; AVX512VLBW-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512VLBW-NEXT:    movabsq $72340172838076673, %rax # imm = 0x101010101010101
; AVX512VLBW-NEXT:    kmovq %rax, %k1
; AVX512VLBW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: constant_funnnel_v64i8:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536,57344,41152,24704,8256,8192,24640,41088,57536]
; AVX512VLVBMI2-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLVBMI2-NEXT:    vpsllw $4, %zmm0, %zmm3
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    vpsllw $2, %zmm0, %zmm3
; AVX512VLVBMI2-NEXT:    vpandq {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm3, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    vpaddb %zmm2, %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpmovb2m %zmm2, %k1
; AVX512VLVBMI2-NEXT:    vpaddb %zmm0, %zmm0, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    vpunpckhbw {{.*#+}} zmm2 = zmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
; AVX512VLVBMI2-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpsllvw {{.*}}(%rip), %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpsrlw $8, %zmm2, %zmm2
; AVX512VLVBMI2-NEXT:    vpunpcklbw {{.*#+}} zmm3 = zmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
; AVX512VLVBMI2-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpsllvw {{.*}}(%rip), %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpsrlw $8, %zmm3, %zmm3
; AVX512VLVBMI2-NEXT:    vpackuswb %zmm2, %zmm3, %zmm2
; AVX512VLVBMI2-NEXT:    vporq %zmm2, %zmm0, %zmm0
; AVX512VLVBMI2-NEXT:    movabsq $72340172838076673, %rax # imm = 0x101010101010101
; AVX512VLVBMI2-NEXT:    kmovq %rax, %k1
; AVX512VLVBMI2-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
; AVX512VLVBMI2-NEXT:    retq
  %res = call <64 x i8> @llvm.fshr.v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> <i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7, i8 8, i8 7, i8 6, i8 5, i8 4, i8 3, i8 2, i8 1, i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7, i8 8, i8 7, i8 6, i8 5, i8 4, i8 3, i8 2, i8 1, i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7, i8 8, i8 7, i8 6, i8 5, i8 4, i8 3, i8 2, i8 1, i8 0, i8 1, i8 2, i8 3, i8 4, i8 5, i8 6, i8 7, i8 8, i8 7, i8 6, i8 5, i8 4, i8 3, i8 2, i8 1>)
  ret <64 x i8> %res
}
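; For non-uniform byte amounts the left shift of %x is built as a ladder of
; vpsllw $4 / vpsllw $2 / vpaddb steps gated by the sign bits of a doubling
; control vector (vpblendvb on the 256-bit paths, vpmovb2m + masked moves on BW);
; the right shift of %y widens bytes to words, shifts via vpmullw/vpsllvw, and
; narrows back with vpsrlw $8 + vpackuswb. Bytes whose amount is 0 modulo 8 are
; re-selected from %y at the end (the 0xFF..FF00 vpblendvb mask per qword, or the
; 0x0101...01 k-mask on BW targets).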

;
; Uniform Constant Shifts
;
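; With a uniform, nonzero constant amount the lowering degenerates to a pair of
; immediate shifts plus an OR, and VBMI2 targets can use the immediate
; concat-shift forms vpshrdq/vpshrdd/vpshrdw.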

define <8 x i64> @splatconstant_funnnel_v8i64(<8 x i64> %x, <8 x i64> %y) nounwind {
; AVX512F-LABEL: splatconstant_funnnel_v8i64:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpsrlq $14, %zmm1, %zmm1
; AVX512F-NEXT:    vpsllq $50, %zmm0, %zmm0
; AVX512F-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatconstant_funnnel_v8i64:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpsrlq $14, %zmm1, %zmm1
; AVX512VL-NEXT:    vpsllq $50, %zmm0, %zmm0
; AVX512VL-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatconstant_funnnel_v8i64:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlq $14, %zmm1, %zmm1
; AVX512BW-NEXT:    vpsllq $50, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatconstant_funnnel_v8i64:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdq $14, %zmm0, %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatconstant_funnnel_v8i64:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlq $14, %zmm1, %zmm1
; AVX512VLBW-NEXT:    vpsllq $50, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatconstant_funnnel_v8i64:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdq $14, %zmm0, %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <8 x i64> @llvm.fshr.v8i64(<8 x i64> %x, <8 x i64> %y, <8 x i64> <i64 14, i64 14, i64 14, i64 14, i64 14, i64 14, i64 14, i64 14>)
  ret <8 x i64> %res
}

define <16 x i32> @splatconstant_funnnel_v16i32(<16 x i32> %x, <16 x i32> %y) nounwind {
; AVX512F-LABEL: splatconstant_funnnel_v16i32:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vpsrld $4, %zmm1, %zmm1
; AVX512F-NEXT:    vpslld $28, %zmm0, %zmm0
; AVX512F-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatconstant_funnnel_v16i32:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vpsrld $4, %zmm1, %zmm1
; AVX512VL-NEXT:    vpslld $28, %zmm0, %zmm0
; AVX512VL-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatconstant_funnnel_v16i32:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrld $4, %zmm1, %zmm1
; AVX512BW-NEXT:    vpslld $28, %zmm0, %zmm0
; AVX512BW-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatconstant_funnnel_v16i32:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdd $4, %zmm0, %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatconstant_funnnel_v16i32:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrld $4, %zmm1, %zmm1
; AVX512VLBW-NEXT:    vpslld $28, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vpord %zmm1, %zmm0, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatconstant_funnnel_v16i32:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdd $4, %zmm0, %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <16 x i32> @llvm.fshr.v16i32(<16 x i32> %x, <16 x i32> %y, <16 x i32> <i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4>)
  ret <16 x i32> %res
}

define <32 x i16> @splatconstant_funnnel_v32i16(<32 x i16> %x, <32 x i16> %y) nounwind {
; AVX512F-LABEL: splatconstant_funnnel_v32i16:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512F-NEXT:    vpsrlw $7, %ymm3, %ymm3
; AVX512F-NEXT:    vpsllw $9, %ymm2, %ymm2
; AVX512F-NEXT:    vpor %ymm3, %ymm2, %ymm2
; AVX512F-NEXT:    vpsrlw $7, %ymm1, %ymm1
; AVX512F-NEXT:    vpsllw $9, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatconstant_funnnel_v32i16:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512VL-NEXT:    vpsrlw $7, %ymm3, %ymm3
; AVX512VL-NEXT:    vpsllw $9, %ymm2, %ymm2
; AVX512VL-NEXT:    vpor %ymm3, %ymm2, %ymm2
; AVX512VL-NEXT:    vpsrlw $7, %ymm1, %ymm1
; AVX512VL-NEXT:    vpsllw $9, %ymm0, %ymm0
; AVX512VL-NEXT:    vpor %ymm1, %ymm0, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatconstant_funnnel_v32i16:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsrlw $7, %zmm1, %zmm1
; AVX512BW-NEXT:    vpsllw $9, %zmm0, %zmm0
; AVX512BW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatconstant_funnnel_v32i16:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpshrdw $7, %zmm0, %zmm1, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatconstant_funnnel_v32i16:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsrlw $7, %zmm1, %zmm1
; AVX512VLBW-NEXT:    vpsllw $9, %zmm0, %zmm0
; AVX512VLBW-NEXT:    vporq %zmm1, %zmm0, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatconstant_funnnel_v32i16:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpshrdw $7, %zmm0, %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <32 x i16> @llvm.fshr.v32i16(<32 x i16> %x, <32 x i16> %y, <32 x i16> <i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7>)
  ret <32 x i16> %res
}
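; As elsewhere in this file, targets without AVX512BW split the 512-bit i16/i8
; vectors into 256-bit halves (vextracti64x4 / vinserti64x4) and use AVX2-style
; shifts on each half, while BW targets operate on the full zmm register.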

define <64 x i8> @splatconstant_funnnel_v64i8(<64 x i8> %x, <64 x i8> %y) nounwind {
; AVX512F-LABEL: splatconstant_funnnel_v64i8:
; AVX512F:       # %bb.0:
; AVX512F-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512F-NEXT:    vpsrlw $4, %ymm3, %ymm3
; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm4 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512F-NEXT:    vpandn %ymm3, %ymm4, %ymm3
; AVX512F-NEXT:    vpsllw $4, %ymm2, %ymm2
; AVX512F-NEXT:    vpand %ymm4, %ymm2, %ymm2
; AVX512F-NEXT:    vpor %ymm3, %ymm2, %ymm2
; AVX512F-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512F-NEXT:    vpandn %ymm1, %ymm4, %ymm1
; AVX512F-NEXT:    vpsllw $4, %ymm0, %ymm0
; AVX512F-NEXT:    vpand %ymm4, %ymm0, %ymm0
; AVX512F-NEXT:    vpor %ymm1, %ymm0, %ymm0
; AVX512F-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT:    retq
;
; AVX512VL-LABEL: splatconstant_funnnel_v64i8:
; AVX512VL:       # %bb.0:
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm0, %ymm2
; AVX512VL-NEXT:    vextracti64x4 $1, %zmm1, %ymm3
; AVX512VL-NEXT:    vpsrlw $4, %ymm3, %ymm3
; AVX512VL-NEXT:    vpsllw $4, %ymm2, %ymm2
; AVX512VL-NEXT:    vmovdqa {{.*#+}} ymm4 = [240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240,240]
; AVX512VL-NEXT:    vpternlogq $226, %ymm3, %ymm4, %ymm2
; AVX512VL-NEXT:    vpsrlw $4, %ymm1, %ymm1
; AVX512VL-NEXT:    vpsllw $4, %ymm0, %ymm0
; AVX512VL-NEXT:    vpternlogq $226, %ymm1, %ymm4, %ymm0
; AVX512VL-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512VL-NEXT:    retq
;
; AVX512BW-LABEL: splatconstant_funnnel_v64i8:
; AVX512BW:       # %bb.0:
; AVX512BW-NEXT:    vpsllw $4, %zmm0, %zmm2
; AVX512BW-NEXT:    vpsrlw $4, %zmm1, %zmm0
; AVX512BW-NEXT:    vpternlogq $216, {{.*}}(%rip), %zmm2, %zmm0
; AVX512BW-NEXT:    retq
;
; AVX512VBMI2-LABEL: splatconstant_funnnel_v64i8:
; AVX512VBMI2:       # %bb.0:
; AVX512VBMI2-NEXT:    vpsllw $4, %zmm0, %zmm2
; AVX512VBMI2-NEXT:    vpsrlw $4, %zmm1, %zmm0
; AVX512VBMI2-NEXT:    vpternlogq $216, {{.*}}(%rip), %zmm2, %zmm0
; AVX512VBMI2-NEXT:    retq
;
; AVX512VLBW-LABEL: splatconstant_funnnel_v64i8:
; AVX512VLBW:       # %bb.0:
; AVX512VLBW-NEXT:    vpsllw $4, %zmm0, %zmm2
; AVX512VLBW-NEXT:    vpsrlw $4, %zmm1, %zmm0
; AVX512VLBW-NEXT:    vpternlogq $216, {{.*}}(%rip), %zmm2, %zmm0
; AVX512VLBW-NEXT:    retq
;
; AVX512VLVBMI2-LABEL: splatconstant_funnnel_v64i8:
; AVX512VLVBMI2:       # %bb.0:
; AVX512VLVBMI2-NEXT:    vpsllw $4, %zmm0, %zmm2
; AVX512VLVBMI2-NEXT:    vpsrlw $4, %zmm1, %zmm0
; AVX512VLVBMI2-NEXT:    vpternlogq $216, {{.*}}(%rip), %zmm2, %zmm0
; AVX512VLVBMI2-NEXT:    retq
  %res = call <64 x i8> @llvm.fshr.v64i8(<64 x i8> %x, <64 x i8> %y, <64 x i8> <i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4, i8 4>)
  ret <64 x i8> %res
}
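; In the uniform byte case the BW targets combine the two shifted halves with a
; single vpternlogq: immediate 216 (0xD8) acts as a bitwise select, taking the
; vpsllw result where the constant-pool mask (presumably the same 0xF0 byte mask
; used by the 256-bit paths) is set and the vpsrlw result elsewhere.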