llvm.org GIT mirror llvm / aea5485
[x86] avoid printing unnecessary sign bits of hex immediates in asm comments (PR20347) It would be better to check the valid/expected size of the immediate operand, but this is generally better than what we print right now. Differential Revision: http://reviews.llvm.org/D20385 git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@271114 91177308-0d34-0410-b5e6-96231b3b80d8 Sanjay Patel 3 years ago
12 changed file(s) with 79 addition(s) and 70 deletion(s). Raw diff Collapse all Expand all
164164 if (Op.isReg()) {
165165 printRegName(O, Op.getReg());
166166 } else if (Op.isImm()) {
167 // Print X86 immediates as signed values.
167 // Print immediates as signed values.
168168 int64_t Imm = Op.getImm();
169169     O << markup("<imm:") << '$' << formatImm(Imm) << markup(">");
170
171 // TODO: This should be in a helper function in the base class, so it can
172 // be used by other printers.
170173
171174 // If there are no instruction-specific comments, add a comment clarifying
172175 // the hex value of the immediate operand when it isn't in the range
173176 // [-256,255].
174 if (CommentStream && !HasCustomInstComment && (Imm > 255 || Imm < -256))
175 *CommentStream << format("imm = 0x%" PRIX64 "\n", (uint64_t)Imm);
176
177 if (CommentStream && !HasCustomInstComment && (Imm > 255 || Imm < -256)) {
178 // Don't print unnecessary hex sign bits.
179 if (Imm == (int16_t)(Imm))
180 *CommentStream << format("imm = 0x%" PRIX16 "\n", (uint16_t)Imm);
181 else if (Imm == (int32_t)(Imm))
182 *CommentStream << format("imm = 0x%" PRIX32 "\n", (uint32_t)Imm);
183 else
184 *CommentStream << format("imm = 0x%" PRIX64 "\n", (uint64_t)Imm);
185 }
177186 } else {
178187 assert(Op.isExpr() && "unknown operand kind in printOperand");
179188     O << markup("<imm:") << '$';
4141 define void @mp_11193(<8 x float> * nocapture %aFOO, <8 x float>* nocapture %RET) nounwind {
4242 ; CHECK-LABEL: mp_11193:
4343 ; CHECK: # BB#0: # %allocas
44 ; CHECK-NEXT: movl $-1082130432, (%rsi) # imm = 0xFFFFFFFFBF800000
44 ; CHECK-NEXT: movl $-1082130432, (%rsi) # imm = 0xBF800000
4545 ; CHECK-NEXT: retq
4646 allocas:
4747 %bincmp = fcmp olt <8 x float> ,
121121 ; ALL-NEXT: testl %edx, %edx
122122 ; ALL-NEXT: movl $1, %eax
123123 ; ALL-NEXT: cmovel %eax, %edx
124 ; ALL-NEXT: cmpl $-2147483648, %esi ## imm = 0xFFFFFFFF80000000
124 ; ALL-NEXT: cmpl $-2147483648, %esi ## imm = 0x80000000
125125 ; ALL-NEXT: cmovnel %edx, %eax
126126 ; ALL-NEXT: cmpl $-1, %edi
127127 ; ALL-NEXT: cmovnel %edx, %eax
2525 define <2 x i16> @fold_v2i16() {
2626 ; CHECK-LABEL: fold_v2i16:
2727 ; CHECK: # BB#0:
28 ; CHECK-NEXT: movw $-4096, %ax # imm = 0xFFFFFFFFFFFFF000
28 ; CHECK-NEXT: movw $-4096, %ax # imm = 0xF000
2929 ; CHECK-NEXT: movw $240, %dx
3030 ; CHECK-NEXT: retl
3131 %b = call <2 x i16> @llvm.bitreverse.v2i16(<2 x i16> )
55 ; CHECK: # BB#0: # %entry
66 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %eax
77 ; CHECK-NEXT: shrl $2, %eax
8 ; CHECK-NEXT: orl $-67108864, %eax # imm = 0xFFFFFFFFFC000000
8 ; CHECK-NEXT: orl $-67108864, %eax # imm = 0xFC000000
99 ; CHECK-NEXT: movl $1073741823, %edx # imm = 0x3FFFFFFF
1010 ; CHECK-NEXT: retl
1111 entry:
44 ; CHECK-LABEL: test1:
55 ; CHECK: # BB#0:
66 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
7 ; CHECK-NEXT: movl $-2139062143, %edx # imm = 0xFFFFFFFF80808081
7 ; CHECK-NEXT: movl $-2139062143, %edx # imm = 0x80808081
88 ; CHECK-NEXT: movl %ecx, %eax
99 ; CHECK-NEXT: imull %edx
1010 ; CHECK-NEXT: addl %ecx, %edx
4343 ; CHECK-LABEL: test3:
4444 ; CHECK: # BB#0:
4545 ; CHECK-NEXT: movl {{[0-9]+}}(%esp), %ecx
46 ; CHECK-NEXT: movl $-2139062143, %edx # imm = 0xFFFFFFFF80808081
46 ; CHECK-NEXT: movl $-2139062143, %edx # imm = 0x80808081
4747 ; CHECK-NEXT: movl %ecx, %eax
4848 ; CHECK-NEXT: mull %edx
4949 ; CHECK-NEXT: shrl $7, %edx
1515 ; SSE2-NEXT: andq $-64, %rsp
1616 ; SSE2-NEXT: subq $128, %rsp
1717 ; SSE2-NEXT: pxor %xmm0, %xmm0
18 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
18 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFC00
1919 ; SSE2-NEXT: pxor %xmm1, %xmm1
2020 ; SSE2-NEXT: pxor %xmm3, %xmm3
2121 ; SSE2-NEXT: pxor %xmm2, %xmm2
5757 ; AVX2-NEXT: andq $-64, %rsp
5858 ; AVX2-NEXT: subq $128, %rsp
5959 ; AVX2-NEXT: vpxor %ymm0, %ymm0, %ymm0
60 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
60 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFC00
6161 ; AVX2-NEXT: vpxor %ymm1, %ymm1, %ymm1
6262 ; AVX2-NEXT: .p2align 4, 0x90
6363 ; AVX2-NEXT: .LBB0_1: # %vector.body
8888 ; AVX512F-LABEL: sad_16i8:
8989 ; AVX512F: # BB#0: # %entry
9090 ; AVX512F-NEXT: vpxord %zmm0, %zmm0, %zmm0
91 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
91 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFC00
9292 ; AVX512F-NEXT: .p2align 4, 0x90
9393 ; AVX512F-NEXT: .LBB0_1: # %vector.body
9494 ; AVX512F-NEXT: # =>This Inner Loop Header: Depth=1
115115 ; AVX512BW-LABEL: sad_16i8:
116116 ; AVX512BW: # BB#0: # %entry
117117 ; AVX512BW-NEXT: vpxord %zmm0, %zmm0, %zmm0
118 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
118 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFC00
119119 ; AVX512BW-NEXT: .p2align 4, 0x90
120120 ; AVX512BW-NEXT: .LBB0_1: # %vector.body
121121 ; AVX512BW-NEXT: # =>This Inner Loop Header: Depth=1
179179 ; SSE2-LABEL: sad_32i8:
180180 ; SSE2: # BB#0: # %entry
181181 ; SSE2-NEXT: pxor %xmm12, %xmm12
182 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
182 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFC00
183183 ; SSE2-NEXT: pxor %xmm4, %xmm4
184184 ; SSE2-NEXT: pxor %xmm2, %xmm2
185185 ; SSE2-NEXT: pxor %xmm0, %xmm0
315315 ; AVX2-NEXT: andq $-128, %rsp
316316 ; AVX2-NEXT: subq $256, %rsp # imm = 0x100
317317 ; AVX2-NEXT: vpxor %ymm0, %ymm0, %ymm0
318 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
318 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFC00
319319 ; AVX2-NEXT: vpxor %ymm1, %ymm1, %ymm1
320320 ; AVX2-NEXT: vpxor %ymm2, %ymm2, %ymm2
321321 ; AVX2-NEXT: vpxor %ymm3, %ymm3, %ymm3
357357 ; AVX512F-NEXT: andq $-128, %rsp
358358 ; AVX512F-NEXT: subq $256, %rsp # imm = 0x100
359359 ; AVX512F-NEXT: vpxord %zmm0, %zmm0, %zmm0
360 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
360 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFC00
361361 ; AVX512F-NEXT: vpxord %zmm1, %zmm1, %zmm1
362362 ; AVX512F-NEXT: .p2align 4, 0x90
363363 ; AVX512F-NEXT: .LBB1_1: # %vector.body
396396 ; AVX512BW-NEXT: andq $-128, %rsp
397397 ; AVX512BW-NEXT: subq $256, %rsp # imm = 0x100
398398 ; AVX512BW-NEXT: vpxord %zmm0, %zmm0, %zmm0
399 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
399 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFC00
400400 ; AVX512BW-NEXT: vpxord %zmm1, %zmm1, %zmm1
401401 ; AVX512BW-NEXT: .p2align 4, 0x90
402402 ; AVX512BW-NEXT: .LBB1_1: # %vector.body
471471 ; SSE2: # BB#0: # %entry
472472 ; SSE2-NEXT: subq $232, %rsp
473473 ; SSE2-NEXT: pxor %xmm8, %xmm8
474 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
474 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFC00
475475 ; SSE2-NEXT: pxor %xmm5, %xmm5
476476 ; SSE2-NEXT: pxor %xmm2, %xmm2
477477 ; SSE2-NEXT: pxor %xmm1, %xmm1
763763 ; AVX2-LABEL: sad_avx64i8:
764764 ; AVX2: # BB#0: # %entry
765765 ; AVX2-NEXT: vpxor %ymm0, %ymm0, %ymm0
766 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
766 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFC00
767767 ; AVX2-NEXT: vpxor %ymm2, %ymm2, %ymm2
768768 ; AVX2-NEXT: vpxor %ymm1, %ymm1, %ymm1
769769 ; AVX2-NEXT: vpxor %ymm3, %ymm3, %ymm3
839839 ; AVX512F-LABEL: sad_avx64i8:
840840 ; AVX512F: # BB#0: # %entry
841841 ; AVX512F-NEXT: vpxord %zmm0, %zmm0, %zmm0
842 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
842 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFC00
843843 ; AVX512F-NEXT: vpxord %zmm1, %zmm1, %zmm1
844844 ; AVX512F-NEXT: vpxord %zmm2, %zmm2, %zmm2
845845 ; AVX512F-NEXT: vpxord %zmm3, %zmm3, %zmm3
892892 ; AVX512BW-NEXT: andq $-256, %rsp
893893 ; AVX512BW-NEXT: subq $512, %rsp # imm = 0x200
894894 ; AVX512BW-NEXT: vpxord %zmm0, %zmm0, %zmm0
895 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
895 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFC00
896896 ; AVX512BW-NEXT: vpxord %zmm2, %zmm2, %zmm2
897897 ; AVX512BW-NEXT: vpxord %zmm3, %zmm3, %zmm3
898898 ; AVX512BW-NEXT: vpxord %zmm1, %zmm1, %zmm1
975975 ; SSE2-LABEL: sad_2i8:
976976 ; SSE2: # BB#0: # %entry
977977 ; SSE2-NEXT: pxor %xmm0, %xmm0
978 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
978 ; SSE2-NEXT: movq $-1024, %rax # imm = 0xFC00
979979 ; SSE2-NEXT: movl $65535, %ecx # imm = 0xFFFF
980980 ; SSE2-NEXT: movd %ecx, %xmm1
981981 ; SSE2-NEXT: .p2align 4, 0x90
998998 ; AVX2-LABEL: sad_2i8:
999999 ; AVX2: # BB#0: # %entry
10001000 ; AVX2-NEXT: vpxor %xmm0, %xmm0, %xmm0
1001 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
1001 ; AVX2-NEXT: movq $-1024, %rax # imm = 0xFC00
10021002 ; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1
10031003 ; AVX2-NEXT: .p2align 4, 0x90
10041004 ; AVX2-NEXT: .LBB3_1: # %vector.body
10201020 ; AVX512F-LABEL: sad_2i8:
10211021 ; AVX512F: # BB#0: # %entry
10221022 ; AVX512F-NEXT: vpxor %xmm0, %xmm0, %xmm0
1023 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
1023 ; AVX512F-NEXT: movq $-1024, %rax # imm = 0xFC00
10241024 ; AVX512F-NEXT: vpxor %xmm1, %xmm1, %xmm1
10251025 ; AVX512F-NEXT: .p2align 4, 0x90
10261026 ; AVX512F-NEXT: .LBB3_1: # %vector.body
10421042 ; AVX512BW-LABEL: sad_2i8:
10431043 ; AVX512BW: # BB#0: # %entry
10441044 ; AVX512BW-NEXT: vpxor %xmm0, %xmm0, %xmm0
1045 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFFFFFFFFFFFFFC00
1045 ; AVX512BW-NEXT: movq $-1024, %rax # imm = 0xFC00
10461046 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
10471047 ; AVX512BW-NEXT: .p2align 4, 0x90
10481048 ; AVX512BW-NEXT: .LBB3_1: # %vector.body
14101410 ; X32-NEXT: leal (%esp), %ecx
14111411 ; X32-NEXT: stmxcsr (%ecx)
14121412 ; X32-NEXT: movl (%esp), %edx
1413 ; X32-NEXT: andl $-8065, %edx # imm = 0xFFFFFFFFFFFFE07F
1413 ; X32-NEXT: andl $-8065, %edx # imm = 0xE07F
14141414 ; X32-NEXT: orl %eax, %edx
14151415 ; X32-NEXT: movl %edx, (%esp)
14161416 ; X32-NEXT: ldmxcsr (%ecx)
14221422 ; X64-NEXT: leaq -{{[0-9]+}}(%rsp), %rax
14231423 ; X64-NEXT: stmxcsr (%rax)
14241424 ; X64-NEXT: movl -{{[0-9]+}}(%rsp), %ecx
1425 ; X64-NEXT: andl $-8065, %ecx # imm = 0xFFFFFFFFFFFFE07F
1425 ; X64-NEXT: andl $-8065, %ecx # imm = 0xE07F
14261426 ; X64-NEXT: orl %edi, %ecx
14271427 ; X64-NEXT: movl %ecx, -{{[0-9]+}}(%rsp)
14281428 ; X64-NEXT: ldmxcsr (%rax)
14831483 ; X32-NEXT: leal (%esp), %ecx
14841484 ; X32-NEXT: stmxcsr (%ecx)
14851485 ; X32-NEXT: movl (%esp), %edx
1486 ; X32-NEXT: andl $-32769, %edx # imm = 0xFFFFFFFFFFFF7FFF
1486 ; X32-NEXT: andl $-32769, %edx # imm = 0xFFFF7FFF
14871487 ; X32-NEXT: orl %eax, %edx
14881488 ; X32-NEXT: movl %edx, (%esp)
14891489 ; X32-NEXT: ldmxcsr (%ecx)
14951495 ; X64-NEXT: leaq -{{[0-9]+}}(%rsp), %rax
14961496 ; X64-NEXT: stmxcsr (%rax)
14971497 ; X64-NEXT: movl -{{[0-9]+}}(%rsp), %ecx
1498 ; X64-NEXT: andl $-32769, %ecx # imm = 0xFFFFFFFFFFFF7FFF
1498 ; X64-NEXT: andl $-32769, %ecx # imm = 0xFFFF7FFF
14991499 ; X64-NEXT: orl %edi, %ecx
15001500 ; X64-NEXT: movl %ecx, -{{[0-9]+}}(%rsp)
15011501 ; X64-NEXT: ldmxcsr (%rax)
15631563 ; X32-NEXT: leal (%esp), %ecx
15641564 ; X32-NEXT: stmxcsr (%ecx)
15651565 ; X32-NEXT: movl (%esp), %edx
1566 ; X32-NEXT: andl $-24577, %edx # imm = 0xFFFFFFFFFFFF9FFF
1566 ; X32-NEXT: andl $-24577, %edx # imm = 0x9FFF
15671567 ; X32-NEXT: orl %eax, %edx
15681568 ; X32-NEXT: movl %edx, (%esp)
15691569 ; X32-NEXT: ldmxcsr (%ecx)
15751575 ; X64-NEXT: leaq -{{[0-9]+}}(%rsp), %rax
15761576 ; X64-NEXT: stmxcsr (%rax)
15771577 ; X64-NEXT: movl -{{[0-9]+}}(%rsp), %ecx
1578 ; X64-NEXT: andl $-24577, %ecx # imm = 0xFFFFFFFFFFFF9FFF
1578 ; X64-NEXT: andl $-24577, %ecx # imm = 0x9FFF
15791579 ; X64-NEXT: orl %edi, %ecx
15801580 ; X64-NEXT: movl %ecx, -{{[0-9]+}}(%rsp)
15811581 ; X64-NEXT: ldmxcsr (%rax)
5757 ; X32-SSE1-NEXT: movl %esp, %ebp
5858 ; X32-SSE1-NEXT: andl $-16, %esp
5959 ; X32-SSE1-NEXT: subl $32, %esp
60 ; X32-SSE1-NEXT: movl $-2147483648, %eax # imm = 0xFFFFFFFF80000000
60 ; X32-SSE1-NEXT: movl $-2147483648, %eax # imm = 0x80000000
6161 ; X32-SSE1-NEXT: movl 12(%ebp), %ecx
6262 ; X32-SSE1-NEXT: xorl %eax, %ecx
6363 ; X32-SSE1-NEXT: movl %ecx, {{[0-9]+}}(%esp)
7070 ;
7171 ; X32-SSE2-LABEL: fneg_bitcast:
7272 ; X32-SSE2: # BB#0:
73 ; X32-SSE2-NEXT: movl $-2147483648, %eax # imm = 0xFFFFFFFF80000000
73 ; X32-SSE2-NEXT: movl $-2147483648, %eax # imm = 0x80000000
7474 ; X32-SSE2-NEXT: movl {{[0-9]+}}(%esp), %ecx
7575 ; X32-SSE2-NEXT: xorl %eax, %ecx
7676 ; X32-SSE2-NEXT: xorl {{[0-9]+}}(%esp), %eax
614614 ; SSE-NEXT: orq %rcx, %rdx
615615 ; SSE-NEXT: movq %rdi, %rcx
616616 ; SSE-NEXT: shrq %rcx
617 ; SSE-NEXT: andl $-2147483648, %ecx # imm = 0xFFFFFFFF80000000
617 ; SSE-NEXT: andl $-2147483648, %ecx # imm = 0x80000000
618618 ; SSE-NEXT: orq %rdx, %rcx
619619 ; SSE-NEXT: movq %rdi, %rdx
620620 ; SSE-NEXT: shrq $3, %rdx
870870 ; AVX-NEXT: orq %rcx, %rdx
871871 ; AVX-NEXT: movq %rdi, %rcx
872872 ; AVX-NEXT: shrq %rcx
873 ; AVX-NEXT: andl $-2147483648, %ecx # imm = 0xFFFFFFFF80000000
873 ; AVX-NEXT: andl $-2147483648, %ecx # imm = 0x80000000
874874 ; AVX-NEXT: orq %rdx, %rcx
875875 ; AVX-NEXT: movq %rdi, %rdx
876876 ; AVX-NEXT: shrq $3, %rdx
8686 ; AVX-NEXT: vextracti32x4 $3, %zmm0, %xmm1
8787 ; AVX-NEXT: vpextrd $1, %xmm1, %eax
8888 ; AVX-NEXT: cltq
89 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
89 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
9090 ; AVX-NEXT: shrq $32, %rcx
9191 ; AVX-NEXT: addl %ecx, %eax
9292 ; AVX-NEXT: movl %eax, %ecx
9595 ; AVX-NEXT: addl %ecx, %eax
9696 ; AVX-NEXT: vmovd %xmm1, %ecx
9797 ; AVX-NEXT: movslq %ecx, %rcx
98 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
98 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
9999 ; AVX-NEXT: shrq $32, %rdx
100100 ; AVX-NEXT: addl %edx, %ecx
101101 ; AVX-NEXT: movl %ecx, %edx
106106 ; AVX-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
107107 ; AVX-NEXT: vpextrd $2, %xmm1, %eax
108108 ; AVX-NEXT: cltq
109 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
109 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
110110 ; AVX-NEXT: shrq $32, %rcx
111111 ; AVX-NEXT: addl %ecx, %eax
112112 ; AVX-NEXT: movl %eax, %ecx
116116 ; AVX-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
117117 ; AVX-NEXT: vpextrd $3, %xmm1, %eax
118118 ; AVX-NEXT: cltq
119 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
119 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
120120 ; AVX-NEXT: shrq $32, %rcx
121121 ; AVX-NEXT: addl %ecx, %eax
122122 ; AVX-NEXT: movl %eax, %ecx
127127 ; AVX-NEXT: vextracti32x4 $2, %zmm0, %xmm2
128128 ; AVX-NEXT: vpextrd $1, %xmm2, %eax
129129 ; AVX-NEXT: cltq
130 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
130 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
131131 ; AVX-NEXT: shrq $32, %rcx
132132 ; AVX-NEXT: addl %ecx, %eax
133133 ; AVX-NEXT: movl %eax, %ecx
136136 ; AVX-NEXT: addl %ecx, %eax
137137 ; AVX-NEXT: vmovd %xmm2, %ecx
138138 ; AVX-NEXT: movslq %ecx, %rcx
139 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
139 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
140140 ; AVX-NEXT: shrq $32, %rdx
141141 ; AVX-NEXT: addl %edx, %ecx
142142 ; AVX-NEXT: movl %ecx, %edx
147147 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
148148 ; AVX-NEXT: vpextrd $2, %xmm2, %eax
149149 ; AVX-NEXT: cltq
150 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
150 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
151151 ; AVX-NEXT: shrq $32, %rcx
152152 ; AVX-NEXT: addl %ecx, %eax
153153 ; AVX-NEXT: movl %eax, %ecx
157157 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
158158 ; AVX-NEXT: vpextrd $3, %xmm2, %eax
159159 ; AVX-NEXT: cltq
160 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
160 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
161161 ; AVX-NEXT: shrq $32, %rcx
162162 ; AVX-NEXT: addl %ecx, %eax
163163 ; AVX-NEXT: movl %eax, %ecx
169169 ; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
170170 ; AVX-NEXT: vpextrd $1, %xmm2, %eax
171171 ; AVX-NEXT: cltq
172 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
172 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
173173 ; AVX-NEXT: shrq $32, %rcx
174174 ; AVX-NEXT: addl %ecx, %eax
175175 ; AVX-NEXT: movl %eax, %ecx
178178 ; AVX-NEXT: addl %ecx, %eax
179179 ; AVX-NEXT: vmovd %xmm2, %ecx
180180 ; AVX-NEXT: movslq %ecx, %rcx
181 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
181 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
182182 ; AVX-NEXT: shrq $32, %rdx
183183 ; AVX-NEXT: addl %edx, %ecx
184184 ; AVX-NEXT: movl %ecx, %edx
189189 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
190190 ; AVX-NEXT: vpextrd $2, %xmm2, %eax
191191 ; AVX-NEXT: cltq
192 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
192 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
193193 ; AVX-NEXT: shrq $32, %rcx
194194 ; AVX-NEXT: addl %ecx, %eax
195195 ; AVX-NEXT: movl %eax, %ecx
199199 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
200200 ; AVX-NEXT: vpextrd $3, %xmm2, %eax
201201 ; AVX-NEXT: cltq
202 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
202 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
203203 ; AVX-NEXT: shrq $32, %rcx
204204 ; AVX-NEXT: addl %ecx, %eax
205205 ; AVX-NEXT: movl %eax, %ecx
209209 ; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
210210 ; AVX-NEXT: vpextrd $1, %xmm0, %eax
211211 ; AVX-NEXT: cltq
212 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
212 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
213213 ; AVX-NEXT: shrq $32, %rcx
214214 ; AVX-NEXT: addl %ecx, %eax
215215 ; AVX-NEXT: movl %eax, %ecx
218218 ; AVX-NEXT: addl %ecx, %eax
219219 ; AVX-NEXT: vmovd %xmm0, %ecx
220220 ; AVX-NEXT: movslq %ecx, %rcx
221 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
221 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
222222 ; AVX-NEXT: shrq $32, %rdx
223223 ; AVX-NEXT: addl %edx, %ecx
224224 ; AVX-NEXT: movl %ecx, %edx
229229 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
230230 ; AVX-NEXT: vpextrd $2, %xmm0, %eax
231231 ; AVX-NEXT: cltq
232 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
232 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
233233 ; AVX-NEXT: shrq $32, %rcx
234234 ; AVX-NEXT: addl %ecx, %eax
235235 ; AVX-NEXT: movl %eax, %ecx
239239 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
240240 ; AVX-NEXT: vpextrd $3, %xmm0, %eax
241241 ; AVX-NEXT: cltq
242 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
242 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
243243 ; AVX-NEXT: shrq $32, %rcx
244244 ; AVX-NEXT: addl %ecx, %eax
245245 ; AVX-NEXT: movl %eax, %ecx
11611161 ; AVX-NEXT: vextracti32x4 $3, %zmm0, %xmm1
11621162 ; AVX-NEXT: vpextrd $1, %xmm1, %eax
11631163 ; AVX-NEXT: cltq
1164 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1164 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
11651165 ; AVX-NEXT: shrq $32, %rcx
11661166 ; AVX-NEXT: addl %eax, %ecx
11671167 ; AVX-NEXT: movl %ecx, %edx
11731173 ; AVX-NEXT: subl %edx, %eax
11741174 ; AVX-NEXT: vmovd %xmm1, %ecx
11751175 ; AVX-NEXT: movslq %ecx, %rcx
1176 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
1176 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
11771177 ; AVX-NEXT: shrq $32, %rdx
11781178 ; AVX-NEXT: addl %ecx, %edx
11791179 ; AVX-NEXT: movl %edx, %esi
11871187 ; AVX-NEXT: vpinsrd $1, %eax, %xmm2, %xmm2
11881188 ; AVX-NEXT: vpextrd $2, %xmm1, %eax
11891189 ; AVX-NEXT: cltq
1190 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1190 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
11911191 ; AVX-NEXT: shrq $32, %rcx
11921192 ; AVX-NEXT: addl %eax, %ecx
11931193 ; AVX-NEXT: movl %ecx, %edx
12001200 ; AVX-NEXT: vpinsrd $2, %eax, %xmm2, %xmm2
12011201 ; AVX-NEXT: vpextrd $3, %xmm1, %eax
12021202 ; AVX-NEXT: cltq
1203 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1203 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12041204 ; AVX-NEXT: shrq $32, %rcx
12051205 ; AVX-NEXT: addl %eax, %ecx
12061206 ; AVX-NEXT: movl %ecx, %edx
12141214 ; AVX-NEXT: vextracti32x4 $2, %zmm0, %xmm2
12151215 ; AVX-NEXT: vpextrd $1, %xmm2, %eax
12161216 ; AVX-NEXT: cltq
1217 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1217 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12181218 ; AVX-NEXT: shrq $32, %rcx
12191219 ; AVX-NEXT: addl %eax, %ecx
12201220 ; AVX-NEXT: movl %ecx, %edx
12261226 ; AVX-NEXT: subl %edx, %eax
12271227 ; AVX-NEXT: vmovd %xmm2, %ecx
12281228 ; AVX-NEXT: movslq %ecx, %rcx
1229 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
1229 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
12301230 ; AVX-NEXT: shrq $32, %rdx
12311231 ; AVX-NEXT: addl %ecx, %edx
12321232 ; AVX-NEXT: movl %edx, %esi
12401240 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
12411241 ; AVX-NEXT: vpextrd $2, %xmm2, %eax
12421242 ; AVX-NEXT: cltq
1243 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1243 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12441244 ; AVX-NEXT: shrq $32, %rcx
12451245 ; AVX-NEXT: addl %eax, %ecx
12461246 ; AVX-NEXT: movl %ecx, %edx
12531253 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
12541254 ; AVX-NEXT: vpextrd $3, %xmm2, %eax
12551255 ; AVX-NEXT: cltq
1256 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1256 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12571257 ; AVX-NEXT: shrq $32, %rcx
12581258 ; AVX-NEXT: addl %eax, %ecx
12591259 ; AVX-NEXT: movl %ecx, %edx
12681268 ; AVX-NEXT: vextracti32x4 $1, %zmm0, %xmm2
12691269 ; AVX-NEXT: vpextrd $1, %xmm2, %eax
12701270 ; AVX-NEXT: cltq
1271 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1271 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12721272 ; AVX-NEXT: shrq $32, %rcx
12731273 ; AVX-NEXT: addl %eax, %ecx
12741274 ; AVX-NEXT: movl %ecx, %edx
12801280 ; AVX-NEXT: subl %edx, %eax
12811281 ; AVX-NEXT: vmovd %xmm2, %ecx
12821282 ; AVX-NEXT: movslq %ecx, %rcx
1283 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
1283 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
12841284 ; AVX-NEXT: shrq $32, %rdx
12851285 ; AVX-NEXT: addl %ecx, %edx
12861286 ; AVX-NEXT: movl %edx, %esi
12941294 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
12951295 ; AVX-NEXT: vpextrd $2, %xmm2, %eax
12961296 ; AVX-NEXT: cltq
1297 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1297 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
12981298 ; AVX-NEXT: shrq $32, %rcx
12991299 ; AVX-NEXT: addl %eax, %ecx
13001300 ; AVX-NEXT: movl %ecx, %edx
13071307 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
13081308 ; AVX-NEXT: vpextrd $3, %xmm2, %eax
13091309 ; AVX-NEXT: cltq
1310 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1310 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
13111311 ; AVX-NEXT: shrq $32, %rcx
13121312 ; AVX-NEXT: addl %eax, %ecx
13131313 ; AVX-NEXT: movl %ecx, %edx
13201320 ; AVX-NEXT: vpinsrd $3, %eax, %xmm3, %xmm2
13211321 ; AVX-NEXT: vpextrd $1, %xmm0, %eax
13221322 ; AVX-NEXT: cltq
1323 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1323 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
13241324 ; AVX-NEXT: shrq $32, %rcx
13251325 ; AVX-NEXT: addl %eax, %ecx
13261326 ; AVX-NEXT: movl %ecx, %edx
13321332 ; AVX-NEXT: subl %edx, %eax
13331333 ; AVX-NEXT: vmovd %xmm0, %ecx
13341334 ; AVX-NEXT: movslq %ecx, %rcx
1335 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0xFFFFFFFF92492493
1335 ; AVX-NEXT: imulq $-1840700269, %rcx, %rdx # imm = 0x92492493
13361336 ; AVX-NEXT: shrq $32, %rdx
13371337 ; AVX-NEXT: addl %ecx, %edx
13381338 ; AVX-NEXT: movl %edx, %esi
13461346 ; AVX-NEXT: vpinsrd $1, %eax, %xmm3, %xmm3
13471347 ; AVX-NEXT: vpextrd $2, %xmm0, %eax
13481348 ; AVX-NEXT: cltq
1349 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1349 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
13501350 ; AVX-NEXT: shrq $32, %rcx
13511351 ; AVX-NEXT: addl %eax, %ecx
13521352 ; AVX-NEXT: movl %ecx, %edx
13591359 ; AVX-NEXT: vpinsrd $2, %eax, %xmm3, %xmm3
13601360 ; AVX-NEXT: vpextrd $3, %xmm0, %eax
13611361 ; AVX-NEXT: cltq
1362 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0xFFFFFFFF92492493
1362 ; AVX-NEXT: imulq $-1840700269, %rax, %rcx # imm = 0x92492493
13631363 ; AVX-NEXT: shrq $32, %rcx
13641364 ; AVX-NEXT: addl %eax, %ecx
13651365 ; AVX-NEXT: movl %ecx, %edx
1717 # CHECK: movb $-128, %al
1818
1919 # CHECK: movw $32767, %ax # imm = 0x7FFF
20 # CHECK: movw $-32768, %ax # imm = 0xFFFFFFFFFFFF8000
20 # CHECK: movw $-32768, %ax # imm = 0x8000
2121
2222 # CHECK: movl $2147483647, %eax # imm = 0x7FFFFFFF
23 # CHECK: movl $-2147483648, %eax # imm = 0xFFFFFFFF80000000
23 # CHECK: movl $-2147483648, %eax # imm = 0x80000000
2424
2525 # CHECK: movabsq $9223372036854775807, %rax # imm = 0x7FFFFFFFFFFFFFFF
2626 # CHECK: movabsq $-9223372036854775808, %rax # imm = 0x8000000000000000