
Commit aa8e1bc

[X86] Add BLEND/UNPCK shuffles to canCreateUndefOrPoisonForTargetNode/isGuaranteedNotToBeUndefOrPoisonForTargetNode (#146728)
None of these implicitly generates UNDEF/POISON.
1 parent: bf016b9
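
For context (a reviewer's summary, not part of the commit message): `canCreateUndefOrPoisonForTargetNode` tells the generic DAG combiner whether a target node can introduce undef/poison beyond what its operands already carry, while `isGuaranteedNotToBeUndefOrPoisonForTargetNode` answers the stronger question for a concrete value by recursing through its operands. Blends and unpacks only select source lanes, so they create nothing new, which is exactly what this patch encodes. A minimal sketch of that two-hook shape, using a toy node type rather than LLVM's real SDNode/SDValue API:

```cpp
// Toy model of the two poison-tracking hooks; the Op/Node types here are
// invented for illustration and are not LLVM's API.
#include <cassert>
#include <vector>

enum class Op { Constant, AddNSW, Blend, Unpack };

struct Node {
  Op Opcode;
  std::vector<const Node *> Ops;
};

// Analogue of canCreateUndefOrPoisonForTargetNode: can this node introduce
// undef/poison that its operands did not already contain?
bool canCreateUndefOrPoison(const Node &N) {
  switch (N.Opcode) {
  case Op::AddNSW:
    return true; // signed overflow under the nsw flag yields poison
  case Op::Blend:
  case Op::Unpack:
    return false; // pure lane selection: nothing new is ever created
  default:
    return false;
  }
}

// Analogue of isGuaranteedNotToBeUndefOrPoisonForTargetNode: a value is
// known well defined if its node creates nothing and all inputs are good.
bool isGuaranteedNotToBeUndefOrPoison(const Node &N) {
  if (N.Opcode == Op::Constant)
    return true;
  if (canCreateUndefOrPoison(N))
    return false;
  for (const Node *O : N.Ops)
    if (!isGuaranteedNotToBeUndefOrPoison(*O))
      return false;
  return true;
}

int main() {
  Node C{Op::Constant, {}};
  Node Add{Op::AddNSW, {&C, &C}};      // may overflow, so may be poison
  Node BlendGood{Op::Blend, {&C, &C}}; // blend of well-defined inputs
  Node BlendBad{Op::Blend, {&C, &Add}};
  assert(isGuaranteedNotToBeUndefOrPoison(BlendGood));
  assert(!isGuaranteedNotToBeUndefOrPoison(BlendBad)); // poison propagates
}
```

The practical payoff is that combines such as folding `freeze (blend x, y)` into `blend (freeze x), (freeze y)` become legal once the blend is known to create nothing on its own.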

2 files changed: +65 -58 lines

llvm/lib/Target/X86/X86ISelLowering.cpp

Lines changed: 10 additions & 2 deletions
```diff
@@ -45042,7 +45042,10 @@ bool X86TargetLowering::isGuaranteedNotToBeUndefOrPoisonForTargetNode(
   unsigned NumElts = DemandedElts.getBitWidth();
 
   switch (Op.getOpcode()) {
+  case X86ISD::BLENDI:
   case X86ISD::PSHUFD:
+  case X86ISD::UNPCKL:
+  case X86ISD::UNPCKH:
   case X86ISD::VPERMILPI:
   case X86ISD::VPERMV3: {
     SmallVector<int, 8> Mask;
@@ -45086,11 +45089,16 @@ bool X86TargetLowering::canCreateUndefOrPoisonForTargetNode(
   case X86ISD::VSRLI:
   case X86ISD::VSRAI:
     return false;
+  // SSE blends.
+  case X86ISD::BLENDI:
+  case X86ISD::BLENDV:
+    return false;
+  // SSE target shuffles.
   case X86ISD::PSHUFD:
+  case X86ISD::UNPCKL:
+  case X86ISD::UNPCKH:
   case X86ISD::VPERMILPI:
   case X86ISD::VPERMV3:
-  case X86ISD::UNPCKH:
-  case X86ISD::UNPCKL:
     return false;
   // SSE comparisons handle all icmp/fcmp cases.
   // TODO: Add CMPM/MM with test coverage.
```
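
The cases added to `isGuaranteedNotToBeUndefOrPoisonForTargetNode` fall through into the existing block that decodes the node's shuffle mask (the `SmallVector<int, 8> Mask` visible in the hunk) and then, roughly, re-queries the operands for just the source lanes the mask selects. A self-contained sketch of that demanded-elements idea; the function name and the plain `std::vector` types are invented for illustration, not taken from LLVM:

```cpp
// Demanded-elements check for a two-input shuffle: mask entries in
// [0, NumElts) select from operand 0, [NumElts, 2*NumElts) from operand 1,
// and the shuffle result is poison-free iff every selected lane is.
#include <cassert>
#include <vector>

bool shuffleIsGuaranteedNotPoison(const std::vector<int> &Mask,
                                  unsigned NumElts,
                                  const std::vector<bool> &DemandedElts,
                                  const std::vector<bool> &Op0LaneGood,
                                  const std::vector<bool> &Op1LaneGood) {
  for (unsigned I = 0; I != NumElts; ++I) {
    if (!DemandedElts[I])
      continue; // lane unused downstream: its source does not matter
    int M = Mask[I];
    if (M < 0)
      return false; // undef/sentinel entry: be conservative
    bool Good = M < int(NumElts) ? Op0LaneGood[M] : Op1LaneGood[M - NumElts];
    if (!Good)
      return false;
  }
  return true;
}

int main() {
  // UNPCKL-style mask for 4 lanes: result = {a0, b0, a1, b1}.
  std::vector<int> Unpckl = {0, 4, 1, 5};
  std::vector<bool> All(4, true), Demand = {true, true, false, true};
  std::vector<bool> A1Bad = {true, false, true, true}; // a1 not known good
  assert(shuffleIsGuaranteedNotPoison(Unpckl, 4, All, All, All));
  // Result lane 2 reads a1, which is not known good, so this fails.
  assert(!shuffleIsGuaranteedNotPoison(Unpckl, 4, All, A1Bad, All));
  // But when lane 2 is not demanded, the bad source lane is tolerated.
  assert(shuffleIsGuaranteedNotPoison(Unpckl, 4, Demand, A1Bad, All));
}
```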

llvm/test/CodeGen/X86/avg.ll

Lines changed: 55 additions & 56 deletions
```diff
@@ -1829,75 +1829,74 @@ define void @not_avg_v16i8_wide_constants(ptr %a, ptr %b) nounwind {
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm3 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm0 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
 ; AVX1-NEXT:    vpmovzxbw {{.*#+}} xmm1 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
-; AVX1-NEXT:    vpextrw $7, %xmm3, %edx
-; AVX1-NEXT:    vpextrw $6, %xmm3, %ecx
-; AVX1-NEXT:    vpextrw $5, %xmm3, %eax
+; AVX1-NEXT:    vpxor %xmm4, %xmm4, %xmm4
+; AVX1-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
+; AVX1-NEXT:    vpextrd $2, %xmm5, %ecx
+; AVX1-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
+; AVX1-NEXT:    vpextrd $2, %xmm4, %eax
+; AVX1-NEXT:    vpextrw $3, %xmm3, %edx
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm4
-; AVX1-NEXT:    vpextrw $4, %xmm3, %edx
-; AVX1-NEXT:    decl %ecx
-; AVX1-NEXT:    vmovd %ecx, %xmm5
-; AVX1-NEXT:    vpextrw $1, %xmm3, %ecx
-; AVX1-NEXT:    decl %eax
-; AVX1-NEXT:    vmovd %eax, %xmm6
-; AVX1-NEXT:    vpextrw $0, %xmm3, %eax
+; AVX1-NEXT:    vpextrw $2, %xmm3, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm5
+; AVX1-NEXT:    vpextrw $1, %xmm3, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm6
+; AVX1-NEXT:    vpextrw $0, %xmm3, %edx
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm7
-; AVX1-NEXT:    vpextrw $3, %xmm3, %edx
-; AVX1-NEXT:    decq %rcx
-; AVX1-NEXT:    vmovq %rcx, %xmm8
-; AVX1-NEXT:    vpextrw $2, %xmm3, %ecx
-; AVX1-NEXT:    decq %rax
-; AVX1-NEXT:    vmovq %rax, %xmm3
-; AVX1-NEXT:    vpextrw $7, %xmm2, %eax
+; AVX1-NEXT:    vpextrw $3, %xmm2, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm8
+; AVX1-NEXT:    vpextrw $2, %xmm2, %edx
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm9
-; AVX1-NEXT:    vpextrw $6, %xmm2, %edx
-; AVX1-NEXT:    decl %ecx
-; AVX1-NEXT:    vmovd %ecx, %xmm10
-; AVX1-NEXT:    vpextrw $5, %xmm2, %ecx
-; AVX1-NEXT:    decl %eax
-; AVX1-NEXT:    vmovd %eax, %xmm11
-; AVX1-NEXT:    vpextrw $4, %xmm2, %eax
+; AVX1-NEXT:    vpextrw $1, %xmm2, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm10
+; AVX1-NEXT:    vpextrw $0, %xmm2, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm11
+; AVX1-NEXT:    vpextrw $5, %xmm3, %edx
 ; AVX1-NEXT:    decl %edx
 ; AVX1-NEXT:    vmovd %edx, %xmm12
-; AVX1-NEXT:    vpextrw $1, %xmm2, %edx
+; AVX1-NEXT:    vpextrw $4, %xmm3, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm13
+; AVX1-NEXT:    vpextrw $5, %xmm2, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm14
+; AVX1-NEXT:    vpextrw $4, %xmm2, %edx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm15
+; AVX1-NEXT:    vpextrw $7, %xmm3, %edx
 ; AVX1-NEXT:    decl %ecx
-; AVX1-NEXT:    vmovd %ecx, %xmm13
-; AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
-; AVX1-NEXT:    decl %eax
-; AVX1-NEXT:    vmovd %eax, %xmm14
-; AVX1-NEXT:    vpextrw $3, %xmm2, %eax
-; AVX1-NEXT:    decq %rdx
-; AVX1-NEXT:    vmovq %rdx, %xmm15
-; AVX1-NEXT:    vpextrw $2, %xmm2, %edx
-; AVX1-NEXT:    decq %rcx
-; AVX1-NEXT:    vmovq %rcx, %xmm2
+; AVX1-NEXT:    vmovd %ecx, %xmm3
+; AVX1-NEXT:    vpextrw $7, %xmm2, %ecx
+; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    vmovd %edx, %xmm2
 ; AVX1-NEXT:    decl %eax
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
 ; AVX1-NEXT:    vmovd %eax, %xmm5
-; AVX1-NEXT:    decl %edx
+; AVX1-NEXT:    decl %ecx
 ; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; AVX1-NEXT:    vmovd %edx, %xmm7
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[0,0,0,0]
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[0,1,0,1]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm4 = xmm6[0,1,2,3,4,5],xmm4[6,7]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm8[0],xmm3[1],xmm8[1],xmm3[2],xmm8[2],xmm3[3],xmm8[3]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[0,0,1,1]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm6[2,3],xmm3[4,5,6,7]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm3 = xmm3[0,1,2,3],xmm4[4,5,6,7]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[0,0,0,0]
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[0,1,0,1]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm4 = xmm6[0,1,2,3,4,5],xmm4[6,7]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm15[0],xmm2[1],xmm15[1],xmm2[2],xmm15[2],xmm2[3],xmm15[3]
-; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
-; AVX1-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[0,0,1,1]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm5[2,3],xmm2[4,5,6,7]
-; AVX1-NEXT:    vpblendw {{.*#+}} xmm2 = xmm2[0,1,2,3],xmm4[4,5,6,7]
-; AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm2, %ymm2
+; AVX1-NEXT:    vmovd %ecx, %xmm7
+; AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm6[0],xmm4[0],xmm6[1],xmm4[1]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
+; AVX1-NEXT:    vpunpckldq {{.*#+}} xmm6 = xmm8[0],xmm6[0],xmm8[1],xmm6[1]
+; AVX1-NEXT:    vinsertf128 $1, %xmm4, %ymm6, %ymm4
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
+; AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm8, %ymm6
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
+; AVX1-NEXT:    vmovddup {{.*#+}} ymm3 = ymm6[0,0,2,2]
+; AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
+; AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm5, %ymm2
+; AVX1-NEXT:    vshufps {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
+; AVX1-NEXT:    vblendps {{.*#+}} ymm2 = ymm3[0,1,2],ymm2[3],ymm3[4,5,6],ymm2[7]
+; AVX1-NEXT:    vblendps {{.*#+}} ymm2 = ymm4[0,1],ymm2[2,3],ymm4[4,5],ymm2[6,7]
 ; AVX1-NEXT:    vinsertf128 $1, %xmm0, %ymm1, %ymm0
 ; AVX1-NEXT:    vandps %ymm0, %ymm2, %ymm1
 ; AVX1-NEXT:    vxorps %ymm0, %ymm2, %ymm0
```
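
The avg.ll churn is a knock-on effect of the hook change rather than a hand-edited test update: with UNPCK/BLEND nodes now visible to the poison-tracking hooks, the shuffle combines on this AVX1 path are presumably no longer blocked, so the vector is rebuilt with vpunpckldq/vshufps/vblendps sequences instead of the previous vpshufd/vpblendw chains, a net saving of one instruction (55 lines added against 56 removed).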
