@@ -75,8 +75,7 @@ define <4 x double> @insert_v4f64_0zz3(<4 x double> %a) {
7575; AVX-LABEL: insert_v4f64_0zz3:
7676; AVX: # BB#0:
7777; AVX-NEXT: vxorpd %ymm1, %ymm1, %ymm1
78- ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3]
79- ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3]
78+ ; AVX-NEXT: vblendpd {{.*#+}} ymm0 = ymm0[0],ymm1[1,2],ymm0[3]
8079; AVX-NEXT: retq
8180 %1 = insertelement <4 x double > %a , double 0 .0 , i32 1
8281 %2 = insertelement <4 x double > %1 , double 0 .0 , i32 2
@@ -235,8 +234,7 @@ define <8 x float> @insert_v8f32_z12345z7(<8 x float> %a) {
235234; AVX-LABEL: insert_v8f32_z12345z7:
236235; AVX: # BB#0:
237236; AVX-NEXT: vxorps %ymm1, %ymm1, %ymm1
238- ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5,6,7]
239- ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
237+ ; AVX-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5],ymm1[6],ymm0[7]
240238; AVX-NEXT: retq
241239 %1 = insertelement <8 x float > %a , float 0 .0 , i32 0
242240 %2 = insertelement <8 x float > %1 , float 0 .0 , i32 6
@@ -330,15 +328,13 @@ define <8 x i32> @insert_v8i32_z12345z7(<8 x i32> %a) {
330328; AVX1-LABEL: insert_v8i32_z12345z7:
331329; AVX1: # BB#0:
332330; AVX1-NEXT: vxorps %ymm1, %ymm1, %ymm1
333- ; AVX1-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5,6,7]
334- ; AVX1-NEXT: vblendps {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
331+ ; AVX1-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5],ymm1[6],ymm0[7]
335332; AVX1-NEXT: retq
336333;
337334; AVX2-LABEL: insert_v8i32_z12345z7:
338335; AVX2: # BB#0:
339336; AVX2-NEXT: vpxor %ymm1, %ymm1, %ymm1
340- ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5,6,7]
341- ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3,4,5],ymm1[6],ymm0[7]
337+ ; AVX2-NEXT: vpblendd {{.*#+}} ymm0 = ymm1[0],ymm0[1,2,3,4,5],ymm1[6],ymm0[7]
342338; AVX2-NEXT: retq
343339 %1 = insertelement <8 x i32 > %a , i32 0 , i32 0
344340 %2 = insertelement <8 x i32 > %1 , i32 0 , i32 6
@@ -370,15 +366,13 @@ define <8 x i16> @insert_v8i16_z12345z7(<8 x i16> %a) {
370366; SSE41-LABEL: insert_v8i16_z12345z7:
371367; SSE41: # BB#0:
372368; SSE41-NEXT: pxor %xmm1, %xmm1
373- ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5,6,7]
374- ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm1[6],xmm0[7]
369+ ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5],xmm1[6],xmm0[7]
375370; SSE41-NEXT: retq
376371;
377372; AVX-LABEL: insert_v8i16_z12345z7:
378373; AVX: # BB#0:
379374; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1
380- ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5,6,7]
381- ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm1[6],xmm0[7]
375+ ; AVX-NEXT: vpblendw {{.*#+}} xmm0 = xmm1[0],xmm0[1,2,3,4,5],xmm1[6],xmm0[7]
382376; AVX-NEXT: retq
383377 %1 = insertelement <8 x i16 > %a , i16 0 , i32 0
384378 %2 = insertelement <8 x i16 > %1 , i16 0 , i32 6
@@ -413,8 +407,7 @@ define <16 x i16> @insert_v16i16_z12345z789ABZDEz(<16 x i16> %a) {
413407; SSE41-LABEL: insert_v16i16_z12345z789ABZDEz:
414408; SSE41: # BB#0:
415409; SSE41-NEXT: pxor %xmm2, %xmm2
416- ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1,2,3,4,5,6,7]
417- ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5],xmm2[6],xmm0[7]
410+ ; SSE41-NEXT: pblendw {{.*#+}} xmm0 = xmm2[0],xmm0[1,2,3,4,5],xmm2[6],xmm0[7]
418411; SSE41-NEXT: pblendw {{.*#+}} xmm1 = xmm1[0,1,2,3,4,5,6],xmm2[7]
419412; SSE41-NEXT: retq
420413;