[X86] Fix some checks to use X86 instead of X32.

These tests were recently updated, so it looks like something went wrong there.

llvm-svn: 334786
Craig Topper 2018-06-15 04:42:55 +00:00
parent f43807dd89
commit 3b060daba5
1 changed file with 96 additions and 96 deletions
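The diff below renames the 32-bit FileCheck prefix on each check block from X32 to X86. For reference, prefixes like X86 and X64 come from the test's RUN lines; the sketch below shows how such RUN lines typically look, but the triples and -mattr flags are assumptions rather than a copy of this file's actual header. With the 32-bit run keyed to the X86 prefix, any leftover X32-LABEL blocks are simply never matched, so the 32-bit output of those functions was presumably going unchecked until the labels were renamed.

; Sketch only: the real RUN lines in this test may use different triples or attributes.
; RUN: llc < %s -mtriple=i386-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=X64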


@@ -2563,12 +2563,12 @@ entry:
ret <8 x i64> %2
}
define <4 x float> @test_mm_mask_add_ss(<4 x float> %__W, i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_mask_add_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vaddss %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_add_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vaddss %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_add_ss:
; X64: # %bb.0: # %entry
@@ -2588,12 +2588,12 @@ entry:
}
define <4 x float> @test_mm_maskz_add_ss(i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_maskz_add_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vaddss %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_add_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vaddss %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_add_ss:
; X64: # %bb.0: # %entry
@@ -2612,12 +2612,12 @@ entry:
}
define <2 x double> @test_mm_mask_add_sd(<2 x double> %__W, i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_mask_add_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vaddsd %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_add_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vaddsd %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_add_sd:
; X64: # %bb.0: # %entry
@@ -2637,12 +2637,12 @@ entry:
}
define <2 x double> @test_mm_maskz_add_sd(i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_maskz_add_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vaddsd %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_add_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vaddsd %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_add_sd:
; X64: # %bb.0: # %entry
@@ -2661,12 +2661,12 @@ entry:
}
define <4 x float> @test_mm_mask_sub_ss(<4 x float> %__W, i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_mask_sub_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vsubss %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_sub_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vsubss %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_sub_ss:
; X64: # %bb.0: # %entry
@@ -2686,12 +2686,12 @@ entry:
}
define <4 x float> @test_mm_maskz_sub_ss(i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_maskz_sub_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vsubss %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_sub_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vsubss %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_sub_ss:
; X64: # %bb.0: # %entry
@@ -2710,12 +2710,12 @@ entry:
}
define <2 x double> @test_mm_mask_sub_sd(<2 x double> %__W, i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_mask_sub_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vsubsd %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_sub_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vsubsd %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_sub_sd:
; X64: # %bb.0: # %entry
@@ -2735,12 +2735,12 @@ entry:
}
define <2 x double> @test_mm_maskz_sub_sd(i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_maskz_sub_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vsubsd %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_sub_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vsubsd %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_sub_sd:
; X64: # %bb.0: # %entry
@@ -2759,12 +2759,12 @@ entry:
}
define <4 x float> @test_mm_mask_mul_ss(<4 x float> %__W, i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_mask_mul_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vmulss %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_mul_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vmulss %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_mul_ss:
; X64: # %bb.0: # %entry
@@ -2784,12 +2784,12 @@ entry:
}
define <4 x float> @test_mm_maskz_mul_ss(i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_maskz_mul_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vmulss %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_mul_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vmulss %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_mul_ss:
; X64: # %bb.0: # %entry
@@ -2808,12 +2808,12 @@ entry:
}
define <2 x double> @test_mm_mask_mul_sd(<2 x double> %__W, i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_mask_mul_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vmulsd %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_mul_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vmulsd %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_mul_sd:
; X64: # %bb.0: # %entry
@@ -2833,12 +2833,12 @@ entry:
}
define <2 x double> @test_mm_maskz_mul_sd(i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_maskz_mul_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vmulsd %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_mul_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vmulsd %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_mul_sd:
; X64: # %bb.0: # %entry
@@ -2857,12 +2857,12 @@ entry:
}
define <4 x float> @test_mm_mask_div_ss(<4 x float> %__W, i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_mask_div_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vdivss %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_div_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vdivss %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_div_ss:
; X64: # %bb.0: # %entry
@@ -2882,12 +2882,12 @@ entry:
}
define <4 x float> @test_mm_maskz_div_ss(i8 zeroext %__U, <4 x float> %__A, <4 x float> %__B) {
-; X32-LABEL: test_mm_maskz_div_ss:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vdivss %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_div_ss:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vdivss %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_div_ss:
; X64: # %bb.0: # %entry
@@ -2906,12 +2906,12 @@ entry:
}
define <2 x double> @test_mm_mask_div_sd(<2 x double> %__W, i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_mask_div_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vdivsd %xmm2, %xmm1, %xmm0 {%k1}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_mask_div_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vdivsd %xmm2, %xmm1, %xmm0 {%k1}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_mask_div_sd:
; X64: # %bb.0: # %entry
@@ -2931,12 +2931,12 @@ entry:
}
define <2 x double> @test_mm_maskz_div_sd(i8 zeroext %__U, <2 x double> %__A, <2 x double> %__B) {
-; X32-LABEL: test_mm_maskz_div_sd:
-; X32: # %bb.0: # %entry
-; X32-NEXT: movb {{[0-9]+}}(%esp), %al
-; X32-NEXT: kmovw %eax, %k1
-; X32-NEXT: vdivsd %xmm1, %xmm0, %xmm0 {%k1} {z}
-; X32-NEXT: retl
+; X86-LABEL: test_mm_maskz_div_sd:
+; X86: # %bb.0: # %entry
+; X86-NEXT: movb {{[0-9]+}}(%esp), %al
+; X86-NEXT: kmovw %eax, %k1
+; X86-NEXT: vdivsd %xmm1, %xmm0, %xmm0 {%k1} {z}
+; X86-NEXT: retl
;
; X64-LABEL: test_mm_maskz_div_sd:
; X64: # %bb.0: # %entry