[X86] Add guards to some of the x86 intrinsic tests to skip 64-bit mode only intrinsics when compiled for 32-bit mode.

All of the RUN command lines target 64-bit mode, but I sometimes compile
these tests in 32-bit mode to inspect the generated assembly, and the
64-bit-only intrinsics need to be skipped for that to work.

llvm-svn: 365668
This commit is contained in:
Craig Topper 2019-07-10 17:11:23 +00:00
parent ab5a30ac9d
commit f9cb127ca9
10 changed files with 58 additions and 0 deletions

View File

@ -1079,11 +1079,13 @@ int test_mm256_extract_epi32(__m256i A) {
return _mm256_extract_epi32(A, 7);
}
#ifdef __x86_64__
long long test_mm256_extract_epi64(__m256i A) {
// CHECK-LABEL: test_mm256_extract_epi64
// CHECK: extractelement <4 x i64> %{{.*}}, {{i32|i64}} 3
return _mm256_extract_epi64(A, 3);
}
#endif
__m128d test_mm256_extractf128_pd(__m256d A) {
// CHECK-LABEL: test_mm256_extractf128_pd
@ -1157,11 +1159,13 @@ __m256i test_mm256_insert_epi32(__m256i x, int b) {
return _mm256_insert_epi32(x, b, 5);
}
#ifdef __x86_64__
__m256i test_mm256_insert_epi64(__m256i x, long long b) {
// CHECK-LABEL: test_mm256_insert_epi64
// CHECK: insertelement <4 x i64> %{{.*}}, i64 %{{.*}}, {{i32|i64}} 2
return _mm256_insert_epi64(x, b, 2);
}
#endif
__m256d test_mm256_insertf128_pd(__m256d A, __m128d B) {
// CHECK-LABEL: test_mm256_insertf128_pd

View File

@ -65,6 +65,7 @@ int test_mm_tzcnt_32(unsigned int __X) {
return _mm_tzcnt_32(__X);
}
#ifdef __x86_64__
unsigned long long test__andn_u64(unsigned long __X, unsigned long __Y) {
// CHECK-LABEL: test__andn_u64
// CHECK: xor i64 %{{.*}}, -1
@ -110,6 +111,7 @@ long long test_mm_tzcnt_64(unsigned long long __X) {
// CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 false)
return _mm_tzcnt_64(__X);
}
#endif
// Intel intrinsics
@ -164,6 +166,7 @@ unsigned int test_tzcnt_u32(unsigned int __X) {
return _tzcnt_u32(__X);
}
#ifdef __x86_64__
unsigned long long test_andn_u64(unsigned long __X, unsigned long __Y) {
// CHECK-LABEL: test_andn_u64
// CHECK: xor i64 %{{.*}}, -1
@ -209,3 +212,4 @@ unsigned long long test_tzcnt_u64(unsigned long long __X) {
// CHECK: i64 @llvm.cttz.i64(i64 %{{.*}}, i1 false)
return _tzcnt_u64(__X);
}
#endif

View File

@ -28,6 +28,7 @@ unsigned int test_mulx_u32(unsigned int __X, unsigned int __Y,
return _mulx_u32(__X, __Y, __P);
}
#ifdef __x86_64__
unsigned long long test_bzhi_u64(unsigned long long __X, unsigned long long __Y) {
// CHECK: @llvm.x86.bmi.bzhi.64
return _bzhi_u64(__X, __Y);
@ -49,3 +50,4 @@ unsigned long long test_mulx_u64(unsigned long long __X, unsigned long long __Y,
// CHECK: mul i128
return _mulx_u64(__X, __Y, __P);
}
#endif

View File

@ -21,6 +21,7 @@ int test__popcntd(unsigned int __X) {
return __popcntd(__X);
}
#ifdef __x86_64__
#ifdef __POPCNT__
long long test_mm_popcnt_u64(unsigned long long __X) {
//CHECK-POPCNT: call i64 @llvm.ctpop.i64
@ -37,3 +38,4 @@ long long test__popcntq(unsigned long long __X) {
//CHECK: call i64 @llvm.ctpop.i64
return __popcntq(__X);
}
#endif

View File

@ -17,12 +17,14 @@ int rdrand32(unsigned *p) {
// CHECK: store i32
}
#ifdef __x86_64__
int rdrand64(unsigned long long *p) {
return _rdrand64_step(p);
// CHECK: @rdrand64
// CHECK: call { i64, i32 } @llvm.x86.rdrand.64
// CHECK: store i64
}
#endif
int rdseed16(unsigned short *p) {
return _rdseed16_step(p);
@ -38,9 +40,11 @@ int rdseed32(unsigned *p) {
// CHECK: store i32
}
#ifdef __x86_64__
int rdseed64(unsigned long long *p) {
return _rdseed64_step(p);
// CHECK: @rdseed64
// CHECK: call { i64, i32 } @llvm.x86.rdseed.64
// CHECK: store i64
}
#endif

View File

@ -267,12 +267,14 @@ __m128 test_mm_cvtsi32_ss(__m128 A, int B) {
return _mm_cvtsi32_ss(A, B);
}
#ifdef __x86_64__
__m128 test_mm_cvtsi64_ss(__m128 A, long long B) {
// CHECK-LABEL: test_mm_cvtsi64_ss
// CHECK: sitofp i64 %{{.*}} to float
// CHECK: insertelement <4 x float> %{{.*}}, float %{{.*}}, i32 0
return _mm_cvtsi64_ss(A, B);
}
#endif
float test_mm_cvtss_f32(__m128 A) {
// CHECK-LABEL: test_mm_cvtss_f32
@ -286,11 +288,13 @@ int test_mm_cvtss_si32(__m128 A) {
return _mm_cvtss_si32(A);
}
#ifdef __x86_64__
long long test_mm_cvtss_si64(__m128 A) {
// CHECK-LABEL: test_mm_cvtss_si64
// CHECK: call i64 @llvm.x86.sse.cvtss2si64(<4 x float> %{{.*}})
return _mm_cvtss_si64(A);
}
#endif
int test_mm_cvtt_ss2si(__m128 A) {
// CHECK-LABEL: test_mm_cvtt_ss2si
@ -304,11 +308,13 @@ int test_mm_cvttss_si32(__m128 A) {
return _mm_cvttss_si32(A);
}
#ifdef __x86_64__
long long test_mm_cvttss_si64(__m128 A) {
// CHECK-LABEL: test_mm_cvttss_si64
// CHECK: call i64 @llvm.x86.sse.cvttss2si64(<4 x float> %{{.*}})
return _mm_cvttss_si64(A);
}
#endif
__m128 test_mm_div_ps(__m128 A, __m128 B) {
// CHECK-LABEL: test_mm_div_ps

View File

@ -500,11 +500,13 @@ int test_mm_cvtsd_si32(__m128d A) {
return _mm_cvtsd_si32(A);
}
#ifdef __x86_64__
long long test_mm_cvtsd_si64(__m128d A) {
// CHECK-LABEL: test_mm_cvtsd_si64
// CHECK: call i64 @llvm.x86.sse2.cvtsd2si64(<2 x double> %{{.*}})
return _mm_cvtsd_si64(A);
}
#endif
__m128 test_mm_cvtsd_ss(__m128 A, __m128d B) {
// CHECK-LABEL: test_mm_cvtsd_ss
@ -518,11 +520,13 @@ int test_mm_cvtsi128_si32(__m128i A) {
return _mm_cvtsi128_si32(A);
}
#ifdef __x86_64__
long long test_mm_cvtsi128_si64(__m128i A) {
// CHECK-LABEL: test_mm_cvtsi128_si64
// CHECK: extractelement <2 x i64> %{{.*}}, i32 0
return _mm_cvtsi128_si64(A);
}
#endif
__m128d test_mm_cvtsi32_sd(__m128d A, int B) {
// CHECK-LABEL: test_mm_cvtsi32_sd
@ -540,6 +544,7 @@ __m128i test_mm_cvtsi32_si128(int A) {
return _mm_cvtsi32_si128(A);
}
#ifdef __x86_64__
__m128d test_mm_cvtsi64_sd(__m128d A, long long B) {
// CHECK-LABEL: test_mm_cvtsi64_sd
// CHECK: sitofp i64 %{{.*}} to double
@ -553,6 +558,7 @@ __m128i test_mm_cvtsi64_si128(long long A) {
// CHECK: insertelement <2 x i64> %{{.*}}, i64 0, i32 1
return _mm_cvtsi64_si128(A);
}
#endif
__m128d test_mm_cvtss_sd(__m128d A, __m128 B) {
// CHECK-LABEL: test_mm_cvtss_sd
@ -580,11 +586,13 @@ int test_mm_cvttsd_si32(__m128d A) {
return _mm_cvttsd_si32(A);
}
#ifdef __x86_64__
long long test_mm_cvttsd_si64(__m128d A) {
// CHECK-LABEL: test_mm_cvttsd_si64
// CHECK: call i64 @llvm.x86.sse2.cvttsd2si64(<2 x double> %{{.*}})
return _mm_cvttsd_si64(A);
}
#endif
__m128d test_mm_div_pd(__m128d A, __m128d B) {
// CHECK-LABEL: test_mm_div_pd
@ -1492,11 +1500,13 @@ void test_mm_stream_si32(int *A, int B) {
_mm_stream_si32(A, B);
}
#ifdef __x86_64__
void test_mm_stream_si64(long long *A, long long B) {
// CHECK-LABEL: test_mm_stream_si64
// CHECK: store i64 %{{.*}}, i64* %{{.*}}, align 1, !nontemporal
_mm_stream_si64(A, B);
}
#endif
void test_mm_stream_si128(__m128i *A, __m128i B) {
// CHECK-LABEL: test_mm_stream_si128

View File

@ -182,11 +182,13 @@ int test_mm_extract_epi32(__m128i x) {
return _mm_extract_epi32(x, 1);
}
#ifdef __x86_64__
long long test_mm_extract_epi64(__m128i x) {
// CHECK-LABEL: test_mm_extract_epi64
// CHECK: extractelement <2 x i64> %{{.*}}, {{i32|i64}} 1
return _mm_extract_epi64(x, 1);
}
#endif
int test_mm_extract_ps(__m128 x) {
// CHECK-LABEL: test_mm_extract_ps
@ -230,11 +232,13 @@ __m128i test_mm_insert_epi32(__m128i x, int b) {
return _mm_insert_epi32(x, b, 1);
}
#ifdef __x86_64__
__m128i test_mm_insert_epi64(__m128i x, long long b) {
// CHECK-LABEL: test_mm_insert_epi64
// CHECK: insertelement <2 x i64> %{{.*}}, i64 %{{.*}}, {{i32|i64}} 1
return _mm_insert_epi64(x, b, 1);
}
#endif
__m128 test_mm_insert_ps(__m128 x, __m128 y) {
// CHECK-LABEL: test_mm_insert_ps

View File

@ -114,8 +114,10 @@ unsigned int test_mm_crc32_u32(unsigned int CRC, unsigned int V) {
return _mm_crc32_u32(CRC, V);
}
#ifdef __x86_64__
unsigned long long test_mm_crc32_u64(unsigned long long CRC, unsigned long long V) {
// CHECK-LABEL: test_mm_crc32_u64
// CHECK: call i64 @llvm.x86.sse42.crc32.64.64(i64 %{{.*}}, i64 %{{.*}})
return _mm_crc32_u64(CRC, V);
}
#endif

View File

@ -10,6 +10,7 @@ unsigned int test__bextri_u32(unsigned int a) {
return __bextri_u32(a, 1);
}
#ifdef __x86_64__
unsigned long long test__bextri_u64(unsigned long long a) {
// CHECK-LABEL: test__bextri_u64
// CHECK: call i64 @llvm.x86.tbm.bextri.u64(i64 %{{.*}}, i64 2)
@ -21,6 +22,7 @@ unsigned long long test__bextri_u64_bigint(unsigned long long a) {
// CHECK: call i64 @llvm.x86.tbm.bextri.u64(i64 %{{.*}}, i64 549755813887)
return __bextri_u64(a, 0x7fffffffffLL);
}
#endif
unsigned int test__blcfill_u32(unsigned int a) {
// CHECK-LABEL: test__blcfill_u32
@ -29,12 +31,14 @@ unsigned int test__blcfill_u32(unsigned int a) {
return __blcfill_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blcfill_u64(unsigned long long a) {
// CHECK-LABEL: test__blcfill_u64
// CHECK: [[TMP:%.*]] = add i64 %{{.*}}, 1
// CHECK: %{{.*}} = and i64 %{{.*}}, [[TMP]]
return __blcfill_u64(a);
}
#endif
unsigned int test__blci_u32(unsigned int a) {
// CHECK-LABEL: test__blci_u32
@ -44,6 +48,7 @@ unsigned int test__blci_u32(unsigned int a) {
return __blci_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blci_u64(unsigned long long a) {
// CHECK-LABEL: test__blci_u64
// CHECK: [[TMP1:%.*]] = add i64 %{{.*}}, 1
@ -51,6 +56,7 @@ unsigned long long test__blci_u64(unsigned long long a) {
// CHECK: %{{.*}} = or i64 %{{.*}}, [[TMP2]]
return __blci_u64(a);
}
#endif
unsigned int test__blcic_u32(unsigned int a) {
// CHECK-LABEL: test__blcic_u32
@ -60,6 +66,7 @@ unsigned int test__blcic_u32(unsigned int a) {
return __blcic_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blcic_u64(unsigned long long a) {
// CHECK-LABEL: test__blcic_u64
// CHECK: [[TMP1:%.*]] = xor i64 %{{.*}}, -1
@ -67,6 +74,7 @@ unsigned long long test__blcic_u64(unsigned long long a) {
// CHECK-NEXT: {{.*}} = and i64 [[TMP1]], [[TMP2]]
return __blcic_u64(a);
}
#endif
unsigned int test__blcmsk_u32(unsigned int a) {
// CHECK-LABEL: test__blcmsk_u32
@ -75,12 +83,14 @@ unsigned int test__blcmsk_u32(unsigned int a) {
return __blcmsk_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blcmsk_u64(unsigned long long a) {
// CHECK-LABEL: test__blcmsk_u64
// CHECK: [[TMP:%.*]] = add i64 %{{.*}}, 1
// CHECK-NEXT: {{.*}} = xor i64 %{{.*}}, [[TMP]]
return __blcmsk_u64(a);
}
#endif
unsigned int test__blcs_u32(unsigned int a) {
// CHECK-LABEL: test__blcs_u32
@ -89,12 +99,14 @@ unsigned int test__blcs_u32(unsigned int a) {
return __blcs_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blcs_u64(unsigned long long a) {
// CHECK-LABEL: test__blcs_u64
// CHECK: [[TMP:%.*]] = add i64 %{{.*}}, 1
// CHECK-NEXT: {{.*}} = or i64 %{{.*}}, [[TMP]]
return __blcs_u64(a);
}
#endif
unsigned int test__blsfill_u32(unsigned int a) {
// CHECK-LABEL: test__blsfill_u32
@ -103,12 +115,14 @@ unsigned int test__blsfill_u32(unsigned int a) {
return __blsfill_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blsfill_u64(unsigned long long a) {
// CHECK-LABEL: test__blsfill_u64
// CHECK: [[TMP:%.*]] = sub i64 %{{.*}}, 1
// CHECK-NEXT: {{.*}} = or i64 %{{.*}}, [[TMP]]
return __blsfill_u64(a);
}
#endif
unsigned int test__blsic_u32(unsigned int a) {
// CHECK-LABEL: test__blsic_u32
@ -118,6 +132,7 @@ unsigned int test__blsic_u32(unsigned int a) {
return __blsic_u32(a);
}
#ifdef __x86_64__
unsigned long long test__blsic_u64(unsigned long long a) {
// CHECK-LABEL: test__blsic_u64
// CHECK: [[TMP1:%.*]] = xor i64 %{{.*}}, -1
@ -125,6 +140,7 @@ unsigned long long test__blsic_u64(unsigned long long a) {
// CHECK-NEXT: {{.*}} = or i64 [[TMP1]], [[TMP2]]
return __blsic_u64(a);
}
#endif
unsigned int test__t1mskc_u32(unsigned int a) {
// CHECK-LABEL: test__t1mskc_u32
@ -134,6 +150,7 @@ unsigned int test__t1mskc_u32(unsigned int a) {
return __t1mskc_u32(a);
}
#ifdef __x86_64__
unsigned long long test__t1mskc_u64(unsigned long long a) {
// CHECK-LABEL: test__t1mskc_u64
// CHECK: [[TMP1:%.*]] = xor i64 %{{.*}}, -1
@ -141,6 +158,7 @@ unsigned long long test__t1mskc_u64(unsigned long long a) {
// CHECK-NEXT: {{.*}} = or i64 [[TMP1]], [[TMP2]]
return __t1mskc_u64(a);
}
#endif
unsigned int test__tzmsk_u32(unsigned int a) {
// CHECK-LABEL: test__tzmsk_u32
@ -150,6 +168,7 @@ unsigned int test__tzmsk_u32(unsigned int a) {
return __tzmsk_u32(a);
}
#ifdef __x86_64__
unsigned long long test__tzmsk_u64(unsigned long long a) {
// CHECK-LABEL: test__tzmsk_u64
// CHECK: [[TMP1:%.*]] = xor i64 %{{.*}}, -1
@ -157,3 +176,4 @@ unsigned long long test__tzmsk_u64(unsigned long long a) {
// CHECK-NEXT: {{.*}} = and i64 [[TMP1]], [[TMP2]]
return __tzmsk_u64(a);
}
#endif