; llvm-project/llvm/test/CodeGen/X86/avx512bw-vec-test-testn.ll
; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s
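
; Checks lowering of the 512-bit AVX512BW "test"/"testn" mask patterns: an AND
; of the two vector operands, an element-wise compare against zero, and a
; bitcast of the <N x i1> result to a scalar mask, in both unmasked and
; write-masked forms.

; Unmasked 16-bit test: AND the operands, compare the 32 words against zero
; for inequality, and return the <32 x i1> result bitcast to i32. Expected to
; lower to vpandq + vpcmpneqw against a zeroed register, with the mask moved
; to %eax via kmovd.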
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_test_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi16_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpneqw %zmm1, %zmm0, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}
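
; Unmasked 8-bit test: the same AND-and-compare-not-equal pattern over 64
; bytes, returning an i64 mask; expected to lower to vpcmpneqb with the result
; moved to %rax via kmovq.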
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_test_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_test_epi8_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpneqb %zmm1, %zmm0, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
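
; Masked 16-bit test: the <32 x i1> compare result is ANDed with the i32 mask
; %__U (bitcast to <32 x i1>); expected to lower to vpcmpneqw under a {%k1}
; write mask loaded from %edi.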
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_mask_test_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi16_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    kmovd %edi, %k1
; CHECK-NEXT:    vpcmpneqw %zmm1, %zmm0, %k0 {%k1}
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp ne <32 x i16> %0, zeroinitializer
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
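
; Masked 8-bit test: as above but over 64 bytes with an i64 mask, using a
; {%k1} write mask loaded from %rdi via kmovq.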
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_mask_test_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_test_epi8_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vpcmpneqb %zmm1, %zmm0, %k0 {%k1}
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp ne <64 x i8> %0, zeroinitializer
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}
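
; Unmasked 16-bit testn: identical to the test pattern above but with an
; equality compare against zero, so it should select vpcmpeqw instead of
; vpcmpneqw.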
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_testn_epi16_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi16_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqw %zmm1, %zmm0, %k0
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  %2 = bitcast <32 x i1> %1 to i32
  ret i32 %2
}
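
; Unmasked 8-bit testn: equality compare over 64 bytes, expected to select
; vpcmpeqb with the i64 mask returned via kmovq.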
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_testn_epi8_mask(<8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_testn_epi8_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    vpcmpeqb %zmm1, %zmm0, %k0
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  %2 = bitcast <64 x i1> %1 to i64
  ret i64 %2
}
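
; Masked 16-bit testn: the equality-compare result is ANDed with the i32 mask
; %__U, expected to lower to vpcmpeqw under a {%k1} write mask loaded from
; %edi.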
; Function Attrs: norecurse nounwind readnone
define zeroext i32 @TEST_mm512_mask_testn_epi16_mask(i32 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi16_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    kmovd %edi, %k1
; CHECK-NEXT:    vpcmpeqw %zmm1, %zmm0, %k0 {%k1}
; CHECK-NEXT:    kmovd %k0, %eax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <32 x i16>
  %1 = icmp eq <32 x i16> %0, zeroinitializer
  %2 = bitcast i32 %__U to <32 x i1>
  %3 = and <32 x i1> %1, %2
  %4 = bitcast <32 x i1> %3 to i32
  ret i32 %4
}
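
; Masked 8-bit testn: equality compare over 64 bytes ANDed with the i64 mask
; %__U, expected to lower to vpcmpeqb under a {%k1} write mask loaded from
; %rdi via kmovq.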
; Function Attrs: norecurse nounwind readnone
define zeroext i64 @TEST_mm512_mask_testn_epi8_mask(i64 %__U, <8 x i64> %__A, <8 x i64> %__B) local_unnamed_addr #0 {
; CHECK-LABEL: TEST_mm512_mask_testn_epi8_mask:
; CHECK:       # BB#0: # %entry
; CHECK-NEXT:    vpandq %zmm0, %zmm1, %zmm0
; CHECK-NEXT:    vpxor %xmm1, %xmm1, %xmm1
; CHECK-NEXT:    kmovq %rdi, %k1
; CHECK-NEXT:    vpcmpeqb %zmm1, %zmm0, %k0 {%k1}
; CHECK-NEXT:    kmovq %k0, %rax
; CHECK-NEXT:    vzeroupper
; CHECK-NEXT:    retq
entry:
  %and.i.i = and <8 x i64> %__B, %__A
  %0 = bitcast <8 x i64> %and.i.i to <64 x i8>
  %1 = icmp eq <64 x i8> %0, zeroinitializer
  %2 = bitcast i64 %__U to <64 x i1>
  %3 = and <64 x i1> %1, %2
  %4 = bitcast <64 x i1> %3 to i64
  ret i64 %4
}