; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -print-schedule -mattr=+bmi2 | FileCheck %s --check-prefix=CHECK --check-prefix=GENERIC
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -print-schedule -mcpu=haswell | FileCheck %s --check-prefix=CHECK --check-prefix=HASWELL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -print-schedule -mcpu=skylake | FileCheck %s --check-prefix=CHECK --check-prefix=HASWELL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -print-schedule -mcpu=knl | FileCheck %s --check-prefix=CHECK --check-prefix=HASWELL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -print-schedule -mcpu=znver1 | FileCheck %s --check-prefix=CHECK --check-prefix=ZNVER1
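
; Descriptive comments (not from the original file): this test checks the
; scheduling annotations that llc prints with -print-schedule for the BMI2
; instructions BZHI, PDEP and PEXT, on a generic BMI2-enabled x86-64 target,
; on CPUs that share the Haswell checks (haswell, skylake, knl), and on
; znver1. Where a line shows "sched: [?:0.000000e+00]", the znver1 model
; presumably provides no scheduling data for that instruction yet.

; The BZHI tests exercise both the register and the load-folded form of the
; instruction: each function applies BZHI (clear the bits at and above the
; bit index given in %a0) to a value loaded from %a2 and to %a1, then adds
; the two results.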
define i32 @test_bzhi_i32(i32 %a0, i32 %a1, i32 *%a2) {
; GENERIC-LABEL: test_bzhi_i32:
; GENERIC: # BB#0:
; GENERIC-NEXT: bzhil %edi, (%rdx), %ecx
; GENERIC-NEXT: bzhil %edi, %esi, %eax
; GENERIC-NEXT: addl %ecx, %eax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_bzhi_i32:
; HASWELL: # BB#0:
; HASWELL-NEXT: bzhil %edi, (%rdx), %ecx # sched: [4:0.50]
; HASWELL-NEXT: bzhil %edi, %esi, %eax # sched: [1:0.50]
; HASWELL-NEXT: addl %ecx, %eax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_bzhi_i32:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: bzhil %edi, (%rdx), %ecx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: bzhil %edi, %esi, %eax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addl %ecx, %eax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i32, i32 *%a2
  %2 = tail call i32 @llvm.x86.bmi.bzhi.32(i32 %1, i32 %a0)
  %3 = tail call i32 @llvm.x86.bmi.bzhi.32(i32 %a1, i32 %a0)
  %4 = add i32 %2, %3
  ret i32 %4
}
declare i32 @llvm.x86.bmi.bzhi.32(i32, i32)

define i64 @test_bzhi_i64(i64 %a0, i64 %a1, i64 *%a2) {
; GENERIC-LABEL: test_bzhi_i64:
; GENERIC: # BB#0:
; GENERIC-NEXT: bzhiq %rdi, (%rdx), %rcx
; GENERIC-NEXT: bzhiq %rdi, %rsi, %rax
; GENERIC-NEXT: addq %rcx, %rax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_bzhi_i64:
; HASWELL: # BB#0:
; HASWELL-NEXT: bzhiq %rdi, (%rdx), %rcx # sched: [4:0.50]
; HASWELL-NEXT: bzhiq %rdi, %rsi, %rax # sched: [1:0.50]
; HASWELL-NEXT: addq %rcx, %rax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_bzhi_i64:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: bzhiq %rdi, (%rdx), %rcx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: bzhiq %rdi, %rsi, %rax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addq %rcx, %rax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i64, i64 *%a2
  %2 = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %1, i64 %a0)
  %3 = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %a1, i64 %a0)
  %4 = add i64 %2, %3
  ret i64 %4
}
declare i64 @llvm.x86.bmi.bzhi.64(i64, i64)

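; Descriptive comment (not from the original file): the PDEP tests deposit
; the low bits of %a0 into the bit positions selected by a mask taken either
; from memory (%a2, load-folded form) or from %a1 (register form), then add
; the two results.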
define i32 @test_pdep_i32(i32 %a0, i32 %a1, i32 *%a2) {
; GENERIC-LABEL: test_pdep_i32:
; GENERIC: # BB#0:
; GENERIC-NEXT: pdepl (%rdx), %edi, %ecx
; GENERIC-NEXT: pdepl %esi, %edi, %eax
; GENERIC-NEXT: addl %ecx, %eax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_pdep_i32:
; HASWELL: # BB#0:
; HASWELL-NEXT: pdepl (%rdx), %edi, %ecx # sched: [7:1.00]
; HASWELL-NEXT: pdepl %esi, %edi, %eax # sched: [3:1.00]
; HASWELL-NEXT: addl %ecx, %eax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_pdep_i32:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: pdepl (%rdx), %edi, %ecx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: pdepl %esi, %edi, %eax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addl %ecx, %eax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i32, i32 *%a2
  %2 = tail call i32 @llvm.x86.bmi.pdep.32(i32 %a0, i32 %1)
  %3 = tail call i32 @llvm.x86.bmi.pdep.32(i32 %a0, i32 %a1)
  %4 = add i32 %2, %3
  ret i32 %4
}
declare i32 @llvm.x86.bmi.pdep.32(i32, i32)

define i64 @test_pdep_i64(i64 %a0, i64 %a1, i64 *%a2) {
; GENERIC-LABEL: test_pdep_i64:
; GENERIC: # BB#0:
; GENERIC-NEXT: pdepq (%rdx), %rdi, %rcx
; GENERIC-NEXT: pdepq %rsi, %rdi, %rax
; GENERIC-NEXT: addq %rcx, %rax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_pdep_i64:
; HASWELL: # BB#0:
; HASWELL-NEXT: pdepq (%rdx), %rdi, %rcx # sched: [7:1.00]
; HASWELL-NEXT: pdepq %rsi, %rdi, %rax # sched: [3:1.00]
; HASWELL-NEXT: addq %rcx, %rax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_pdep_i64:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: pdepq (%rdx), %rdi, %rcx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: pdepq %rsi, %rdi, %rax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addq %rcx, %rax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i64, i64 *%a2
  %2 = tail call i64 @llvm.x86.bmi.pdep.64(i64 %a0, i64 %1)
  %3 = tail call i64 @llvm.x86.bmi.pdep.64(i64 %a0, i64 %a1)
  %4 = add i64 %2, %3
  ret i64 %4
}
declare i64 @llvm.x86.bmi.pdep.64(i64, i64)

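; Descriptive comment (not from the original file): the PEXT tests perform
; the inverse operation, gathering the bits of %a0 selected by a mask from
; memory (%a2, load-folded form) or from %a1 (register form) into the low
; bits of the result, then adding the two results.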
define i32 @test_pext_i32(i32 %a0, i32 %a1, i32 *%a2) {
; GENERIC-LABEL: test_pext_i32:
; GENERIC: # BB#0:
; GENERIC-NEXT: pextl (%rdx), %edi, %ecx
; GENERIC-NEXT: pextl %esi, %edi, %eax
; GENERIC-NEXT: addl %ecx, %eax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_pext_i32:
; HASWELL: # BB#0:
; HASWELL-NEXT: pextl (%rdx), %edi, %ecx # sched: [7:1.00]
; HASWELL-NEXT: pextl %esi, %edi, %eax # sched: [3:1.00]
; HASWELL-NEXT: addl %ecx, %eax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_pext_i32:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: pextl (%rdx), %edi, %ecx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: pextl %esi, %edi, %eax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addl %ecx, %eax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i32, i32 *%a2
  %2 = tail call i32 @llvm.x86.bmi.pext.32(i32 %a0, i32 %1)
  %3 = tail call i32 @llvm.x86.bmi.pext.32(i32 %a0, i32 %a1)
  %4 = add i32 %2, %3
  ret i32 %4
}
declare i32 @llvm.x86.bmi.pext.32(i32, i32)

define i64 @test_pext_i64(i64 %a0, i64 %a1, i64 *%a2) {
; GENERIC-LABEL: test_pext_i64:
; GENERIC: # BB#0:
; GENERIC-NEXT: pextq (%rdx), %rdi, %rcx
; GENERIC-NEXT: pextq %rsi, %rdi, %rax
; GENERIC-NEXT: addq %rcx, %rax
; GENERIC-NEXT: retq
;
; HASWELL-LABEL: test_pext_i64:
; HASWELL: # BB#0:
; HASWELL-NEXT: pextq (%rdx), %rdi, %rcx # sched: [7:1.00]
; HASWELL-NEXT: pextq %rsi, %rdi, %rax # sched: [3:1.00]
; HASWELL-NEXT: addq %rcx, %rax # sched: [1:0.25]
; HASWELL-NEXT: retq # sched: [1:1.00]
;
; ZNVER1-LABEL: test_pext_i64:
; ZNVER1: # BB#0:
; ZNVER1-NEXT: pextq (%rdx), %rdi, %rcx # sched: [?:0.000000e+00]
; ZNVER1-NEXT: pextq %rsi, %rdi, %rax # sched: [?:0.000000e+00]
; ZNVER1-NEXT: addq %rcx, %rax # sched: [1:0.25]
; ZNVER1-NEXT: retq # sched: [5:0.50]
  %1 = load i64, i64 *%a2
  %2 = tail call i64 @llvm.x86.bmi.pext.64(i64 %a0, i64 %1)
  %3 = tail call i64 @llvm.x86.bmi.pext.64(i64 %a0, i64 %a1)
  %4 = add i64 %2, %3
  ret i64 %4
}
declare i64 @llvm.x86.bmi.pext.64(i64, i64)