; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx512fp16 -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512fp16 -mattr=+avx512vl -O3 | FileCheck %s --check-prefixes=X64
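; Check that constrained (strictfp) scalar fp16 arithmetic, conversion, and
; sqrt operations lower to the corresponding AVX512FP16 scalar instructions
; on both 32-bit (X86) and 64-bit (X64) targets.
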
declare half @llvm.experimental.constrained.fadd.f16(half, half, metadata, metadata)
declare half @llvm.experimental.constrained.fsub.f16(half, half, metadata, metadata)
declare half @llvm.experimental.constrained.fmul.f16(half, half, metadata, metadata)
declare half @llvm.experimental.constrained.fdiv.f16(half, half, metadata, metadata)
declare float @llvm.experimental.constrained.fpext.f32.f16(half, metadata)
declare double @llvm.experimental.constrained.fpext.f64.f16(half, metadata)
declare half @llvm.experimental.constrained.fptrunc.f16.f32(float, metadata, metadata)
declare half @llvm.experimental.constrained.fptrunc.f16.f64(double, metadata, metadata)
declare half @llvm.experimental.constrained.sqrt.f16(half, metadata, metadata)
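; Constrained fadd on half should select vaddsh. The X86 checks load one
; operand with vmovsh and fold the other into vaddsh as a memory operand,
; since half arguments arrive on the stack there; on X64 both operands
; arrive in XMM registers.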
define half @fadd_f16(half %a, half %b) nounwind strictfp {
; X86-LABEL: fadd_f16:
; X86:       # %bb.0:
; X86-NEXT:    vmovsh {{[0-9]+}}(%esp), %xmm0
; X86-NEXT:    vaddsh {{[0-9]+}}(%esp), %xmm0, %xmm0
; X86-NEXT:    retl
;
; X64-LABEL: fadd_f16:
; X64:       # %bb.0:
; X64-NEXT:    vaddsh %xmm1, %xmm0, %xmm0
; X64-NEXT:    retq
  %ret = call half @llvm.experimental.constrained.fadd.f16(half %a, half %b,
                                                           metadata !"round.dynamic",
                                                           metadata !"fpexcept.strict") #0
  ret half %ret
}
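; Constrained fsub on half follows the same pattern and should select vsubsh.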
define half @fsub_f16(half %a, half %b) nounwind strictfp {
; X86-LABEL: fsub_f16:
; X86:       # %bb.0:
; X86-NEXT:    vmovsh {{[0-9]+}}(%esp), %xmm0
; X86-NEXT:    vsubsh {{[0-9]+}}(%esp), %xmm0, %xmm0
; X86-NEXT:    retl
;
; X64-LABEL: fsub_f16:
; X64:       # %bb.0:
; X64-NEXT:    vsubsh %xmm1, %xmm0, %xmm0
; X64-NEXT:    retq
  %ret = call half @llvm.experimental.constrained.fsub.f16(half %a, half %b,
                                                           metadata !"round.dynamic",
                                                           metadata !"fpexcept.strict") #0
  ret half %ret
}
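; Constrained fmul on half should select vmulsh.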
define half @fmul_f16(half %a, half %b) nounwind strictfp {
; X86-LABEL: fmul_f16:
; X86:       # %bb.0:
; X86-NEXT:    vmovsh {{[0-9]+}}(%esp), %xmm0
; X86-NEXT:    vmulsh {{[0-9]+}}(%esp), %xmm0, %xmm0
; X86-NEXT:    retl
;
; X64-LABEL: fmul_f16:
; X64:       # %bb.0:
; X64-NEXT:    vmulsh %xmm1, %xmm0, %xmm0
; X64-NEXT:    retq
  %ret = call half @llvm.experimental.constrained.fmul.f16(half %a, half %b,
                                                           metadata !"round.dynamic",
                                                           metadata !"fpexcept.strict") #0
  ret half %ret
}
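; Constrained fdiv on half should select vdivsh.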
define half @fdiv_f16(half %a, half %b) nounwind strictfp {
; X86-LABEL: fdiv_f16:
; X86:       # %bb.0:
; X86-NEXT:    vmovsh {{[0-9]+}}(%esp), %xmm0
; X86-NEXT:    vdivsh {{[0-9]+}}(%esp), %xmm0, %xmm0
; X86-NEXT:    retl
;
; X64-LABEL: fdiv_f16:
; X64:       # %bb.0:
; X64-NEXT:    vdivsh %xmm1, %xmm0, %xmm0
; X64-NEXT:    retq
  %ret = call half @llvm.experimental.constrained.fdiv.f16(half %a, half %b,
                                                           metadata !"round.dynamic",
                                                           metadata !"fpexcept.strict") #0
  ret half %ret
}
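; Strict fpext from half to float: load with vmovsh, widen with vcvtsh2ss,
; store with vmovss.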
define void @fpext_f16_to_f32(half* %val, float* %ret) nounwind strictfp {
; X86-LABEL: fpext_f16_to_f32:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    vmovsh (%ecx), %xmm0
; X86-NEXT:    vcvtsh2ss %xmm0, %xmm0, %xmm0
; X86-NEXT:    vmovss %xmm0, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: fpext_f16_to_f32:
; X64:       # %bb.0:
; X64-NEXT:    vmovsh (%rdi), %xmm0
; X64-NEXT:    vcvtsh2ss %xmm0, %xmm0, %xmm0
; X64-NEXT:    vmovss %xmm0, (%rsi)
; X64-NEXT:    retq
  %1 = load half, half* %val, align 4
  %res = call float @llvm.experimental.constrained.fpext.f32.f16(half %1,
                                                                 metadata !"fpexcept.strict") #0
  store float %res, float* %ret, align 8
  ret void
}
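; Strict fpext from half to double: expect vcvtsh2sd.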
define void @fpext_f16_to_f64(half* %val, double* %ret) nounwind strictfp {
; X86-LABEL: fpext_f16_to_f64:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    vmovsh (%ecx), %xmm0
; X86-NEXT:    vcvtsh2sd %xmm0, %xmm0, %xmm0
; X86-NEXT:    vmovsd %xmm0, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: fpext_f16_to_f64:
; X64:       # %bb.0:
; X64-NEXT:    vmovsh (%rdi), %xmm0
; X64-NEXT:    vcvtsh2sd %xmm0, %xmm0, %xmm0
; X64-NEXT:    vmovsd %xmm0, (%rsi)
; X64-NEXT:    retq
  %1 = load half, half* %val, align 4
  %res = call double @llvm.experimental.constrained.fpext.f64.f16(half %1,
                                                                  metadata !"fpexcept.strict") #0
  store double %res, double* %ret, align 8
  ret void
}
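; Strict fptrunc from float to half: narrow with vcvtss2sh, store with vmovsh.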
define void @fptrunc_float_to_f16(float* %val, half* %ret) nounwind strictfp {
; X86-LABEL: fptrunc_float_to_f16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X86-NEXT:    vcvtss2sh %xmm0, %xmm0, %xmm0
; X86-NEXT:    vmovsh %xmm0, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: fptrunc_float_to_f16:
; X64:       # %bb.0:
; X64-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-NEXT:    vcvtss2sh %xmm0, %xmm0, %xmm0
; X64-NEXT:    vmovsh %xmm0, (%rsi)
; X64-NEXT:    retq
  %1 = load float, float* %val, align 8
  %res = call half @llvm.experimental.constrained.fptrunc.f16.f32(float %1,
                                                                  metadata !"round.dynamic",
                                                                  metadata !"fpexcept.strict") #0
  store half %res, half* %ret, align 4
  ret void
}
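; Strict fptrunc from double to half: expect vcvtsd2sh.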
define void @fptrunc_double_to_f16(double* %val, half* %ret) nounwind strictfp {
; X86-LABEL: fptrunc_double_to_f16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; X86-NEXT:    vcvtsd2sh %xmm0, %xmm0, %xmm0
; X86-NEXT:    vmovsh %xmm0, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: fptrunc_double_to_f16:
; X64:       # %bb.0:
; X64-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; X64-NEXT:    vcvtsd2sh %xmm0, %xmm0, %xmm0
; X64-NEXT:    vmovsh %xmm0, (%rsi)
; X64-NEXT:    retq
  %1 = load double, double* %val, align 8
  %res = call half @llvm.experimental.constrained.fptrunc.f16.f64(double %1,
                                                                  metadata !"round.dynamic",
                                                                  metadata !"fpexcept.strict") #0
  store half %res, half* %ret, align 4
  ret void
}
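; Constrained sqrt on half: expect vsqrtsh, reading and writing the value
; through the same pointer.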
define void @fsqrt_f16(half* %a) nounwind strictfp {
; X86-LABEL: fsqrt_f16:
; X86:       # %bb.0:
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    vmovsh (%eax), %xmm0
; X86-NEXT:    vsqrtsh %xmm0, %xmm0, %xmm0
; X86-NEXT:    vmovsh %xmm0, (%eax)
; X86-NEXT:    retl
;
; X64-LABEL: fsqrt_f16:
; X64:       # %bb.0:
; X64-NEXT:    vmovsh (%rdi), %xmm0
; X64-NEXT:    vsqrtsh %xmm0, %xmm0, %xmm0
; X64-NEXT:    vmovsh %xmm0, (%rdi)
; X64-NEXT:    retq
  %1 = load half, half* %a, align 4
  %res = call half @llvm.experimental.constrained.sqrt.f16(half %1,
                                                           metadata !"round.dynamic",
                                                           metadata !"fpexcept.strict") #0
  store half %res, half* %a, align 4
  ret void
}
attributes #0 = { strictfp }