; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-linux-android -mattr=+sse | FileCheck %s --check-prefixes=X64,X64-SSE
; RUN: llc < %s -mtriple=x86_64-linux-gnu -mattr=+sse | FileCheck %s --check-prefixes=X64,X64-SSE
; RUN: llc < %s -mtriple=x86_64-linux-android -mattr=+avx | FileCheck %s --check-prefixes=X64,X64-AVX
; RUN: llc < %s -mtriple=x86_64-linux-gnu -mattr=+avx | FileCheck %s --check-prefixes=X64,X64-AVX
; RUN: llc < %s -mtriple=x86_64-linux-android -mattr=+avx512fp16 | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX512
; RUN: llc < %s -mtriple=x86_64-linux-gnu -mattr=+avx512fp16 | FileCheck %s --check-prefixes=X64,X64-AVX,X64-AVX512
; RUN: llc < %s -mtriple=i686-linux-gnu -mattr=-sse | FileCheck %s --check-prefixes=X86

; Check soft floating point conversion function calls.
; Global variables of every FP width exercised by the conversion tests below.
@vf16 = common dso_local global half 0.000000e+00, align 2
@vf32 = common dso_local global float 0.000000e+00, align 4
@vf64 = common dso_local global double 0.000000e+00, align 8
@vf80 = common dso_local global x86_fp80 0xK00000000000000000000, align 8
@vf128 = common dso_local global fp128 0xL00000000000000000000000000000000, align 16
; Strict half -> fp128 extension: libcall (__gnu_h2f_ieee + __extendsftf2, or
; __extendhftf2 directly with avx512fp16).
define dso_local void @TestFPExtF16_F128() nounwind strictfp {
; X64-SSE-LABEL: TestFPExtF16_F128:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movzwl vf16(%rip), %edi
; X64-SSE-NEXT: callq __gnu_h2f_ieee@PLT
; X64-SSE-NEXT: callq __extendsftf2@PLT
; X64-SSE-NEXT: movaps %xmm0, vf128(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX512-LABEL: TestFPExtF16_F128:
; X64-AVX512: # %bb.0: # %entry
; X64-AVX512-NEXT: pushq %rax
; X64-AVX512-NEXT: vmovsh vf16(%rip), %xmm0
; X64-AVX512-NEXT: callq __extendhftf2@PLT
; X64-AVX512-NEXT: vmovaps %xmm0, vf128(%rip)
; X64-AVX512-NEXT: popq %rax
; X64-AVX512-NEXT: retq
;
; X86-LABEL: TestFPExtF16_F128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %esi
; X86-NEXT: subl $24, %esp
; X86-NEXT: movzwl vf16, %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll __gnu_h2f_ieee
; X86-NEXT: fstps {{[0-9]+}}(%esp)
; X86-NEXT: wait
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll __extendsftf2
; X86-NEXT: subl $4, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl %esi, vf128+12
; X86-NEXT: movl %edx, vf128+8
; X86-NEXT: movl %ecx, vf128+4
; X86-NEXT: movl %eax, vf128
; X86-NEXT: addl $24, %esp
; X86-NEXT: popl %esi
; X86-NEXT: retl
entry:
  %0 = load half, half* @vf16, align 2
  %conv = call fp128 @llvm.experimental.constrained.fpext.f128.f16(half %0, metadata !"fpexcept.strict") #0
  store fp128 %conv, fp128* @vf128, align 16
  ret void
}
; Strict float -> fp128 extension: libcall __extendsftf2.
define dso_local void @TestFPExtF32_F128() nounwind strictfp {
; X64-SSE-LABEL: TestFPExtF32_F128:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-SSE-NEXT: callq __extendsftf2@PLT
; X64-SSE-NEXT: movaps %xmm0, vf128(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPExtF32_F128:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: pushq %rax
; X64-AVX-NEXT: vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X64-AVX-NEXT: callq __extendsftf2@PLT
; X64-AVX-NEXT: vmovaps %xmm0, vf128(%rip)
; X64-AVX-NEXT: popq %rax
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPExtF32_F128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %esi
; X86-NEXT: subl $24, %esp
; X86-NEXT: flds vf32
; X86-NEXT: fstps {{[0-9]+}}(%esp)
; X86-NEXT: wait
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll __extendsftf2
; X86-NEXT: subl $4, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl %esi, vf128+12
; X86-NEXT: movl %edx, vf128+8
; X86-NEXT: movl %ecx, vf128+4
; X86-NEXT: movl %eax, vf128
; X86-NEXT: addl $24, %esp
; X86-NEXT: popl %esi
; X86-NEXT: retl
entry:
  %0 = load float, float* @vf32, align 4
  %conv = call fp128 @llvm.experimental.constrained.fpext.f128.f32(float %0, metadata !"fpexcept.strict") #0
  store fp128 %conv, fp128* @vf128, align 16
  ret void
}
; Strict double -> fp128 extension: libcall __extenddftf2.
define dso_local void @TestFPExtF64_F128() nounwind strictfp {
; X64-SSE-LABEL: TestFPExtF64_F128:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
; X64-SSE-NEXT: callq __extenddftf2@PLT
; X64-SSE-NEXT: movaps %xmm0, vf128(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPExtF64_F128:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: pushq %rax
; X64-AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
; X64-AVX-NEXT: callq __extenddftf2@PLT
; X64-AVX-NEXT: vmovaps %xmm0, vf128(%rip)
; X64-AVX-NEXT: popq %rax
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPExtF64_F128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %esi
; X86-NEXT: subl $40, %esp
; X86-NEXT: fldl vf64
; X86-NEXT: fstpl {{[0-9]+}}(%esp)
; X86-NEXT: wait
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll __extenddftf2
; X86-NEXT: subl $4, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl %esi, vf128+12
; X86-NEXT: movl %edx, vf128+8
; X86-NEXT: movl %ecx, vf128+4
; X86-NEXT: movl %eax, vf128
; X86-NEXT: addl $40, %esp
; X86-NEXT: popl %esi
; X86-NEXT: retl
entry:
  %0 = load double, double* @vf64, align 8
  %conv = call fp128 @llvm.experimental.constrained.fpext.f128.f64(double %0, metadata !"fpexcept.strict") #0
  store fp128 %conv, fp128* @vf128, align 16
  ret void
}
; Strict x86_fp80 -> fp128 extension: argument passed via x87 stack, libcall
; __extendxftf2.
define dso_local void @TestFPExtF80_F128() nounwind strictfp {
; X64-SSE-LABEL: TestFPExtF80_F128:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: subq $24, %rsp
; X64-SSE-NEXT: fldt vf80(%rip)
; X64-SSE-NEXT: fstpt (%rsp)
; X64-SSE-NEXT: wait
; X64-SSE-NEXT: callq __extendxftf2@PLT
; X64-SSE-NEXT: movaps %xmm0, vf128(%rip)
; X64-SSE-NEXT: addq $24, %rsp
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPExtF80_F128:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: subq $24, %rsp
; X64-AVX-NEXT: fldt vf80(%rip)
; X64-AVX-NEXT: fstpt (%rsp)
; X64-AVX-NEXT: wait
; X64-AVX-NEXT: callq __extendxftf2@PLT
; X64-AVX-NEXT: vmovaps %xmm0, vf128(%rip)
; X64-AVX-NEXT: addq $24, %rsp
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPExtF80_F128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %esi
; X86-NEXT: subl $40, %esp
; X86-NEXT: fldt vf80
; X86-NEXT: fstpt {{[0-9]+}}(%esp)
; X86-NEXT: wait
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, (%esp)
; X86-NEXT: calll __extendxftf2
; X86-NEXT: subl $4, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movl %esi, vf128+12
; X86-NEXT: movl %edx, vf128+8
; X86-NEXT: movl %ecx, vf128+4
; X86-NEXT: movl %eax, vf128
; X86-NEXT: addl $40, %esp
; X86-NEXT: popl %esi
; X86-NEXT: retl
entry:
  %0 = load x86_fp80, x86_fp80* @vf80, align 8
  %conv = call fp128 @llvm.experimental.constrained.fpext.f128.f80(x86_fp80 %0, metadata !"fpexcept.strict") #0
  store fp128 %conv, fp128* @vf128, align 16
  ret void
}
; Strict fp128 -> half truncation: libcall __trunctfhf2.
define dso_local void @TestFPTruncF128_F16() nounwind strictfp {
; X64-SSE-LABEL: TestFPTruncF128_F16:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movaps vf128(%rip), %xmm0
; X64-SSE-NEXT: callq __trunctfhf2@PLT
; X64-SSE-NEXT: movw %ax, vf16(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX512-LABEL: TestFPTruncF128_F16:
; X64-AVX512: # %bb.0: # %entry
; X64-AVX512-NEXT: pushq %rax
; X64-AVX512-NEXT: vmovaps vf128(%rip), %xmm0
; X64-AVX512-NEXT: callq __trunctfhf2@PLT
; X64-AVX512-NEXT: vmovsh %xmm0, vf16(%rip)
; X64-AVX512-NEXT: popq %rax
; X64-AVX512-NEXT: retq
;
; X86-LABEL: TestFPTruncF128_F16:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl vf128+12
; X86-NEXT: pushl vf128+8
; X86-NEXT: pushl vf128+4
; X86-NEXT: pushl vf128
; X86-NEXT: calll __trunctfhf2
; X86-NEXT: addl $16, %esp
; X86-NEXT: movw %ax, vf16
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %0 = load fp128, fp128* @vf128, align 16
  %conv = call half @llvm.experimental.constrained.fptrunc.f16.f128(fp128 %0, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  store half %conv, half* @vf16, align 2
  ret void
}
; Strict fp128 -> float truncation: libcall __trunctfsf2.
define dso_local void @TestFPTruncF128_F32() nounwind strictfp {
; X64-SSE-LABEL: TestFPTruncF128_F32:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movaps vf128(%rip), %xmm0
; X64-SSE-NEXT: callq __trunctfsf2@PLT
; X64-SSE-NEXT: movss %xmm0, vf32(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPTruncF128_F32:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: pushq %rax
; X64-AVX-NEXT: vmovaps vf128(%rip), %xmm0
; X64-AVX-NEXT: callq __trunctfsf2@PLT
; X64-AVX-NEXT: vmovss %xmm0, vf32(%rip)
; X64-AVX-NEXT: popq %rax
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPTruncF128_F32:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl vf128+12
; X86-NEXT: pushl vf128+8
; X86-NEXT: pushl vf128+4
; X86-NEXT: pushl vf128
; X86-NEXT: calll __trunctfsf2
; X86-NEXT: addl $16, %esp
; X86-NEXT: fstps vf32
; X86-NEXT: wait
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %0 = load fp128, fp128* @vf128, align 16
  %conv = call float @llvm.experimental.constrained.fptrunc.f32.f128(fp128 %0, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  store float %conv, float* @vf32, align 4
  ret void
}
; Strict fp128 -> double truncation: libcall __trunctfdf2.
define dso_local void @TestFPTruncF128_F64() nounwind strictfp {
; X64-SSE-LABEL: TestFPTruncF128_F64:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movaps vf128(%rip), %xmm0
; X64-SSE-NEXT: callq __trunctfdf2@PLT
; X64-SSE-NEXT: movsd %xmm0, vf64(%rip)
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPTruncF128_F64:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: pushq %rax
; X64-AVX-NEXT: vmovaps vf128(%rip), %xmm0
; X64-AVX-NEXT: callq __trunctfdf2@PLT
; X64-AVX-NEXT: vmovsd %xmm0, vf64(%rip)
; X64-AVX-NEXT: popq %rax
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPTruncF128_F64:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl vf128+12
; X86-NEXT: pushl vf128+8
; X86-NEXT: pushl vf128+4
; X86-NEXT: pushl vf128
; X86-NEXT: calll __trunctfdf2
; X86-NEXT: addl $16, %esp
; X86-NEXT: fstpl vf64
; X86-NEXT: wait
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %0 = load fp128, fp128* @vf128, align 16
  %conv = call double @llvm.experimental.constrained.fptrunc.f64.f128(fp128 %0, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  store double %conv, double* @vf64, align 8
  ret void
}
; Strict fp128 -> x86_fp80 truncation: libcall __trunctfxf2, result returned
; on the x87 stack.
define dso_local void @TestFPTruncF128_F80() nounwind strictfp {
; X64-SSE-LABEL: TestFPTruncF128_F80:
; X64-SSE: # %bb.0: # %entry
; X64-SSE-NEXT: pushq %rax
; X64-SSE-NEXT: movaps vf128(%rip), %xmm0
; X64-SSE-NEXT: callq __trunctfxf2@PLT
; X64-SSE-NEXT: fstpt vf80(%rip)
; X64-SSE-NEXT: wait
; X64-SSE-NEXT: popq %rax
; X64-SSE-NEXT: retq
;
; X64-AVX-LABEL: TestFPTruncF128_F80:
; X64-AVX: # %bb.0: # %entry
; X64-AVX-NEXT: pushq %rax
; X64-AVX-NEXT: vmovaps vf128(%rip), %xmm0
; X64-AVX-NEXT: callq __trunctfxf2@PLT
; X64-AVX-NEXT: fstpt vf80(%rip)
; X64-AVX-NEXT: wait
; X64-AVX-NEXT: popq %rax
; X64-AVX-NEXT: retq
;
; X86-LABEL: TestFPTruncF128_F80:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl vf128+12
; X86-NEXT: pushl vf128+8
; X86-NEXT: pushl vf128+4
; X86-NEXT: pushl vf128
; X86-NEXT: calll __trunctfxf2
; X86-NEXT: addl $16, %esp
; X86-NEXT: fstpt vf80
; X86-NEXT: wait
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %0 = load fp128, fp128* @vf128, align 16
  %conv = call x86_fp80 @llvm.experimental.constrained.fptrunc.f80.f128(fp128 %0, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  store x86_fp80 %conv, x86_fp80* @vf80, align 8
  ret void
}
; Strict fp128 -> i8: lowered via the i32 libcall __fixtfsi, then truncated.
define dso_local i8 @fptosi_i8(fp128 %x) nounwind strictfp {
; X64-LABEL: fptosi_i8:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfsi@PLT
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptosi_i8:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixtfsi
; X86-NEXT: addl $16, %esp
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %conv = call i8 @llvm.experimental.constrained.fptosi.i8.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i8 %conv
}
; Strict fp128 -> i16: lowered via the i32 libcall __fixtfsi, then truncated.
define i16 @fptosi_i16(fp128 %x) nounwind strictfp {
; X64-LABEL: fptosi_i16:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfsi@PLT
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptosi_i16:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixtfsi
; X86-NEXT: addl $16, %esp
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %conv = call i16 @llvm.experimental.constrained.fptosi.i16.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i16 %conv
}
; Strict fp128 -> i32: libcall __fixtfsi.
define dso_local i32 @fptosi_i32(fp128 %x) nounwind strictfp {
; X64-LABEL: fptosi_i32:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfsi@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptosi_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixtfsi
; X86-NEXT: addl $28, %esp
; X86-NEXT: retl
entry:
  %conv = call i32 @llvm.experimental.constrained.fptosi.i32.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i32 %conv
}
; Strict fp128 -> i64: libcall __fixtfdi.
define i64 @fptosi_i64(fp128 %x) nounwind strictfp {
; X64-LABEL: fptosi_i64:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfdi@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptosi_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixtfdi
; X86-NEXT: addl $28, %esp
; X86-NEXT: retl
entry:
  %conv = call i64 @llvm.experimental.constrained.fptosi.i64.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i64 %conv
}
; Strict fp128 -> i128: libcall __fixtfti; on i686 the i128 result is
; returned through an sret pointer in %esi.
define i128 @fptosi_i128(fp128 %x) nounwind strictfp {
; X64-LABEL: fptosi_i128:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfti@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptosi_i128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $12, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __fixtfti
; X86-NEXT: addl $28, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call i128 @llvm.experimental.constrained.fptosi.i128.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i128 %conv
}
; Strict fp128 -> u8: x86-64 promotes through the signed i32 libcall
; __fixtfsi; i686 uses __fixunstfsi.
define dso_local i8 @fptoui_i8(fp128 %x) nounwind strictfp {
; X64-LABEL: fptoui_i8:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfsi@PLT
; X64-NEXT: # kill: def $al killed $al killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptoui_i8:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixunstfsi
; X86-NEXT: addl $16, %esp
; X86-NEXT: # kill: def $al killed $al killed $eax
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %conv = call i8 @llvm.experimental.constrained.fptoui.i8.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i8 %conv
}
; Strict fp128 -> u16: x86-64 promotes through the signed i32 libcall
; __fixtfsi; i686 uses __fixunstfsi.
define i16 @fptoui_i16(fp128 %x) nounwind strictfp {
; X64-LABEL: fptoui_i16:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixtfsi@PLT
; X64-NEXT: # kill: def $ax killed $ax killed $eax
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptoui_i16:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixunstfsi
; X86-NEXT: addl $16, %esp
; X86-NEXT: # kill: def $ax killed $ax killed $eax
; X86-NEXT: addl $12, %esp
; X86-NEXT: retl
entry:
  %conv = call i16 @llvm.experimental.constrained.fptoui.i16.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i16 %conv
}
; Strict fp128 -> u32: libcall __fixunstfsi.
define dso_local i32 @fptoui_i32(fp128 %x) nounwind strictfp {
; X64-LABEL: fptoui_i32:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixunstfsi@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptoui_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixunstfsi
; X86-NEXT: addl $28, %esp
; X86-NEXT: retl
entry:
  %conv = call i32 @llvm.experimental.constrained.fptoui.i32.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i32 %conv
}
; Strict fp128 -> u64: libcall __fixunstfdi.
define i64 @fptoui_i64(fp128 %x) nounwind strictfp {
; X64-LABEL: fptoui_i64:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixunstfdi@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptoui_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: subl $12, %esp
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: calll __fixunstfdi
; X86-NEXT: addl $28, %esp
; X86-NEXT: retl
entry:
  %conv = call i64 @llvm.experimental.constrained.fptoui.i64.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i64 %conv
}
; Strict fp128 -> u128: libcall __fixunstfti; on i686 the i128 result is
; returned through an sret pointer in %esi.
define i128 @fptoui_i128(fp128 %x) nounwind strictfp {
; X64-LABEL: fptoui_i128:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __fixunstfti@PLT
; X64-NEXT: popq %rcx
; X64-NEXT: retq
;
; X86-LABEL: fptoui_i128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $12, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __fixunstfti
; X86-NEXT: addl $28, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call i128 @llvm.experimental.constrained.fptoui.i128.f128(fp128 %x, metadata !"fpexcept.strict") #0
  ret i128 %conv
}
2020-01-14 04:40:15 +08:00
|
|
|
; Strict sitofp i8 -> fp128: sign-extend the byte to i32 and call __floatsitf.
define fp128 @sitofp_i8(i8 %x) nounwind strictfp {
; X64-LABEL: sitofp_i8:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: movsbl %dil, %edi
; X64-NEXT: callq __floatsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: sitofp_i8:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movsbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: calll __floatsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.sitofp.f128.i8(i8 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict sitofp i16 -> fp128: sign-extend to i32 and call __floatsitf.
define fp128 @sitofp_i16(i16 %x) nounwind strictfp {
; X64-LABEL: sitofp_i16:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: movswl %di, %edi
; X64-NEXT: callq __floatsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: sitofp_i16:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movswl {{[0-9]+}}(%esp), %eax
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: calll __floatsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.sitofp.f128.i16(i16 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict sitofp i32 -> fp128: direct libcall to __floatsitf.
define fp128 @sitofp_i32(i32 %x) nounwind strictfp {
; X64-LABEL: sitofp_i32:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floatsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: sitofp_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floatsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.sitofp.f128.i32(i32 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict sitofp i64 -> fp128: libcall to __floatditf.
define fp128 @sitofp_i64(i64 %x) nounwind strictfp {
; X64-LABEL: sitofp_i64:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floatditf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: sitofp_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $4, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floatditf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.sitofp.f128.i64(i64 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict sitofp i128 -> fp128: libcall to __floattitf.
define fp128 @sitofp_i128(i128 %x) nounwind strictfp {
; X64-LABEL: sitofp_i128:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floattitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: sitofp_i128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $12, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floattitf
; X86-NEXT: addl $28, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.sitofp.f128.i128(i128 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict uitofp i8 -> fp128: zero-extend the byte; on x86-64 the zero-extended
; value fits in a non-negative i32, so the signed __floatsitf libcall is used.
define fp128 @uitofp_i8(i8 %x) nounwind strictfp {
; X64-LABEL: uitofp_i8:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: movzbl %dil, %edi
; X64-NEXT: callq __floatsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: uitofp_i8:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movzbl {{[0-9]+}}(%esp), %eax
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: calll __floatunsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.uitofp.f128.i8(i8 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict uitofp i16 -> fp128: zero-extend; on x86-64 the zero-extended value
; fits in a non-negative i32, so the signed __floatsitf libcall is used.
define fp128 @uitofp_i16(i16 %x) nounwind strictfp {
; X64-LABEL: uitofp_i16:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: movzwl %di, %edi
; X64-NEXT: callq __floatsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: uitofp_i16:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: movzwl {{[0-9]+}}(%esp), %eax
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %ecx
; X86-NEXT: pushl %eax
; X86-NEXT: pushl %ecx
; X86-NEXT: calll __floatunsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.uitofp.f128.i16(i16 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict uitofp i32 -> fp128: libcall to __floatunsitf.
define fp128 @uitofp_i32(i32 %x) nounwind strictfp {
; X64-LABEL: uitofp_i32:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floatunsitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: uitofp_i32:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $8, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floatunsitf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.uitofp.f128.i32(i32 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict uitofp i64 -> fp128: libcall to __floatunditf.
define fp128 @uitofp_i64(i64 %x) nounwind strictfp {
; X64-LABEL: uitofp_i64:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floatunditf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: uitofp_i64:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $4, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floatunditf
; X86-NEXT: addl $12, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.uitofp.f128.i64(i64 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

; Strict uitofp i128 -> fp128: libcall to __floatuntitf.
define fp128 @uitofp_i128(i128 %x) nounwind strictfp {
; X64-LABEL: uitofp_i128:
; X64: # %bb.0: # %entry
; X64-NEXT: pushq %rax
; X64-NEXT: callq __floatuntitf@PLT
; X64-NEXT: popq %rax
; X64-NEXT: retq
;
; X86-LABEL: uitofp_i128:
; X86: # %bb.0: # %entry
; X86-NEXT: pushl %edi
; X86-NEXT: pushl %esi
; X86-NEXT: subl $20, %esp
; X86-NEXT: movl {{[0-9]+}}(%esp), %esi
; X86-NEXT: subl $12, %esp
; X86-NEXT: leal {{[0-9]+}}(%esp), %eax
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl {{[0-9]+}}(%esp)
; X86-NEXT: pushl %eax
; X86-NEXT: calll __floatuntitf
; X86-NEXT: addl $28, %esp
; X86-NEXT: movl (%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edx
; X86-NEXT: movl {{[0-9]+}}(%esp), %edi
; X86-NEXT: movl %edi, 8(%esi)
; X86-NEXT: movl %edx, 12(%esi)
; X86-NEXT: movl %eax, (%esi)
; X86-NEXT: movl %ecx, 4(%esi)
; X86-NEXT: movl %esi, %eax
; X86-NEXT: addl $20, %esp
; X86-NEXT: popl %esi
; X86-NEXT: popl %edi
; X86-NEXT: retl $4
entry:
  %conv = call fp128 @llvm.experimental.constrained.uitofp.f128.i128(i128 %x, metadata !"round.dynamic", metadata !"fpexcept.strict") #0
  ret fp128 %conv
}

attributes #0 = { strictfp }

; Truncations from fp128.
declare half @llvm.experimental.constrained.fptrunc.f16.f128(fp128, metadata, metadata)
declare float @llvm.experimental.constrained.fptrunc.f32.f128(fp128, metadata, metadata)
declare double @llvm.experimental.constrained.fptrunc.f64.f128(fp128, metadata, metadata)
declare x86_fp80 @llvm.experimental.constrained.fptrunc.f80.f128(fp128, metadata, metadata)

; Extensions to fp128.
declare fp128 @llvm.experimental.constrained.fpext.f128.f16(half, metadata)
declare fp128 @llvm.experimental.constrained.fpext.f128.f32(float, metadata)
declare fp128 @llvm.experimental.constrained.fpext.f128.f64(double, metadata)
declare fp128 @llvm.experimental.constrained.fpext.f128.f80(x86_fp80, metadata)

; fp128 -> signed integer.
declare i8 @llvm.experimental.constrained.fptosi.i8.f128(fp128, metadata)
declare i16 @llvm.experimental.constrained.fptosi.i16.f128(fp128, metadata)
declare i32 @llvm.experimental.constrained.fptosi.i32.f128(fp128, metadata)
declare i64 @llvm.experimental.constrained.fptosi.i64.f128(fp128, metadata)
declare i128 @llvm.experimental.constrained.fptosi.i128.f128(fp128, metadata)

; fp128 -> unsigned integer.
declare i8 @llvm.experimental.constrained.fptoui.i8.f128(fp128, metadata)
declare i16 @llvm.experimental.constrained.fptoui.i16.f128(fp128, metadata)
declare i32 @llvm.experimental.constrained.fptoui.i32.f128(fp128, metadata)
declare i64 @llvm.experimental.constrained.fptoui.i64.f128(fp128, metadata)
declare i128 @llvm.experimental.constrained.fptoui.i128.f128(fp128, metadata)

; Signed integer -> fp128.
declare fp128 @llvm.experimental.constrained.sitofp.f128.i8(i8, metadata, metadata)
declare fp128 @llvm.experimental.constrained.sitofp.f128.i16(i16, metadata, metadata)
declare fp128 @llvm.experimental.constrained.sitofp.f128.i32(i32, metadata, metadata)
declare fp128 @llvm.experimental.constrained.sitofp.f128.i64(i64, metadata, metadata)
declare fp128 @llvm.experimental.constrained.sitofp.f128.i128(i128, metadata, metadata)

; Unsigned integer -> fp128.
declare fp128 @llvm.experimental.constrained.uitofp.f128.i8(i8, metadata, metadata)
declare fp128 @llvm.experimental.constrained.uitofp.f128.i16(i16, metadata, metadata)
declare fp128 @llvm.experimental.constrained.uitofp.f128.i32(i32, metadata, metadata)
declare fp128 @llvm.experimental.constrained.uitofp.f128.i64(i64, metadata, metadata)
declare fp128 @llvm.experimental.constrained.uitofp.f128.i128(i128, metadata, metadata)