diff --git a/llvm/test/CodeGen/X86/stack-folding-fp-nofpexcept.mir b/llvm/test/CodeGen/X86/stack-folding-fp-nofpexcept.mir
new file mode 100644
index 000000000000..7a838482d1bb
--- /dev/null
+++ b/llvm/test/CodeGen/X86/stack-folding-fp-nofpexcept.mir
@@ -0,0 +1,52 @@
+# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
+# RUN: llc %s -o - -O3 -mtriple=x86_64-unknown-unknown -run-pass=greedy | FileCheck %s
+--- |
+  ; ModuleID = 'test.ll'
+  source_filename = "test.ll"
+  target datalayout = "e-m:o-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
+  define <2 x double> @stack_fold_addpd(<2 x double> %a0, <2 x double> %a1) {
+    %1 = tail call <2 x i64> asm sideeffect "nop", "=x,~{xmm2},~{xmm3},~{xmm4},~{xmm5},~{xmm6},~{xmm7},~{xmm8},~{xmm9},~{xmm10},~{xmm11},~{xmm12},~{xmm13},~{xmm14},~{xmm15},~{flags}"()
+    %2 = fadd <2 x double> %a0, %a1
+    ret <2 x double> %2
+  }
+  ; Function Attrs: nounwind
+  declare void @llvm.stackprotector(i8*, i8**) #0
+
+  attributes #0 = { nounwind }
+
+...
+---
+name: stack_fold_addpd
+alignment: 16
+tracksRegLiveness: true
+registers:
+  - { id: 0, class: vr128 }
+  - { id: 1, class: vr128 }
+  - { id: 2, class: vr128 }
+  - { id: 3, class: vr128 }
+liveins:
+  - { reg: '$xmm0', virtual-reg: '%0' }
+  - { reg: '$xmm1', virtual-reg: '%1' }
+frameInfo:
+  maxAlignment: 1
+machineFunctionInfo: {}
+body: |
+  bb.0 (%ir-block.0):
+    liveins: $xmm0, $xmm1
+
+    ; CHECK-LABEL: name: stack_fold_addpd
+    ; CHECK: liveins: $xmm0, $xmm1
+    ; CHECK: MOVAPSmr %stack.0, 1, $noreg, 0, $noreg, $xmm1 :: (store 16 into %stack.0)
+    ; CHECK: [[COPY:%[0-9]+]]:vr128 = COPY $xmm0
+    ; CHECK: INLINEASM &nop, 1, 7405578, def dead %2, 12, implicit-def dead early-clobber $xmm2, 12, implicit-def dead early-clobber $xmm3, 12, implicit-def dead early-clobber $xmm4, 12, implicit-def dead early-clobber $xmm5, 12, implicit-def dead early-clobber $xmm6, 12, implicit-def dead early-clobber $xmm7, 12, implicit-def dead early-clobber $xmm8, 12, implicit-def dead early-clobber $xmm9, 12, implicit-def dead early-clobber $xmm10, 12, implicit-def dead early-clobber $xmm11, 12, implicit-def dead early-clobber $xmm12, 12, implicit-def dead early-clobber $xmm13, 12, implicit-def dead early-clobber $xmm14, 12, implicit-def dead early-clobber $xmm15, 12, implicit-def dead early-clobber $eflags
+    ; CHECK: [[ADDPDrm:%[0-9]+]]:vr128 = ADDPDrm [[ADDPDrm]], %stack.0, 1, $noreg, 0, $noreg, implicit $mxcsr :: (load 16 from %stack.0)
+    ; CHECK: $xmm0 = COPY [[ADDPDrm]]
+    ; CHECK: RET 0, $xmm0
+    %1:vr128 = COPY $xmm1
+    %3:vr128 = COPY $xmm0
+    INLINEASM &nop, 1, 7405578, def dead %2, 12, implicit-def dead early-clobber $xmm2, 12, implicit-def dead early-clobber $xmm3, 12, implicit-def dead early-clobber $xmm4, 12, implicit-def dead early-clobber $xmm5, 12, implicit-def dead early-clobber $xmm6, 12, implicit-def dead early-clobber $xmm7, 12, implicit-def dead early-clobber $xmm8, 12, implicit-def dead early-clobber $xmm9, 12, implicit-def dead early-clobber $xmm10, 12, implicit-def dead early-clobber $xmm11, 12, implicit-def dead early-clobber $xmm12, 12, implicit-def dead early-clobber $xmm13, 12, implicit-def dead early-clobber $xmm14, 12, implicit-def dead early-clobber $xmm15, 12, implicit-def dead early-clobber $eflags
+    %3:vr128 = nofpexcept ADDPDrr %3, %1, implicit $mxcsr
+    $xmm0 = COPY %3
+    RET 0, $xmm0
+
+...