# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn-mesa-mesa3d -run-pass=si-optimize-exec-masking-pre-ra -verify-machineinstrs %s -o - | FileCheck -check-prefix=GCN %s

# Check for regression from assuming an instruction was a copy after
# dropping the opcode check.
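# The CHECK lines below match the input unchanged: the exec mask operands here
# are defined by S_AND_B64/S_XOR_B64 rather than plain COPYs of $exec, so the
# pass must not rewrite them as if they were copies.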
---
name: exec_src1_is_not_copy
tracksRegLiveness: true
machineFunctionInfo:
  isEntryFunction: true
  scratchRSrcReg: '$sgpr96_sgpr97_sgpr98_sgpr99'
  frameOffsetReg: '$sgpr101'
body: |
  ; GCN-LABEL: name: exec_src1_is_not_copy
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; GCN:   liveins: $vgpr0
  ; GCN:   [[COPY:%[0-9]+]]:sreg_64 = COPY $exec
  ; GCN:   [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
  ; GCN:   [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN:   [[COPY1:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN:   [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY1]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN:   [[S_XOR_B64_:%[0-9]+]]:sreg_64 = S_XOR_B64 [[S_AND_B64_]], [[COPY1]], implicit-def dead $scc
  ; GCN:   $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN:   SI_MASK_BRANCH %bb.2, implicit $exec
  ; GCN:   S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN:   successors: %bb.2(0x80000000)
  ; GCN: bb.2:
  ; GCN:   successors: %bb.3(0x40000000), %bb.6(0x40000000)
  ; GCN:   [[S_OR_SAVEEXEC_B64_:%[0-9]+]]:sreg_64 = S_OR_SAVEEXEC_B64 [[S_XOR_B64_]], implicit-def $exec, implicit-def $scc, implicit $exec
  ; GCN:   $exec = S_AND_B64 $exec, [[COPY]], implicit-def dead $scc
  ; GCN:   [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[S_OR_SAVEEXEC_B64_]], implicit-def $scc
  ; GCN:   $exec = S_XOR_B64_term $exec, [[S_AND_B64_1]], implicit-def $scc
  ; GCN:   SI_MASK_BRANCH %bb.6, implicit $exec
  ; GCN:   S_BRANCH %bb.3
  ; GCN: bb.3:
  ; GCN:   successors: %bb.4(0x40000000), %bb.5(0x40000000)
  ; GCN:   [[V_CMP_NE_U32_e64_1:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN:   [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN:   [[S_AND_B64_2:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_NE_U32_e64_1]], implicit-def dead $scc
  ; GCN:   $exec = S_MOV_B64_term [[S_AND_B64_2]]
  ; GCN:   SI_MASK_BRANCH %bb.5, implicit $exec
  ; GCN:   S_BRANCH %bb.4
  ; GCN: bb.4:
  ; GCN:   successors: %bb.5(0x80000000)
  ; GCN: bb.5:
  ; GCN:   successors: %bb.6(0x80000000)
  ; GCN:   $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: bb.6:
  ; GCN:   $exec = S_OR_B64 $exec, [[S_AND_B64_1]], implicit-def $scc
  bb.0:
    successors: %bb.1, %bb.2
    liveins: $vgpr0

    %0:sreg_64 = COPY $exec
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    %5:sreg_64 = S_XOR_B64 %4, %3, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.2, implicit $exec
    S_BRANCH %bb.1

  bb.1:

  bb.2:
    successors: %bb.3, %bb.6

    %6:sreg_64 = S_OR_SAVEEXEC_B64 %5, implicit-def $exec, implicit-def $scc, implicit $exec
    $exec = S_AND_B64 $exec, %0, implicit-def dead $scc
    %7:sreg_64 = S_AND_B64 $exec, %6, implicit-def $scc
    $exec = S_XOR_B64_term $exec, %7, implicit-def $scc
    SI_MASK_BRANCH %bb.6, implicit $exec
    S_BRANCH %bb.3

  bb.3:
    successors: %bb.4, %bb.5

    %8:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %9:sreg_64 = COPY $exec, implicit-def $exec
    %10:sreg_64 = S_AND_B64 %9, %8, implicit-def dead $scc
    $exec = S_MOV_B64_term %10
    SI_MASK_BRANCH %bb.5, implicit $exec
    S_BRANCH %bb.4

  bb.4:

  bb.5:
    $exec = S_OR_B64 $exec, %9, implicit-def $scc

  bb.6:
    $exec = S_OR_B64 $exec, %7, implicit-def $scc

...

# When folding a v_cndmask and a v_cmp in a pattern leading to
# s_cbranch_vccz, ensure that an undef operand is handled correctly.
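# Per the CHECK lines below, the V_CNDMASK_B32/V_CMP_NE_U32/S_AND_B64 sequence
# is expected to fold into a single '$vcc = S_ANDN2_B64 $exec, <cond>', with
# the undef flag on the condition operand preserved rather than dropped.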
---
name: cndmask_cmp_cbranch_fold_undef
tracksRegLiveness: true
body: |
  ; GCN-LABEL: name: cndmask_cmp_cbranch_fold_undef
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x80000000)
  ; GCN:   $vcc = S_ANDN2_B64 $exec, undef %1:sreg_64_xexec, implicit-def $scc
  ; GCN:   S_CBRANCH_VCCZ %bb.1, implicit $vcc
  ; GCN: bb.1:
  bb.0:

    %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %0:sreg_64_xexec, implicit $exec
    V_CMP_NE_U32_e32 1, %1, implicit-def $vcc, implicit $exec
    $vcc = S_AND_B64 $exec, $vcc, implicit-def dead $scc
    S_CBRANCH_VCCZ %bb.1, implicit $vcc

  bb.1:

...

# Don't crash on exec copy to SGPR subregister.
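# Per the CHECK lines below, the pass is expected to skip the subregister
# COPY of $exec (leaving it behind as dead) instead of asserting on it.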
---
name: exec_copy_to_subreg
tracksRegLiveness: true
body: |
  ; GCN-LABEL: name: exec_copy_to_subreg
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x80000000)
  ; GCN:   dead undef %0.sub0:sgpr_256 = COPY $exec
  ; GCN:   dead %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %2:sreg_64_xexec, implicit $exec
  ; GCN:   S_BRANCH %bb.1
  ; GCN: bb.1:
  bb.0:

    undef %0.sub0:sgpr_256 = COPY $exec
    %2:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %1:sreg_64_xexec, implicit $exec
    S_BRANCH %bb.1

  bb.1:

...