GlobalISel: Implement computeKnownBits for G_UNMERGE_VALUES

Matt Arsenault 2020-08-27 16:17:41 -04:00
parent bc9a29b9ee
commit 92090e8bd8
7 changed files with 339 additions and 286 deletions

View File

@ -247,6 +247,12 @@ public:
    One.insertBits(SubBits.One, BitPosition);
  }

  /// Return a subset of the known bits from [bitPosition,bitPosition+numBits).
  KnownBits extractBits(unsigned NumBits, unsigned BitPosition) {
    return KnownBits(Zero.extractBits(NumBits, BitPosition),
                     One.extractBits(NumBits, BitPosition));
  }

  /// Update known bits based on ANDing with RHS.
  KnownBits &operator&=(const KnownBits &RHS);
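
Not part of the commit, but as a reading aid: a minimal, self-contained sketch of what the extractBits helper above returns, assuming this hunk is the llvm/Support KnownBits header and that an LLVM build is available to compile against. It simply takes the same [BitPosition, BitPosition + NumBits) slice of both the Zero and the One mask.

#include "llvm/ADT/APInt.h"
#include "llvm/Support/KnownBits.h"
#include <cassert>

int main() {
  using namespace llvm;
  KnownBits K(32);
  K.One = APInt(32, 0x00ff0000);  // bits [16,24) known to be one
  K.Zero = APInt(32, 0x0000ffff); // bits [0,16) known to be zero, top byte unknown

  // Bits [8,24): the low byte of the slice is known zero, the high byte known one.
  KnownBits Slice = K.extractBits(16, 8);
  assert(Slice.One.getZExtValue() == 0xff00);
  assert(Slice.Zero.getZExtValue() == 0x00ff);
  (void)Slice;
  return 0;
}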

View File

@ -419,6 +419,24 @@ void GISelKnownBits::computeKnownBitsImpl(Register R, KnownBits &Known,
    }
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    unsigned NumOps = MI.getNumOperands();
    Register SrcReg = MI.getOperand(NumOps - 1).getReg();
    if (MRI.getType(SrcReg).isVector())
      return; // TODO: Handle vectors.

    KnownBits SrcOpKnown;
    computeKnownBitsImpl(SrcReg, SrcOpKnown, DemandedElts, Depth + 1);

    // Figure out the result operand index
    unsigned DstIdx = 0;
    for (; DstIdx != NumOps - 1 && MI.getOperand(DstIdx).getReg() != R;
         ++DstIdx)
      ;

    Known = SrcOpKnown.extractBits(BitWidth, BitWidth * DstIdx);
    break;
  }
  }

  assert(!Known.hasConflict() && "Bits known to be one AND zero?");
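
Not part of the commit, but a worked illustration of the new case above: a minimal standalone program (assuming an LLVM tree with this patch applied, so KnownBits::extractBits exists) that slices a fully known 64-bit value the way computeKnownBitsImpl now does for each G_UNMERGE_VALUES result, i.e. result DstIdx sees bits [BitWidth*DstIdx, BitWidth*(DstIdx+1)) of the source.

#include "llvm/ADT/APInt.h"
#include "llvm/Support/KnownBits.h"
#include <cstdint>
#include <cstdio>

int main() {
  using namespace llvm;
  // Same constant as the new unit test at the bottom of this commit.
  const uint64_t TestVal = UINT64_C(0xabcd123344568998);

  // Model the s64 source as fully known, as it is for a G_CONSTANT.
  KnownBits SrcOpKnown(64);
  SrcOpKnown.One = APInt(64, TestVal);
  SrcOpKnown.Zero = ~SrcOpKnown.One;

  // Unmerge into four s16 results: each one is a 16-bit slice of the source.
  const unsigned BitWidth = 16;
  for (unsigned DstIdx = 0; DstIdx != 4; ++DstIdx) {
    KnownBits Known = SrcOpKnown.extractBits(BitWidth, BitWidth * DstIdx);
    std::printf("result %u: One=0x%04llx Zero=0x%04llx\n", DstIdx,
                (unsigned long long)Known.One.getZExtValue(),
                (unsigned long long)Known.Zero.getZExtValue());
  }
  return 0;
}

With this constant the One values come out as 0x8998, 0x4456, 0x1233 and 0xabcd (with the complementary Zero masks), which is exactly what the TestKnownBitsUnmergeValues unit test added at the end of this commit checks.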

View File

@ -1064,39 +1064,39 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s6, 0x1000
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, s6
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CHECK-NEXT: s_mov_b32 s7, 0xfffff000
; CHECK-NEXT: v_ashrrev_i32_e32 v4, 31, v1
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v3
; CHECK-NEXT: v_ashrrev_i32_e32 v3, 31, v1
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v4
; CHECK-NEXT: v_rcp_iflag_f32_e32 v2, v2
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; CHECK-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v3, v3
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v3
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v4, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v2, v2
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v5, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s7, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v7
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v7
; CHECK-NEXT: v_mul_lo_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v9, v2, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v9, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v9, v4, v5
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CHECK-NEXT: v_mul_hi_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v9, v7
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -1107,12 +1107,12 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v3, v5, vcc
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v4, v5, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v8, s7, v6
; CHECK-NEXT: v_mul_hi_u32 v10, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v9, s7, v2
; CHECK-NEXT: v_add_i32_e64 v3, s[4:5], v3, v5
; CHECK-NEXT: v_add_i32_e64 v4, s[4:5], v4, v5
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v8
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v10
; CHECK-NEXT: v_mul_lo_u32 v8, v6, v9
@ -1136,21 +1136,21 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, s[4:5]
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v9, v8
; CHECK-NEXT: v_add_i32_e64 v6, s[4:5], v6, v7
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, 0, v4, vcc
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v7, v2
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
@ -1159,9 +1159,9 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v5, 0, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s6, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s6, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
@ -1179,7 +1179,7 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v6
; CHECK-NEXT: v_add_i32_e32 v6, vcc, 1, v2
; CHECK-NEXT: v_cndmask_b32_e64 v5, v5, v7, s[4:5]
; CHECK-NEXT: v_addc_u32_e32 v7, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v7, vcc, 0, v4, vcc
; CHECK-NEXT: v_cmp_le_u32_e32 vcc, 0, v1
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, -1, vcc
; CHECK-NEXT: v_cmp_le_u32_e32 vcc, s6, v0
@ -1193,11 +1193,11 @@ define i64 @v_sdiv_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e32 v1, v7, v8, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v5
; CHECK-NEXT: v_cndmask_b32_e32 v0, v2, v0, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v3, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
%result = sdiv i64 %num, 4096
ret i64 %result
@ -1509,23 +1509,23 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_movk_i32 s6, 0x1000
; CGP-NEXT: v_cvt_f32_u32_e32 v4, s6
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: s_mov_b32 s7, 0xfffff000
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v1
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v1
; CGP-NEXT: v_mov_b32_e32 v7, v4
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v5
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v7, v7
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_mul_f32_e32 v7, 0x5f7ffffc, v7
; CGP-NEXT: v_mul_f32_e32 v8, 0x2f800000, v7
; CGP-NEXT: v_trunc_f32_e32 v8, v8
; CGP-NEXT: v_mac_f32_e32 v7, 0xcf800000, v8
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cvt_u32_f32_e32 v8, v8
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v6
; CGP-NEXT: v_mul_lo_u32 v9, -1, v7
; CGP-NEXT: v_mul_lo_u32 v10, s7, v8
; CGP-NEXT: v_mul_hi_u32 v12, s7, v7
@ -1568,7 +1568,7 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v14, v7, v11
; CGP-NEXT: v_mul_hi_u32 v9, v7, v13
; CGP-NEXT: v_mul_hi_u32 v13, v10, v13
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v3
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v3
; CGP-NEXT: v_add_i32_e64 v12, s[4:5], v12, v14
; CGP-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v9, s[4:5], v12, v9
@ -1649,19 +1649,19 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_cvt_u32_f32_e32 v4, v4
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cndmask_b32_e32 v1, v8, v1, vcc
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_mul_lo_u32 v8, -1, v4
; CGP-NEXT: v_mul_lo_u32 v9, s7, v7
; CGP-NEXT: v_mul_hi_u32 v11, s7, v4
; CGP-NEXT: v_mul_lo_u32 v10, s7, v4
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v11
; CGP-NEXT: v_mul_lo_u32 v9, v7, v10
; CGP-NEXT: v_mul_lo_u32 v11, v4, v8
; CGP-NEXT: v_mul_hi_u32 v12, v4, v10
; CGP-NEXT: v_mul_hi_u32 v10, v7, v10
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v12
@ -1692,7 +1692,7 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v13, v4, v10
; CGP-NEXT: v_mul_hi_u32 v8, v4, v12
; CGP-NEXT: v_mul_hi_u32 v12, v9, v12
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_add_i32_e64 v11, s[4:5], v11, v13
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v8, s[4:5], v11, v8
@ -1713,20 +1713,20 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_addc_u32_e32 v7, vcc, v7, v9, vcc
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v8
; CGP-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_mul_lo_u32 v8, v3, v4
; CGP-NEXT: v_mul_lo_u32 v9, v2, v7
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_mul_hi_u32 v6, v2, v4
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_mul_hi_u32 v5, v2, v4
; CGP-NEXT: v_mul_hi_u32 v4, v3, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v3, v7
; CGP-NEXT: v_add_i32_e32 v6, vcc, v9, v6
; CGP-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; CGP-NEXT: v_mul_hi_u32 v9, v2, v7
; CGP-NEXT: v_mul_hi_u32 v7, v3, v7
; CGP-NEXT: v_add_i32_e32 v4, vcc, v8, v4
@ -1734,12 +1734,12 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_mul_lo_u32 v7, 0, v4
; CGP-NEXT: v_mul_lo_u32 v8, s6, v6
; CGP-NEXT: v_mul_lo_u32 v8, s6, v5
; CGP-NEXT: v_mul_hi_u32 v10, s6, v4
; CGP-NEXT: v_mul_lo_u32 v9, s6, v4
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -1757,7 +1757,7 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; CGP-NEXT: v_add_i32_e32 v8, vcc, 1, v4
; CGP-NEXT: v_cndmask_b32_e64 v7, v7, v9, s[4:5]
; CGP-NEXT: v_addc_u32_e32 v9, vcc, 0, v6, vcc
; CGP-NEXT: v_addc_u32_e32 v9, vcc, 0, v5, vcc
; CGP-NEXT: v_cmp_le_u32_e32 vcc, 0, v3
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, -1, vcc
; CGP-NEXT: v_cmp_le_u32_e32 vcc, s6, v2
@ -1771,11 +1771,11 @@ define <2 x i64> @v_sdiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_cndmask_b32_e32 v3, v9, v10, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; CGP-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v6, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: s_setpc_b64 s[30:31]
%result = sdiv <2 x i64> %num, <i64 4096, i64 4096>
ret <2 x i64> %result
@ -1787,39 +1787,39 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_mov_b32 s6, 0x12d8fb
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, s6
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CHECK-NEXT: s_mov_b32 s7, 0xffed2705
; CHECK-NEXT: v_ashrrev_i32_e32 v4, 31, v1
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v3
; CHECK-NEXT: v_ashrrev_i32_e32 v3, 31, v1
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v4
; CHECK-NEXT: v_rcp_iflag_f32_e32 v2, v2
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; CHECK-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v3, v3
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v3
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v4, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v2, v2
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v5, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s7, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v7
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v7
; CHECK-NEXT: v_mul_lo_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v9, v2, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v9, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v9, v4, v5
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CHECK-NEXT: v_mul_hi_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v9, v7
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -1830,12 +1830,12 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v3, v5, vcc
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v4, v5, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v8, s7, v6
; CHECK-NEXT: v_mul_hi_u32 v10, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v9, s7, v2
; CHECK-NEXT: v_add_i32_e64 v3, s[4:5], v3, v5
; CHECK-NEXT: v_add_i32_e64 v4, s[4:5], v4, v5
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v8
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v10
; CHECK-NEXT: v_mul_lo_u32 v8, v6, v9
@ -1859,21 +1859,21 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, s[4:5]
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v9, v8
; CHECK-NEXT: v_add_i32_e64 v6, s[4:5], v6, v7
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, 0, v4, vcc
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v7, v2
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
@ -1882,9 +1882,9 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v5, 0, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s6, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s6, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
@ -1902,7 +1902,7 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v6
; CHECK-NEXT: v_add_i32_e32 v6, vcc, 1, v2
; CHECK-NEXT: v_cndmask_b32_e64 v5, v5, v7, s[4:5]
; CHECK-NEXT: v_addc_u32_e32 v7, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v7, vcc, 0, v4, vcc
; CHECK-NEXT: v_cmp_le_u32_e32 vcc, 0, v1
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, -1, vcc
; CHECK-NEXT: v_cmp_le_u32_e32 vcc, s6, v0
@ -1916,11 +1916,11 @@ define i64 @v_sdiv_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e32 v1, v7, v8, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v5
; CHECK-NEXT: v_cndmask_b32_e32 v0, v2, v0, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v3, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
%result = sdiv i64 %num, 1235195
ret i64 %result
@ -2232,23 +2232,23 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_mov_b32 s6, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v4, s6
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: s_mov_b32 s7, 0xffed2705
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v1
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v1
; CGP-NEXT: v_mov_b32_e32 v7, v4
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v5
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v7, v7
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_mul_f32_e32 v7, 0x5f7ffffc, v7
; CGP-NEXT: v_mul_f32_e32 v8, 0x2f800000, v7
; CGP-NEXT: v_trunc_f32_e32 v8, v8
; CGP-NEXT: v_mac_f32_e32 v7, 0xcf800000, v8
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cvt_u32_f32_e32 v8, v8
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v6
; CGP-NEXT: v_mul_lo_u32 v9, -1, v7
; CGP-NEXT: v_mul_lo_u32 v10, s7, v8
; CGP-NEXT: v_mul_hi_u32 v12, s7, v7
@ -2291,7 +2291,7 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v14, v7, v11
; CGP-NEXT: v_mul_hi_u32 v9, v7, v13
; CGP-NEXT: v_mul_hi_u32 v13, v10, v13
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v3
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v3
; CGP-NEXT: v_add_i32_e64 v12, s[4:5], v12, v14
; CGP-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v9, s[4:5], v12, v9
@ -2372,19 +2372,19 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_cvt_u32_f32_e32 v4, v4
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cndmask_b32_e32 v1, v8, v1, vcc
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_mul_lo_u32 v8, -1, v4
; CGP-NEXT: v_mul_lo_u32 v9, s7, v7
; CGP-NEXT: v_mul_hi_u32 v11, s7, v4
; CGP-NEXT: v_mul_lo_u32 v10, s7, v4
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v11
; CGP-NEXT: v_mul_lo_u32 v9, v7, v10
; CGP-NEXT: v_mul_lo_u32 v11, v4, v8
; CGP-NEXT: v_mul_hi_u32 v12, v4, v10
; CGP-NEXT: v_mul_hi_u32 v10, v7, v10
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v12
@ -2415,7 +2415,7 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v13, v4, v10
; CGP-NEXT: v_mul_hi_u32 v8, v4, v12
; CGP-NEXT: v_mul_hi_u32 v12, v9, v12
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_add_i32_e64 v11, s[4:5], v11, v13
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v8, s[4:5], v11, v8
@ -2436,20 +2436,20 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_addc_u32_e32 v7, vcc, v7, v9, vcc
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v8
; CGP-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_mul_lo_u32 v8, v3, v4
; CGP-NEXT: v_mul_lo_u32 v9, v2, v7
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_mul_hi_u32 v6, v2, v4
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_mul_hi_u32 v5, v2, v4
; CGP-NEXT: v_mul_hi_u32 v4, v3, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v3, v7
; CGP-NEXT: v_add_i32_e32 v6, vcc, v9, v6
; CGP-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; CGP-NEXT: v_mul_hi_u32 v9, v2, v7
; CGP-NEXT: v_mul_hi_u32 v7, v3, v7
; CGP-NEXT: v_add_i32_e32 v4, vcc, v8, v4
@ -2457,12 +2457,12 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_mul_lo_u32 v7, 0, v4
; CGP-NEXT: v_mul_lo_u32 v8, s6, v6
; CGP-NEXT: v_mul_lo_u32 v8, s6, v5
; CGP-NEXT: v_mul_hi_u32 v10, s6, v4
; CGP-NEXT: v_mul_lo_u32 v9, s6, v4
; CGP-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -2480,7 +2480,7 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v8
; CGP-NEXT: v_add_i32_e32 v8, vcc, 1, v4
; CGP-NEXT: v_cndmask_b32_e64 v7, v7, v9, s[4:5]
; CGP-NEXT: v_addc_u32_e32 v9, vcc, 0, v6, vcc
; CGP-NEXT: v_addc_u32_e32 v9, vcc, 0, v5, vcc
; CGP-NEXT: v_cmp_le_u32_e32 vcc, 0, v3
; CGP-NEXT: v_cndmask_b32_e64 v10, 0, -1, vcc
; CGP-NEXT: v_cmp_le_u32_e32 vcc, s6, v2
@ -2494,11 +2494,11 @@ define <2 x i64> @v_sdiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_cndmask_b32_e32 v3, v9, v10, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v7
; CGP-NEXT: v_cndmask_b32_e32 v2, v4, v2, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v6, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: s_setpc_b64 s[30:31]
%result = sdiv <2 x i64> %num, <i64 1235195, i64 1235195>
ret <2 x i64> %result

View File

@ -1044,39 +1044,39 @@ define i64 @v_srem_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s6, 0x1000
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, s6
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CHECK-NEXT: s_mov_b32 s7, 0xfffff000
; CHECK-NEXT: v_ashrrev_i32_e32 v4, 31, v1
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v3
; CHECK-NEXT: v_ashrrev_i32_e32 v3, 31, v1
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v4
; CHECK-NEXT: v_rcp_iflag_f32_e32 v2, v2
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; CHECK-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v3, v3
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v3
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v4, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v2, v2
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v5, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s7, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v7
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v7
; CHECK-NEXT: v_mul_lo_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v9, v2, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v9, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v9, v4, v5
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CHECK-NEXT: v_mul_hi_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v9, v7
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -1087,12 +1087,12 @@ define i64 @v_srem_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v3, v5, vcc
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v4, v5, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v8, s7, v6
; CHECK-NEXT: v_mul_hi_u32 v10, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v9, s7, v2
; CHECK-NEXT: v_add_i32_e64 v3, s[4:5], v3, v5
; CHECK-NEXT: v_add_i32_e64 v4, s[4:5], v4, v5
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v8
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v10
; CHECK-NEXT: v_mul_lo_u32 v8, v6, v9
@ -1116,21 +1116,21 @@ define i64 @v_srem_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, s[4:5]
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v9, v8
; CHECK-NEXT: v_add_i32_e64 v6, s[4:5], v6, v7
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, 0, v4, vcc
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v7, v2
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
@ -1139,21 +1139,21 @@ define i64 @v_srem_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v5, 0, v2
; CHECK-NEXT: v_mul_lo_u32 v3, s6, v3
; CHECK-NEXT: v_mul_lo_u32 v4, s6, v4
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v2
; CHECK-NEXT: v_mul_hi_u32 v2, s6, v2
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v5, v3
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v3, v2
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v4, v2
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CHECK-NEXT: v_subb_u32_e64 v3, s[4:5], v1, v2, vcc
; CHECK-NEXT: v_subb_u32_e64 v4, s[4:5], v1, v2, vcc
; CHECK-NEXT: v_sub_i32_e64 v1, s[4:5], v1, v2
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v3
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v4
; CHECK-NEXT: v_cndmask_b32_e64 v2, 0, -1, s[4:5]
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], s6, v0
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, -1, s[4:5]
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v4
; CHECK-NEXT: v_subbrev_u32_e32 v1, vcc, 0, v1, vcc
; CHECK-NEXT: v_cndmask_b32_e64 v2, v2, v5, s[4:5]
; CHECK-NEXT: v_subrev_i32_e32 v5, vcc, s6, v0
@ -1171,11 +1171,11 @@ define i64 @v_srem_i64_pow2k_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e32 v1, v1, v8, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2
; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v5, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v3, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
%result = srem i64 %num, 4096
ret i64 %result
@ -1485,23 +1485,23 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_movk_i32 s6, 0x1000
; CGP-NEXT: v_cvt_f32_u32_e32 v4, s6
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: s_mov_b32 s7, 0xfffff000
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v1
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v1
; CGP-NEXT: v_mov_b32_e32 v7, v4
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v5
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v7, v7
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_mul_f32_e32 v7, 0x5f7ffffc, v7
; CGP-NEXT: v_mul_f32_e32 v8, 0x2f800000, v7
; CGP-NEXT: v_trunc_f32_e32 v8, v8
; CGP-NEXT: v_mac_f32_e32 v7, 0xcf800000, v8
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cvt_u32_f32_e32 v8, v8
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v6
; CGP-NEXT: v_mul_lo_u32 v9, -1, v7
; CGP-NEXT: v_mul_lo_u32 v10, s7, v8
; CGP-NEXT: v_mul_hi_u32 v12, s7, v7
@ -1544,7 +1544,7 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v14, v7, v11
; CGP-NEXT: v_mul_hi_u32 v9, v7, v13
; CGP-NEXT: v_mul_hi_u32 v13, v10, v13
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v3
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v3
; CGP-NEXT: v_add_i32_e64 v12, s[4:5], v12, v14
; CGP-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v9, s[4:5], v12, v9
@ -1627,15 +1627,15 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v9, s7, v7
; CGP-NEXT: v_mul_hi_u32 v11, s7, v4
; CGP-NEXT: v_mul_lo_u32 v10, s7, v4
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v11
; CGP-NEXT: v_mul_lo_u32 v9, v7, v10
; CGP-NEXT: v_mul_lo_u32 v11, v4, v8
; CGP-NEXT: v_mul_hi_u32 v12, v4, v10
; CGP-NEXT: v_mul_hi_u32 v10, v7, v10
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v12
@ -1666,7 +1666,7 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v13, v4, v10
; CGP-NEXT: v_mul_hi_u32 v8, v4, v12
; CGP-NEXT: v_mul_hi_u32 v12, v9, v12
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_add_i32_e64 v11, s[4:5], v11, v13
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v8, s[4:5], v11, v8
@ -1687,20 +1687,20 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_addc_u32_e32 v7, vcc, v7, v9, vcc
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v8
; CGP-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_mul_lo_u32 v8, v3, v4
; CGP-NEXT: v_mul_lo_u32 v9, v2, v7
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_mul_hi_u32 v6, v2, v4
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_mul_hi_u32 v5, v2, v4
; CGP-NEXT: v_mul_hi_u32 v4, v3, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v3, v7
; CGP-NEXT: v_add_i32_e32 v6, vcc, v9, v6
; CGP-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; CGP-NEXT: v_mul_hi_u32 v9, v2, v7
; CGP-NEXT: v_mul_hi_u32 v7, v3, v7
; CGP-NEXT: v_add_i32_e32 v4, vcc, v8, v4
@ -1708,24 +1708,24 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_mul_lo_u32 v7, 0, v4
; CGP-NEXT: v_mul_lo_u32 v6, s6, v6
; CGP-NEXT: v_mul_lo_u32 v5, s6, v5
; CGP-NEXT: v_mul_lo_u32 v8, s6, v4
; CGP-NEXT: v_mul_hi_u32 v4, s6, v4
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v6, v4
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v8
; CGP-NEXT: v_subb_u32_e64 v6, s[4:5], v3, v4, vcc
; CGP-NEXT: v_subb_u32_e64 v5, s[4:5], v3, v4, vcc
; CGP-NEXT: v_sub_i32_e64 v3, s[4:5], v3, v4
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v6
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_cndmask_b32_e64 v4, 0, -1, s[4:5]
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], s6, v2
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, -1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v6
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_subbrev_u32_e32 v3, vcc, 0, v3, vcc
; CGP-NEXT: v_cndmask_b32_e64 v4, v4, v7, s[4:5]
; CGP-NEXT: v_subrev_i32_e32 v7, vcc, s6, v2
@ -1743,11 +1743,11 @@ define <2 x i64> @v_srem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP-NEXT: v_cndmask_b32_e32 v3, v3, v10, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4
; CGP-NEXT: v_cndmask_b32_e32 v2, v2, v7, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v6, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: s_setpc_b64 s[30:31]
%result = srem <2 x i64> %num, <i64 4096, i64 4096>
ret <2 x i64> %result
@ -1759,39 +1759,39 @@ define i64 @v_srem_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_mov_b32 s6, 0x12d8fb
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, s6
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CHECK-NEXT: s_mov_b32 s7, 0xffed2705
; CHECK-NEXT: v_ashrrev_i32_e32 v4, 31, v1
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v3
; CHECK-NEXT: v_ashrrev_i32_e32 v3, 31, v1
; CHECK-NEXT: v_mac_f32_e32 v2, 0x4f800000, v4
; CHECK-NEXT: v_rcp_iflag_f32_e32 v2, v2
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_add_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_addc_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_mul_f32_e32 v2, 0x5f7ffffc, v2
; CHECK-NEXT: v_mul_f32_e32 v3, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v3, v3
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v3
; CHECK-NEXT: v_mul_f32_e32 v4, 0x2f800000, v2
; CHECK-NEXT: v_trunc_f32_e32 v4, v4
; CHECK-NEXT: v_mac_f32_e32 v2, 0xcf800000, v4
; CHECK-NEXT: v_cvt_u32_f32_e32 v2, v2
; CHECK-NEXT: v_cvt_u32_f32_e32 v3, v3
; CHECK-NEXT: v_cvt_u32_f32_e32 v4, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v5, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v3
; CHECK-NEXT: v_mul_lo_u32 v6, s7, v4
; CHECK-NEXT: v_mul_hi_u32 v8, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v7, s7, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v8
; CHECK-NEXT: v_mul_lo_u32 v6, v3, v7
; CHECK-NEXT: v_mul_lo_u32 v6, v4, v7
; CHECK-NEXT: v_mul_lo_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v9, v2, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v3, v7
; CHECK-NEXT: v_mul_hi_u32 v7, v4, v7
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v8
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v6, v9
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v9, v3, v5
; CHECK-NEXT: v_mul_lo_u32 v9, v4, v5
; CHECK-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CHECK-NEXT: v_mul_hi_u32 v8, v2, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v3, v5
; CHECK-NEXT: v_mul_hi_u32 v5, v4, v5
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v9, v7
; CHECK-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v7, v8
@ -1802,12 +1802,12 @@ define i64 @v_srem_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v7, vcc, v8, v7
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v3, v5, vcc
; CHECK-NEXT: v_addc_u32_e64 v6, s[4:5], v4, v5, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, -1, v2
; CHECK-NEXT: v_mul_lo_u32 v8, s7, v6
; CHECK-NEXT: v_mul_hi_u32 v10, s7, v2
; CHECK-NEXT: v_mul_lo_u32 v9, s7, v2
; CHECK-NEXT: v_add_i32_e64 v3, s[4:5], v3, v5
; CHECK-NEXT: v_add_i32_e64 v4, s[4:5], v4, v5
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v8
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v7, v10
; CHECK-NEXT: v_mul_lo_u32 v8, v6, v9
@ -1831,21 +1831,21 @@ define i64 @v_srem_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e64 v8, 0, 1, s[4:5]
; CHECK-NEXT: v_add_i32_e64 v7, s[4:5], v9, v8
; CHECK-NEXT: v_add_i32_e64 v6, s[4:5], v6, v7
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, v4, v6, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_addc_u32_e32 v3, vcc, 0, v3, vcc
; CHECK-NEXT: v_addc_u32_e32 v4, vcc, 0, v4, vcc
; CHECK-NEXT: v_mul_lo_u32 v5, v1, v2
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v3
; CHECK-NEXT: v_mul_lo_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v7, v0, v2
; CHECK-NEXT: v_mul_hi_u32 v2, v1, v2
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v6
; CHECK-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v5, v7
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v3
; CHECK-NEXT: v_mul_lo_u32 v7, v1, v4
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v3
; CHECK-NEXT: v_mul_hi_u32 v3, v1, v3
; CHECK-NEXT: v_mul_hi_u32 v6, v0, v4
; CHECK-NEXT: v_mul_hi_u32 v4, v1, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v7, v2
; CHECK-NEXT: v_cndmask_b32_e64 v7, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v6
@ -1854,21 +1854,21 @@ define i64 @v_srem_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CHECK-NEXT: v_add_i32_e32 v5, vcc, v6, v5
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v3, v5
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CHECK-NEXT: v_mul_lo_u32 v5, 0, v2
; CHECK-NEXT: v_mul_lo_u32 v3, s6, v3
; CHECK-NEXT: v_mul_lo_u32 v4, s6, v4
; CHECK-NEXT: v_mul_lo_u32 v6, s6, v2
; CHECK-NEXT: v_mul_hi_u32 v2, s6, v2
; CHECK-NEXT: v_add_i32_e32 v3, vcc, v5, v3
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v3, v2
; CHECK-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; CHECK-NEXT: v_add_i32_e32 v2, vcc, v4, v2
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CHECK-NEXT: v_subb_u32_e64 v3, s[4:5], v1, v2, vcc
; CHECK-NEXT: v_subb_u32_e64 v4, s[4:5], v1, v2, vcc
; CHECK-NEXT: v_sub_i32_e64 v1, s[4:5], v1, v2
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v3
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v4
; CHECK-NEXT: v_cndmask_b32_e64 v2, 0, -1, s[4:5]
; CHECK-NEXT: v_cmp_le_u32_e64 s[4:5], s6, v0
; CHECK-NEXT: v_cndmask_b32_e64 v5, 0, -1, s[4:5]
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v3
; CHECK-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v4
; CHECK-NEXT: v_subbrev_u32_e32 v1, vcc, 0, v1, vcc
; CHECK-NEXT: v_cndmask_b32_e64 v2, v2, v5, s[4:5]
; CHECK-NEXT: v_subrev_i32_e32 v5, vcc, s6, v0
@ -1886,11 +1886,11 @@ define i64 @v_srem_i64_oddk_denom(i64 %num) {
; CHECK-NEXT: v_cndmask_b32_e32 v1, v1, v8, vcc
; CHECK-NEXT: v_cmp_ne_u32_e32 vcc, 0, v2
; CHECK-NEXT: v_cndmask_b32_e32 v0, v0, v5, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v3, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v4
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v4
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v4
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v4, vcc
; CHECK-NEXT: v_cndmask_b32_e32 v1, v4, v1, vcc
; CHECK-NEXT: v_xor_b32_e32 v0, v0, v3
; CHECK-NEXT: v_xor_b32_e32 v1, v1, v3
; CHECK-NEXT: v_sub_i32_e32 v0, vcc, v0, v3
; CHECK-NEXT: v_subb_u32_e32 v1, vcc, v1, v3, vcc
; CHECK-NEXT: s_setpc_b64 s[30:31]
%result = srem i64 %num, 1235195
ret i64 %result
@ -2200,23 +2200,23 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_mov_b32 s6, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v4, s6
; CGP-NEXT: v_cvt_f32_u32_e32 v5, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v6, 0
; CGP-NEXT: s_mov_b32 s7, 0xffed2705
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v1
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v1
; CGP-NEXT: v_mov_b32_e32 v7, v4
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v5
; CGP-NEXT: v_mac_f32_e32 v7, 0x4f800000, v6
; CGP-NEXT: v_rcp_iflag_f32_e32 v7, v7
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_add_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_addc_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_mul_f32_e32 v7, 0x5f7ffffc, v7
; CGP-NEXT: v_mul_f32_e32 v8, 0x2f800000, v7
; CGP-NEXT: v_trunc_f32_e32 v8, v8
; CGP-NEXT: v_mac_f32_e32 v7, 0xcf800000, v8
; CGP-NEXT: v_cvt_u32_f32_e32 v7, v7
; CGP-NEXT: v_cvt_u32_f32_e32 v8, v8
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_mac_f32_e32 v4, 0x4f800000, v6
; CGP-NEXT: v_mul_lo_u32 v9, -1, v7
; CGP-NEXT: v_mul_lo_u32 v10, s7, v8
; CGP-NEXT: v_mul_hi_u32 v12, s7, v7
@ -2259,7 +2259,7 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v14, v7, v11
; CGP-NEXT: v_mul_hi_u32 v9, v7, v13
; CGP-NEXT: v_mul_hi_u32 v13, v10, v13
; CGP-NEXT: v_ashrrev_i32_e32 v5, 31, v3
; CGP-NEXT: v_ashrrev_i32_e32 v6, 31, v3
; CGP-NEXT: v_add_i32_e64 v12, s[4:5], v12, v14
; CGP-NEXT: v_cndmask_b32_e64 v14, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v9, s[4:5], v12, v9
@ -2342,15 +2342,15 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v9, s7, v7
; CGP-NEXT: v_mul_hi_u32 v11, s7, v4
; CGP-NEXT: v_mul_lo_u32 v10, s7, v4
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_add_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_addc_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v11
; CGP-NEXT: v_mul_lo_u32 v9, v7, v10
; CGP-NEXT: v_mul_lo_u32 v11, v4, v8
; CGP-NEXT: v_mul_hi_u32 v12, v4, v10
; CGP-NEXT: v_mul_hi_u32 v10, v7, v10
; CGP-NEXT: v_xor_b32_e32 v0, v0, v6
; CGP-NEXT: v_xor_b32_e32 v0, v0, v5
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v11
; CGP-NEXT: v_cndmask_b32_e64 v11, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v9, vcc, v9, v12
@ -2381,7 +2381,7 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_mul_lo_u32 v13, v4, v10
; CGP-NEXT: v_mul_hi_u32 v8, v4, v12
; CGP-NEXT: v_mul_hi_u32 v12, v9, v12
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_add_i32_e64 v11, s[4:5], v11, v13
; CGP-NEXT: v_cndmask_b32_e64 v13, 0, 1, s[4:5]
; CGP-NEXT: v_add_i32_e64 v8, s[4:5], v11, v8
@ -2402,20 +2402,20 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_addc_u32_e32 v7, vcc, v7, v9, vcc
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v8
; CGP-NEXT: v_addc_u32_e32 v7, vcc, 0, v7, vcc
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_xor_b32_e32 v1, v1, v6
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_xor_b32_e32 v1, v1, v5
; CGP-NEXT: v_sub_i32_e32 v0, vcc, v0, v5
; CGP-NEXT: v_mul_lo_u32 v8, v3, v4
; CGP-NEXT: v_mul_lo_u32 v9, v2, v7
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v6, vcc
; CGP-NEXT: v_mul_hi_u32 v6, v2, v4
; CGP-NEXT: v_subb_u32_e32 v1, vcc, v1, v5, vcc
; CGP-NEXT: v_mul_hi_u32 v5, v2, v4
; CGP-NEXT: v_mul_hi_u32 v4, v3, v4
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_mul_lo_u32 v8, v3, v7
; CGP-NEXT: v_add_i32_e32 v6, vcc, v9, v6
; CGP-NEXT: v_add_i32_e32 v5, vcc, v9, v5
; CGP-NEXT: v_mul_hi_u32 v9, v2, v7
; CGP-NEXT: v_mul_hi_u32 v7, v3, v7
; CGP-NEXT: v_add_i32_e32 v4, vcc, v8, v4
@ -2423,24 +2423,24 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v9
; CGP-NEXT: v_cndmask_b32_e64 v9, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v8, vcc, v8, v9
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v6
; CGP-NEXT: v_cndmask_b32_e64 v6, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v6, vcc, v8, v6
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v4, v5
; CGP-NEXT: v_cndmask_b32_e64 v5, 0, 1, vcc
; CGP-NEXT: v_add_i32_e32 v5, vcc, v8, v5
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_mul_lo_u32 v7, 0, v4
; CGP-NEXT: v_mul_lo_u32 v6, s6, v6
; CGP-NEXT: v_mul_lo_u32 v5, s6, v5
; CGP-NEXT: v_mul_lo_u32 v8, s6, v4
; CGP-NEXT: v_mul_hi_u32 v4, s6, v4
; CGP-NEXT: v_add_i32_e32 v6, vcc, v7, v6
; CGP-NEXT: v_add_i32_e32 v4, vcc, v6, v4
; CGP-NEXT: v_add_i32_e32 v5, vcc, v7, v5
; CGP-NEXT: v_add_i32_e32 v4, vcc, v5, v4
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v8
; CGP-NEXT: v_subb_u32_e64 v6, s[4:5], v3, v4, vcc
; CGP-NEXT: v_subb_u32_e64 v5, s[4:5], v3, v4, vcc
; CGP-NEXT: v_sub_i32_e64 v3, s[4:5], v3, v4
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v6
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_cndmask_b32_e64 v4, 0, -1, s[4:5]
; CGP-NEXT: v_cmp_le_u32_e64 s[4:5], s6, v2
; CGP-NEXT: v_cndmask_b32_e64 v7, 0, -1, s[4:5]
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v6
; CGP-NEXT: v_cmp_eq_u32_e64 s[4:5], 0, v5
; CGP-NEXT: v_subbrev_u32_e32 v3, vcc, 0, v3, vcc
; CGP-NEXT: v_cndmask_b32_e64 v4, v4, v7, s[4:5]
; CGP-NEXT: v_subrev_i32_e32 v7, vcc, s6, v2
@ -2458,11 +2458,11 @@ define <2 x i64> @v_srem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP-NEXT: v_cndmask_b32_e32 v3, v3, v10, vcc
; CGP-NEXT: v_cmp_ne_u32_e32 vcc, 0, v4
; CGP-NEXT: v_cndmask_b32_e32 v2, v2, v7, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v6, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v5
; CGP-NEXT: v_xor_b32_e32 v3, v3, v5
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v5
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v5, vcc
; CGP-NEXT: v_cndmask_b32_e32 v3, v5, v3, vcc
; CGP-NEXT: v_xor_b32_e32 v2, v2, v6
; CGP-NEXT: v_xor_b32_e32 v3, v3, v6
; CGP-NEXT: v_sub_i32_e32 v2, vcc, v2, v6
; CGP-NEXT: v_subb_u32_e32 v3, vcc, v3, v6, vcc
; CGP-NEXT: s_setpc_b64 s[30:31]
%result = srem <2 x i64> %num, <i64 1235195, i64 1235195>
ret <2 x i64> %result

View File

@ -969,7 +969,7 @@ define i64 @v_udiv_i64_pow2k_denom(i64 %num) {
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s6, 0x1000
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v2, 0
; CHECK-NEXT: s_mov_b32 s7, 0xfffff000
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, s6
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v2
@ -1108,7 +1108,7 @@ define <2 x i64> @v_udiv_v2i64_pow2k_denom(<2 x i64> %num) {
; GISEL-NEXT: v_cvt_f32_u32_e32 v4, s10
; GISEL-NEXT: s_sub_u32 s8, 0, s10
; GISEL-NEXT: s_cselect_b32 s4, 1, 0
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v5, 0
; GISEL-NEXT: v_mov_b32_e32 v6, v4
; GISEL-NEXT: s_and_b32 s4, s4, 1
; GISEL-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
@ -1370,7 +1370,7 @@ define <2 x i64> @v_udiv_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_movk_i32 s10, 0x1000
; CGP-NEXT: v_cvt_f32_u32_e32 v4, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CGP-NEXT: v_cvt_f32_u32_e32 v5, s10
; CGP-NEXT: s_mov_b32 s8, 0xfffff000
; CGP-NEXT: v_mov_b32_e32 v6, v5
@ -1630,7 +1630,7 @@ define i64 @v_udiv_i64_oddk_denom(i64 %num) {
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_mov_b32 s6, 0x12d8fb
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v2, 0
; CHECK-NEXT: s_mov_b32 s7, 0xffed2705
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, s6
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v2
@ -1769,7 +1769,7 @@ define <2 x i64> @v_udiv_v2i64_oddk_denom(<2 x i64> %num) {
; GISEL-NEXT: v_cvt_f32_u32_e32 v4, s10
; GISEL-NEXT: s_sub_u32 s8, 0, s10
; GISEL-NEXT: s_cselect_b32 s4, 1, 0
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v5, 0
; GISEL-NEXT: v_mov_b32_e32 v6, v4
; GISEL-NEXT: s_and_b32 s4, s4, 1
; GISEL-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
@ -2031,7 +2031,7 @@ define <2 x i64> @v_udiv_v2i64_oddk_denom(<2 x i64> %num) {
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_mov_b32 s10, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v4, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CGP-NEXT: v_cvt_f32_u32_e32 v5, s10
; CGP-NEXT: s_mov_b32 s8, 0xffed2705
; CGP-NEXT: v_mov_b32_e32 v6, v5
@ -3133,7 +3133,7 @@ define <2 x i64> @v_udiv_v2i64_24bit(<2 x i64> %num, <2 x i64> %den) {
; GISEL: ; %bb.0:
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: s_mov_b32 s6, 0xffffff
; GISEL-NEXT: v_cvt_f32_u32_e32 v1, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v3, s6, v4
; GISEL-NEXT: v_and_b32_e32 v4, s6, v6
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, v3

View File

@ -955,7 +955,7 @@ define i64 @v_urem_i64_pow2k_denom(i64 %num) {
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_movk_i32 s6, 0x1000
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v2, 0
; CHECK-NEXT: s_mov_b32 s7, 0xfffff000
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, s6
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v2
@ -1092,7 +1092,7 @@ define <2 x i64> @v_urem_v2i64_pow2k_denom(<2 x i64> %num) {
; GISEL-NEXT: v_cvt_f32_u32_e32 v4, s10
; GISEL-NEXT: s_sub_u32 s8, 0, s10
; GISEL-NEXT: s_cselect_b32 s4, 1, 0
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v5, 0
; GISEL-NEXT: v_mov_b32_e32 v6, v4
; GISEL-NEXT: s_and_b32 s4, s4, 1
; GISEL-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
@ -1350,7 +1350,7 @@ define <2 x i64> @v_urem_v2i64_pow2k_denom(<2 x i64> %num) {
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_movk_i32 s10, 0x1000
; CGP-NEXT: v_cvt_f32_u32_e32 v4, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CGP-NEXT: v_cvt_f32_u32_e32 v5, s10
; CGP-NEXT: s_mov_b32 s8, 0xfffff000
; CGP-NEXT: v_mov_b32_e32 v6, v5
@ -1606,7 +1606,7 @@ define i64 @v_urem_i64_oddk_denom(i64 %num) {
; CHECK: ; %bb.0:
; CHECK-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CHECK-NEXT: s_mov_b32 s6, 0x12d8fb
; CHECK-NEXT: v_cvt_f32_u32_e32 v2, 0
; CHECK-NEXT: v_cvt_f32_ubyte0_e32 v2, 0
; CHECK-NEXT: s_mov_b32 s7, 0xffed2705
; CHECK-NEXT: v_cvt_f32_u32_e32 v3, s6
; CHECK-NEXT: v_mac_f32_e32 v3, 0x4f800000, v2
@ -1743,7 +1743,7 @@ define <2 x i64> @v_urem_v2i64_oddk_denom(<2 x i64> %num) {
; GISEL-NEXT: v_cvt_f32_u32_e32 v4, s10
; GISEL-NEXT: s_sub_u32 s8, 0, s10
; GISEL-NEXT: s_cselect_b32 s4, 1, 0
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v5, 0
; GISEL-NEXT: v_mov_b32_e32 v6, v4
; GISEL-NEXT: s_and_b32 s4, s4, 1
; GISEL-NEXT: v_mac_f32_e32 v4, 0x4f800000, v5
@ -2001,7 +2001,7 @@ define <2 x i64> @v_urem_v2i64_oddk_denom(<2 x i64> %num) {
; CGP: ; %bb.0:
; CGP-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; CGP-NEXT: s_mov_b32 s10, 0x12d8fb
; CGP-NEXT: v_cvt_f32_u32_e32 v4, 0
; CGP-NEXT: v_cvt_f32_ubyte0_e32 v4, 0
; CGP-NEXT: v_cvt_f32_u32_e32 v5, s10
; CGP-NEXT: s_mov_b32 s8, 0xffed2705
; CGP-NEXT: v_mov_b32_e32 v6, v5
@ -3088,7 +3088,7 @@ define <2 x i64> @v_urem_v2i64_24bit(<2 x i64> %num, <2 x i64> %den) {
; GISEL: ; %bb.0:
; GISEL-NEXT: s_waitcnt vmcnt(0) expcnt(0) lgkmcnt(0)
; GISEL-NEXT: s_mov_b32 s6, 0xffffff
; GISEL-NEXT: v_cvt_f32_u32_e32 v1, 0
; GISEL-NEXT: v_cvt_f32_ubyte0_e32 v1, 0
; GISEL-NEXT: v_and_b32_e32 v3, s6, v4
; GISEL-NEXT: v_and_b32_e32 v4, s6, v6
; GISEL-NEXT: v_cvt_f32_u32_e32 v5, v3

View File

@ -642,3 +642,32 @@ TEST_F(AArch64GISelMITest, TestKnownBitsMergeValues) {
  EXPECT_EQ(TestVal, Res.One.getZExtValue());
  EXPECT_EQ(~TestVal, Res.Zero.getZExtValue());
}

TEST_F(AArch64GISelMITest, TestKnownBitsUnmergeValues) {
  StringRef MIRString = R"(
   %val:_(s64) = G_CONSTANT i64 12379570962110515608
   %val0:_(s16), %val1:_(s16), %val2:_(s16), %val3:_(s16) = G_UNMERGE_VALUES %val
   %part0:_(s16) = COPY %val0
   %part1:_(s16) = COPY %val1
   %part2:_(s16) = COPY %val2
   %part3:_(s16) = COPY %val3
)";
  setUp(MIRString);
  if (!TM)
    return;

  const uint64_t TestVal = UINT64_C(0xabcd123344568998);
  GISelKnownBits Info(*MF);

  int Offset = -4;
  for (unsigned BitOffset = 0; BitOffset != 64; BitOffset += 16, ++Offset) {
    Register Part = Copies[Copies.size() + Offset];
    KnownBits PartKnown = Info.getKnownBits(Part);
    EXPECT_EQ(16u, PartKnown.getBitWidth());

    uint16_t PartTestVal = static_cast<uint16_t>(TestVal >> BitOffset);
    EXPECT_EQ(PartTestVal, PartKnown.One.getZExtValue());
    EXPECT_EQ(static_cast<uint16_t>(~PartTestVal), PartKnown.Zero.getZExtValue());
  }
}