[InstCombine] fold not ops around min/max intrinsics

This is another step towards parity with the existing
cmp+select folds (see D98152).
Commit c0bbd0cc35 (parent aca2613330), authored by Sanjay Patel on 2021-04-07 17:19:15 -04:00.
2 changed files with 30 additions and 11 deletions

View File

@ -3446,6 +3446,25 @@ Instruction *InstCombinerImpl::visitXor(BinaryOperator &I) {
if (Instruction *CastedXor = foldCastedBitwiseLogic(I))
return CastedXor;
// Eliminate a bitwise 'not' op of 'not' min/max by inverting the min/max:
// ~min(~X, ~Y) --> max(X, Y)
// ~max(~X, Y) --> min(X, ~Y)
auto *II = dyn_cast<IntrinsicInst>(Op0);
if (II && match(Op1, m_AllOnes())) {
if (match(Op0, m_MaxOrMin(m_Not(m_Value(X)), m_Not(m_Value(Y))))) {
Intrinsic::ID InvID = getInverseMinMaxIntrinsic(II->getIntrinsicID());
Value *InvMaxMin = Builder.CreateBinaryIntrinsic(InvID, X, Y);
return replaceInstUsesWith(I, InvMaxMin);
}
if (match(Op0, m_OneUse(m_c_MaxOrMin(m_Not(m_Value(X)), m_Value(Y))))) {
Intrinsic::ID InvID = getInverseMinMaxIntrinsic(II->getIntrinsicID());
Value *NotY = Builder.CreateNot(Y);
Value *InvMaxMin = Builder.CreateBinaryIntrinsic(InvID, X, NotY);
return replaceInstUsesWith(I, InvMaxMin);
}
}
// TODO: Remove folds if we canonicalize to intrinsics (see above).
// Eliminate a bitwise 'not' op of 'not' min/max by inverting the min/max:
//
// %notx = xor i32 %x, -1

View File

@ -526,9 +526,8 @@ define i8 @not_smax_of_nots(i8 %x, i8 %y) {
; CHECK-NEXT: call void @use(i8 [[NOTX]])
; CHECK-NEXT: [[NOTY:%.*]] = xor i8 [[Y:%.*]], -1
; CHECK-NEXT: call void @use(i8 [[NOTY]])
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.smax.i8(i8 [[NOTX]], i8 [[NOTY]])
; CHECK-NEXT: [[NOTM:%.*]] = xor i8 [[M]], -1
; CHECK-NEXT: ret i8 [[NOTM]]
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.smin.i8(i8 [[X]], i8 [[Y]])
; CHECK-NEXT: ret i8 [[TMP1]]
;
%notx = xor i8 %x, -1
call void @use(i8 %notx)
@ -547,8 +546,8 @@ define i8 @not_smin_of_nots(i8 %x, i8 %y) {
; CHECK-NEXT: call void @use(i8 [[NOTY]])
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.smin.i8(i8 [[NOTX]], i8 [[NOTY]])
; CHECK-NEXT: call void @use(i8 [[M]])
; CHECK-NEXT: [[NOTM:%.*]] = xor i8 [[M]], -1
; CHECK-NEXT: ret i8 [[NOTM]]
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.smax.i8(i8 [[X]], i8 [[Y]])
; CHECK-NEXT: ret i8 [[TMP1]]
;
%notx = xor i8 %x, -1
call void @use(i8 %notx)
@ -564,9 +563,9 @@ define i8 @not_umax_of_not(i8 %x, i8 %y) {
; CHECK-LABEL: @not_umax_of_not(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
; CHECK-NEXT: call void @use(i8 [[NOTX]])
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.umax.i8(i8 [[NOTX]], i8 [[Y:%.*]])
; CHECK-NEXT: [[NOTM:%.*]] = xor i8 [[M]], -1
; CHECK-NEXT: ret i8 [[NOTM]]
; CHECK-NEXT: [[TMP1:%.*]] = xor i8 [[Y:%.*]], -1
; CHECK-NEXT: [[TMP2:%.*]] = call i8 @llvm.umin.i8(i8 [[X]], i8 [[TMP1]])
; CHECK-NEXT: ret i8 [[TMP2]]
;
%notx = xor i8 %x, -1
call void @use(i8 %notx)
@ -575,6 +574,8 @@ define i8 @not_umax_of_not(i8 %x, i8 %y) {
ret i8 %notm
}
; Negative test - this would require an extra instruction.
define i8 @not_umin_of_not(i8 %x, i8 %y) {
; CHECK-LABEL: @not_umin_of_not(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
@ -596,9 +597,8 @@ define i8 @not_umin_of_not_constant_op(i8 %x) {
; CHECK-LABEL: @not_umin_of_not_constant_op(
; CHECK-NEXT: [[NOTX:%.*]] = xor i8 [[X:%.*]], -1
; CHECK-NEXT: call void @use(i8 [[NOTX]])
; CHECK-NEXT: [[M:%.*]] = call i8 @llvm.umin.i8(i8 [[NOTX]], i8 42)
; CHECK-NEXT: [[NOTM:%.*]] = xor i8 [[M]], -1
; CHECK-NEXT: ret i8 [[NOTM]]
; CHECK-NEXT: [[TMP1:%.*]] = call i8 @llvm.umax.i8(i8 [[X]], i8 -43)
; CHECK-NEXT: ret i8 [[TMP1]]
;
%notx = xor i8 %x, -1
call void @use(i8 %notx)