From 9a70c7c02ad8a9e5879f9139d67bf74674b472c0 Mon Sep 17 00:00:00 2001
From: Chad Rosier
Date: Mon, 6 Mar 2017 21:20:00 +0000
Subject: [PATCH] [AArch64][Redundant Copy Elim] Add support for CMN and shifted imm.

This patch extends the AArch64 redundant copy elimination pass to handle
CMN instructions as well as shifted immediates.

Differential Revision: https://reviews.llvm.org/D30576.

llvm-svn: 297078
---
 .../AArch64RedundantCopyElimination.cpp      |  19 ++-
 .../CodeGen/AArch64/machine-copy-remove.mir  | 135 ++++++++++++++++++
 2 files changed, 148 insertions(+), 6 deletions(-)

diff --git a/llvm/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp b/llvm/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
index a935d39ccfd8..f3c8e7e9bdc2 100644
--- a/llvm/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
+++ b/llvm/lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
@@ -154,30 +154,37 @@ AArch64RedundantCopyElimination::knownRegValInBlock(
   // Track clobbered registers.
   trackRegDefs(PredI, ClobberedRegs, TRI);
 
+  bool IsCMN = false;
   switch (PredI.getOpcode()) {
   default:
     break;
+  // CMN is an alias for ADDS with a dead destination register.
+  case AArch64::ADDSWri:
+  case AArch64::ADDSXri:
+    IsCMN = true;
   // CMP is an alias for SUBS with a dead destination register.
   case AArch64::SUBSWri:
   case AArch64::SUBSXri: {
-    unsigned SrcReg = PredI.getOperand(1).getReg();
+    MCPhysReg SrcReg = PredI.getOperand(1).getReg();
+    // Must not be a symbolic immediate.
     if (!PredI.getOperand(2).isImm())
       return None;
 
-    // FIXME: For simplicity, give up on non-zero shifts.
-    if (PredI.getOperand(3).getImm())
-      return None;
-
     // The src register must not be modified between the cmp and conditional
     // branch. This includes a self-clobbering compare.
     if (ClobberedRegs[SrcReg])
       return None;
 
     // We've found the Cmp that sets NZCV.
+    int32_t KnownImm = PredI.getOperand(2).getImm();
+    int32_t Shift = PredI.getOperand(3).getImm();
+    KnownImm <<= Shift;
+    if (IsCMN)
+      KnownImm = -KnownImm;
     FirstUse = PredI;
-    return RegImm(PredI.getOperand(1).getReg(), PredI.getOperand(2).getImm());
+    return RegImm(SrcReg, KnownImm);
   }
   }

diff --git a/llvm/test/CodeGen/AArch64/machine-copy-remove.mir b/llvm/test/CodeGen/AArch64/machine-copy-remove.mir
index 56400ba9d74e..6f2d3a3009b0 100644
--- a/llvm/test/CodeGen/AArch64/machine-copy-remove.mir
+++ b/llvm/test/CodeGen/AArch64/machine-copy-remove.mir
@@ -535,3 +535,138 @@ body: |
 
   bb.2:
     RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm -1 in bb.1
+# Note: 32-bit compare/32-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test19
+# CHECK: ADDSWri %w0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test19
+tracksRegLiveness: true
+body: |
+  bb.0.entry:
+    successors: %bb.1, %bb.2
+    liveins: %w0, %x1
+
+    dead %wzr = ADDSWri killed %w0, 1, 0, implicit-def %nzcv
+    Bcc 1, %bb.2, implicit killed %nzcv
+    B %bb.1
+
+  bb.1:
+    successors: %bb.2
+    liveins: %x1
+
+    %w0 = MOVi32imm -1
+    STRWui killed %w0, killed %x1, 0
+
+  bb.2:
+    RET_ReallyLR
+...
+# Eliminate redundant MOVi64imm -1 in bb.1
+# Note: 64-bit compare/64-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test20
+# CHECK: ADDSXri %x0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi64imm
+name: test20
+tracksRegLiveness: true
+body: |
+  bb.0:
+    successors: %bb.1, %bb.2
+    liveins: %x0, %x1
+
+    dead %xzr = ADDSXri killed %x0, 1, 0, implicit-def %nzcv
+    Bcc 1, %bb.2, implicit killed %nzcv
+    B %bb.1
+
+  bb.1:
+    successors: %bb.2
+    liveins: %x1
+
+    %x0 = MOVi64imm -1
+    STRXui killed %x0, killed %x1, 0
+
+  bb.2:
+    RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm -1 in bb.1
+# Note: 64-bit compare/32-bit move imm
+# Kill marker should be removed from compare.
+# CHECK-LABEL: name: test21
+# CHECK: ADDSXri %x0, 1, 0, implicit-def %nzcv
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test21
+tracksRegLiveness: true
+body: |
+  bb.0.entry:
+    successors: %bb.1, %bb.2
+    liveins: %x0, %x1
+
+    dead %xzr = ADDSXri killed %x0, 1, 0, implicit-def %nzcv
+    Bcc 1, %bb.2, implicit killed %nzcv
+    B %bb.1
+
+  bb.1:
+    successors: %bb.2
+    liveins: %x1
+
+    %w0 = MOVi32imm -1
+    STRWui killed %w0, killed %x1, 0
+
+  bb.2:
+    RET_ReallyLR
+...
+# Don't eliminate MOVi64imm -1 in bb.1 as we don't necessarily know the upper 32-bits.
+# Note: 32-bit compare/64-bit move imm
+# CHECK-LABEL: name: test22
+# CHECK: bb.1:
+# CHECK: MOVi64imm
+name: test22
+tracksRegLiveness: true
+body: |
+  bb.0.entry:
+    successors: %bb.1, %bb.2
+    liveins: %w0, %x1
+
+    dead %wzr = ADDSWri killed %w0, 1, 0, implicit-def %nzcv
+    Bcc 1, %bb.2, implicit killed %nzcv
+    B %bb.1
+
+  bb.1:
+    successors: %bb.2
+    liveins: %x1
+
+    %x0 = MOVi64imm -1
+    STRXui killed %x0, killed %x1, 0
+
+  bb.2:
+    RET_ReallyLR
+...
+# Eliminate redundant MOVi32imm 4096 in bb.1 when the compare has a shifted immediate.
+# CHECK-LABEL: name: test23
+# CHECK: bb.1:
+# CHECK-NOT: MOVi32imm
+name: test23
+tracksRegLiveness: true
+body: |
+  bb.0.entry:
+    successors: %bb.1, %bb.2
+    liveins: %w0, %x1
+
+    dead %wzr = SUBSWri killed %w0, 1, 12, implicit-def %nzcv
+    Bcc 1, %bb.2, implicit killed %nzcv
+    B %bb.1
+
+  bb.1:
+    successors: %bb.2
+    liveins: %x1
+
+    %w0 = MOVi32imm 4096
+    STRWui killed %w0, killed %x1, 0
+
+  bb.2:
+    RET_ReallyLR
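
For readers following the new KnownImm logic in the C++ hunk above, here is a minimal standalone sketch of the computation, not part of the patch; knownValueAfterCompare is an illustrative name, not an LLVM API. A flag-setting CMP/CMN takes a 12-bit immediate optionally shifted left by 12, and CMN, being an ADDS alias, implies the compared register equals the negated immediate on the equal-taken path.

// Standalone illustration of how the known register value is derived from a
// CMP/CMN against a possibly shifted immediate (mirrors the KnownImm code in
// AArch64RedundantCopyElimination.cpp above); not LLVM code.
#include <cassert>
#include <cstdint>
#include <iostream>

// On the equal-taken path of a branch guarded by
//   CMP reg, #imm {, lsl #shift}   or   CMN reg, #imm {, lsl #shift}
// the register is known to hold (imm << shift), negated for CMN.
int32_t knownValueAfterCompare(int32_t Imm, int32_t Shift, bool IsCMN) {
  assert((Shift == 0 || Shift == 12) &&
         "AArch64 arithmetic immediates are shifted by 0 or 12 only");
  int32_t Known = Imm << Shift;
  if (IsCMN) // CMN reg, #imm sets flags for reg + imm, so reg == -imm.
    Known = -Known;
  return Known;
}

int main() {
  // cmn w0, #1: on the b.eq path w0 == -1 (tests 19-21 above).
  std::cout << knownValueAfterCompare(1, 0, /*IsCMN=*/true) << '\n';   // -1
  // cmp w0, #1, lsl #12: on the b.eq path w0 == 4096 (test 23 above).
  std::cout << knownValueAfterCompare(1, 12, /*IsCMN=*/false) << '\n'; // 4096
  return 0;
}

With these known values the pass can, for example, prove the MOVi32imm -1 in test19 and the MOVi32imm 4096 in test23 redundant on the taken path and delete them.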