From d1d915b8da91e3e527706c838e2cb4b338336e7a Mon Sep 17 00:00:00 2001
From: Roman Lebedev <lebedev.ri@gmail.com>
Date: Fri, 31 May 2019 08:24:07 +0000
Subject: [PATCH] [NFC][InstCombine] Copy add/sub constant-folding tests from
 codegen

Last three patterns are missed.

llvm-svn: 362209
---
 .../InstCombine/addsub-constant-folding.ll    | 536 ++++++++++++++++++
 1 file changed, 536 insertions(+)
 create mode 100644 llvm/test/Transforms/InstCombine/addsub-constant-folding.ll

diff --git a/llvm/test/Transforms/InstCombine/addsub-constant-folding.ll b/llvm/test/Transforms/InstCombine/addsub-constant-folding.ll
new file mode 100644
index 000000000000..fc98f249fc52
--- /dev/null
+++ b/llvm/test/Transforms/InstCombine/addsub-constant-folding.ll
@@ -0,0 +1,536 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
+; RUN: opt < %s -instcombine -S | FileCheck %s
+
+declare void @use(i32 %arg)
+declare void @vec_use(<4 x i32> %arg)
+
+; (x+c1)+c2
+
+define i32 @add_const_add_const(i32 %arg) {
+; CHECK-LABEL: @add_const_add_const(
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], 10
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @add_const_add_const_extrause(i32 %arg) {
+; CHECK-LABEL: @add_const_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], 10
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_add_const_add_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_add_const(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 10, i32 10, i32 10, i32 10>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_add_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 8, i32 8, i32 8, i32 8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], <i32 10, i32 10, i32 10, i32 10>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_add_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_add_const_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg,
+  %t1 = add <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; (x+c1)-c2
+
+define i32 @add_const_sub_const(i32 %arg) {
+; CHECK-LABEL: @add_const_sub_const(
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], 6
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @add_const_sub_const_extrause(i32 %arg) {
+; CHECK-LABEL: @add_const_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], 6
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_add_const_sub_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_sub_const(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 6, i32 6, i32 6, i32 6>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_sub_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 8, i32 8, i32 8, i32 8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], <i32 6, i32 6, i32 6, i32 6>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_sub_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_sub_const_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg,
+  %t1 = sub <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; c2-(x+c1)
+
+define i32 @add_const_const_sub(i32 %arg) {
+; CHECK-LABEL: @add_const_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = sub i32 -6, [[ARG:%.*]]
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define i32 @add_const_const_sub_extrause(i32 %arg) {
+; CHECK-LABEL: @add_const_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], 8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub i32 -6, [[ARG]]
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = add i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_add_const_const_sub(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 -6, i32 -6, i32 -6, i32 -6>, [[ARG:%.*]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_const_sub_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 8, i32 8, i32 8, i32 8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 -6, i32 -6, i32 -6, i32 -6>, [[ARG]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_add_const_const_sub_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_add_const_const_sub_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> , [[ARG:%.*]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = add <4 x i32> %arg,
+  %t1 = sub <4 x i32> , %t0
+  ret <4 x i32> %t1
+}
+
+; (x-c1)+c2
+
+define i32 @sub_const_add_const(i32 %arg) {
+; CHECK-LABEL: @sub_const_add_const(
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -6
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @sub_const_add_const_extrause(i32 %arg) {
+; CHECK-LABEL: @sub_const_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], -6
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_sub_const_add_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_add_const(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -6, i32 -6, i32 -6, i32 -6>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_add_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -8, i32 -8, i32 -8, i32 -8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], <i32 -6, i32 -6, i32 -6, i32 -6>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_add_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_add_const_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg,
+  %t1 = add <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; (x-c1)-c2
+
+define i32 @sub_const_sub_const(i32 %arg) {
+; CHECK-LABEL: @sub_const_sub_const(
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -10
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @sub_const_sub_const_extrause(i32 %arg) {
+; CHECK-LABEL: @sub_const_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG]], -10
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_sub_const_sub_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_sub_const(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -10, i32 -10, i32 -10, i32 -10>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_sub_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -8, i32 -8, i32 -8, i32 -8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG]], <i32 -10, i32 -10, i32 -10, i32 -10>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_sub_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_sub_const_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg,
+  %t1 = sub <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; c2-(x-c1)
+
+define i32 @sub_const_const_sub(i32 %arg) {
+; CHECK-LABEL: @sub_const_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG:%.*]]
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define i32 @sub_const_const_sub_extrause(i32 %arg) {
+; CHECK-LABEL: @sub_const_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add i32 [[ARG:%.*]], -8
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub i32 10, [[ARG]]
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 %arg, 8
+  call void @use(i32 %t0)
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_sub_const_const_sub(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 10, i32 10, i32 10, i32 10>, [[ARG:%.*]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_const_sub_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -8, i32 -8, i32 -8, i32 -8>
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 10, i32 10, i32 10, i32 10>, [[ARG]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg, <i32 8, i32 8, i32 8, i32 8>
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_sub_const_const_sub_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_sub_const_const_sub_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> , [[ARG:%.*]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> %arg,
+  %t1 = sub <4 x i32> , %t0
+  ret <4 x i32> %t1
+}
+
+; (c1-x)+c2
+; FIXME
+
+define i32 @const_sub_add_const(i32 %arg) {
+; CHECK-LABEL: @const_sub_add_const(
+; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[T0]], 2
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @const_sub_add_const_extrause(i32 %arg) {
+; CHECK-LABEL: @const_sub_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[T0]], 2
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  call void @use(i32 %t0)
+  %t1 = add i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_const_sub_add_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_add_const(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]], <i32 2, i32 2, i32 2, i32 2>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_add_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_add_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, [[ARG:%.*]]
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]], <i32 2, i32 2, i32 2, i32 2>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = add <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_add_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_add_const_nonsplat(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> , [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> , %arg
+  %t1 = add <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; (c1-x)-c2
+; FIXME
+
+define i32 @const_sub_sub_const(i32 %arg) {
+; CHECK-LABEL: @const_sub_sub_const(
+; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[T0]], -2
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define i32 @const_sub_sub_const_extrause(i32 %arg) {
+; CHECK-LABEL: @const_sub_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[T0]], -2
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  call void @use(i32 %t0)
+  %t1 = sub i32 %t0, 2
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_const_sub_sub_const(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_sub_const(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]], <i32 -2, i32 -2, i32 -2, i32 -2>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_sub_const_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_sub_const_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, [[ARG:%.*]]
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]], <i32 -2, i32 -2, i32 -2, i32 -2>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> %t0, <i32 2, i32 2, i32 2, i32 2>
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_sub_const_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_sub_const_nonsplat(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> , [[ARG:%.*]]
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[T0]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> , %arg
+  %t1 = sub <4 x i32> %t0,
+  ret <4 x i32> %t1
+}
+
+; c2-(c1-x)
+; FIXME
+
+define i32 @const_sub_const_sub(i32 %arg) {
+; CHECK-LABEL: @const_sub_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = add i32 [[ARG:%.*]], -6
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define i32 @const_sub_const_sub_extrause(i32 %arg) {
+; CHECK-LABEL: @const_sub_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub i32 8, [[ARG:%.*]]
+; CHECK-NEXT:    call void @use(i32 [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub i32 2, [[T0]]
+; CHECK-NEXT:    ret i32 [[T1]]
+;
+  %t0 = sub i32 8, %arg
+  call void @use(i32 %t0)
+  %t1 = sub i32 2, %t0
+  ret i32 %t1
+}
+
+define <4 x i32> @vec_const_sub_const_sub(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_const_sub(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]], <i32 -6, i32 -6, i32 -6, i32 -6>
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_const_sub_extrause(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_const_sub_extrause(
+; CHECK-NEXT:    [[T0:%.*]] = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, [[ARG:%.*]]
+; CHECK-NEXT:    call void @vec_use(<4 x i32> [[T0]])
+; CHECK-NEXT:    [[T1:%.*]] = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, [[T0]]
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> <i32 8, i32 8, i32 8, i32 8>, %arg
+  call void @vec_use(<4 x i32> %t0)
+  %t1 = sub <4 x i32> <i32 2, i32 2, i32 2, i32 2>, %t0
+  ret <4 x i32> %t1
+}
+
+define <4 x i32> @vec_const_sub_const_sub_nonsplat(<4 x i32> %arg) {
+; CHECK-LABEL: @vec_const_sub_const_sub_nonsplat(
+; CHECK-NEXT:    [[T1:%.*]] = add <4 x i32> [[ARG:%.*]],
+; CHECK-NEXT:    ret <4 x i32> [[T1]]
+;
+  %t0 = sub <4 x i32> , %arg
+  %t1 = sub <4 x i32> , %t0
+  ret <4 x i32> %t1
+}
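
For readers who want to see the transform these tests pin down, here is a minimal standalone sketch; it is not part of the patch, and the function name @example is illustrative. Running it through opt -instcombine -S is expected to merge the two constant operands into a single add, which is what the CHECK lines above assert for the (x+c1)+c2 pattern:

; Input: two chained adds with constant operands.
define i32 @example(i32 %x) {
  %a = add i32 %x, 8   ; x + c1
  %b = add i32 %a, 2   ; (x + c1) + c2
  ret i32 %b
}
; Expected after -instcombine: the constants are reassociated and folded,
; leaving a single instruction equivalent to "add i32 %x, 10".

The extrause variants keep %t0 alive through @use/@vec_use to check that the fold still fires (or deliberately does not) when the intermediate value has another user, and the nonsplat variants cover vector constants whose lanes differ.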