Fixed line endings.

llvm-svn: 244021
Simon Pilgrim 2015-08-05 08:18:00 +00:00
parent 17caf326e5
commit 18617d193f
1 changed file with 72 additions and 72 deletions

@@ -194,44 +194,44 @@ Instruction *InstCombiner::SimplifyMemSet(MemSetInst *MI) {
    return MI;
  }

  return nullptr;
}

static Value *SimplifyX86immshift(const IntrinsicInst &II,
                                  InstCombiner::BuilderTy &Builder,
                                  bool ShiftLeft) {
  // Simplify if count is constant. To 0 if >= BitWidth,
  // otherwise to shl/lshr.
  auto CDV = dyn_cast<ConstantDataVector>(II.getArgOperand(1));
  auto CInt = dyn_cast<ConstantInt>(II.getArgOperand(1));
  if (!CDV && !CInt)
    return nullptr;
  ConstantInt *Count;
  if (CDV)
    Count = cast<ConstantInt>(CDV->getElementAsConstant(0));
  else
    Count = CInt;
  auto Vec = II.getArgOperand(0);
  auto VT = cast<VectorType>(Vec->getType());
  auto SVT = VT->getElementType();
  if (Count->getZExtValue() > (SVT->getPrimitiveSizeInBits() - 1))
    return ConstantAggregateZero::get(VT);
  unsigned VWidth = VT->getNumElements();
  // Get a constant vector of the same type as the first operand.
  auto VTCI = ConstantInt::get(VT->getElementType(), Count->getZExtValue());
  if (ShiftLeft)
    return Builder.CreateShl(Vec, Builder.CreateVectorSplat(VWidth, VTCI));
  return Builder.CreateLShr(Vec, Builder.CreateVectorSplat(VWidth, VTCI));
}

static Value *SimplifyX86extend(const IntrinsicInst &II,
                                InstCombiner::BuilderTy &Builder,
                                bool SignExtend) {
  VectorType *SrcTy = cast<VectorType>(II.getArgOperand(0)->getType());
  VectorType *DstTy = cast<VectorType>(II.getType());
  unsigned NumDstElts = DstTy->getNumElements();
@@ -750,46 +750,46 @@ Instruction *InstCombiner::visitCallInst(CallInst &CI) {
      II->setArgOperand(0, V);
      return II;
    }
    break;
  }

  // Constant fold lshr( <A x Bi>, Ci ).
  case Intrinsic::x86_sse2_psrl_d:
  case Intrinsic::x86_sse2_psrl_q:
  case Intrinsic::x86_sse2_psrl_w:
  case Intrinsic::x86_sse2_psrli_d:
  case Intrinsic::x86_sse2_psrli_q:
  case Intrinsic::x86_sse2_psrli_w:
  case Intrinsic::x86_avx2_psrl_d:
  case Intrinsic::x86_avx2_psrl_q:
  case Intrinsic::x86_avx2_psrl_w:
  case Intrinsic::x86_avx2_psrli_d:
  case Intrinsic::x86_avx2_psrli_q:
  case Intrinsic::x86_avx2_psrli_w:
    if (Value *V = SimplifyX86immshift(*II, *Builder, false))
      return ReplaceInstUsesWith(*II, V);
    break;

  // Constant fold shl( <A x Bi>, Ci ).
  case Intrinsic::x86_sse2_psll_d:
  case Intrinsic::x86_sse2_psll_q:
  case Intrinsic::x86_sse2_psll_w:
  case Intrinsic::x86_sse2_pslli_d:
  case Intrinsic::x86_sse2_pslli_q:
  case Intrinsic::x86_sse2_pslli_w:
  case Intrinsic::x86_avx2_psll_d:
  case Intrinsic::x86_avx2_psll_q:
  case Intrinsic::x86_avx2_psll_w:
  case Intrinsic::x86_avx2_pslli_d:
  case Intrinsic::x86_avx2_pslli_q:
  case Intrinsic::x86_avx2_pslli_w:
    if (Value *V = SimplifyX86immshift(*II, *Builder, true))
      return ReplaceInstUsesWith(*II, V);
    break;

  case Intrinsic::x86_sse41_pmovsxbd:
  case Intrinsic::x86_sse41_pmovsxbq:
  case Intrinsic::x86_sse41_pmovsxbw:
  case Intrinsic::x86_sse41_pmovsxdq:
  case Intrinsic::x86_sse41_pmovsxwd:
  case Intrinsic::x86_sse41_pmovsxwq:
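
The visitCallInst switch above only routes the intrinsics to the helper: the psrl/psrli forms call SimplifyX86immshift with ShiftLeft set to false and fold to an IR lshr, while the psll/pslli forms pass true and fold to an IR shl. The sketch below, again plain C++ with invented names rather than LLVM APIs, mirrors the splat-then-shift shape of the replacement for a <4 x i32> vector.

#include <array>
#include <cstddef>
#include <cstdint>
#include <iostream>

using V4i32 = std::array<uint32_t, 4>;

// Invented names, plain C++ only: mirrors the shape of the replacement built
// by SimplifyX86immshift, i.e. splat the constant count across the lanes and
// then shift each lane (the count is already known to be in range here).
static V4i32 splat(uint32_t C) { return {C, C, C, C}; }

static V4i32 shiftPerLane(const V4i32 &Vec, const V4i32 &Counts, bool ShiftLeft) {
  V4i32 R{};
  for (std::size_t I = 0; I != Vec.size(); ++I)
    R[I] = ShiftLeft ? (Vec[I] << Counts[I]) : (Vec[I] >> Counts[I]);
  return R;
}

int main() {
  V4i32 In = {0x10, 0x20, 0x40, 0x80};
  // A psrli.d by a constant 4 becomes: lshr <4 x i32> %in, <i32 4, i32 4, i32 4, i32 4>.
  V4i32 Out = shiftPerLane(In, splat(4), /*ShiftLeft=*/false);
  for (uint32_t E : Out)
    std::cout << E << ' '; // prints: 1 2 4 8
  std::cout << '\n';
  return 0;
}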