Constant fold launder of null and undef
Summary:
This might be useful because clang will add some barriers for pointer
comparisons.

Reviewers: majnemer, dberlin, hfinkel, nlewycky, davide, rsmith, amharc, kuhar

Subscribers: davide, amharc, llvm-commits

Differential Revision: https://reviews.llvm.org/D32423

llvm-svn: 332786
commit a26a08cb52 (parent dbb6f8a817)
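As a rough illustration of the motivation (not part of the patch; function and value names are invented): when clang inserts an invariant.group barrier into a pointer comparison, a simple null check can end up looking like the IR below, and folding the laundered null lets instcombine collapse the comparison again.

; Hedged sketch: a null check behind a launder barrier.
define i1 @null_check_behind_barrier() {
  %p = call i8* @llvm.launder.invariant.group.p0i8(i8* null)
  %c = icmp eq i8* %p, null
  ret i1 %c        ; with this fold, -instcombine can turn this into: ret i1 true
}

declare i8* @llvm.launder.invariant.group.p0i8(i8*)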
@@ -1392,6 +1392,7 @@ bool llvm::canConstantFoldCallTo(ImmutableCallSite CS, const Function *F) {
   case Intrinsic::fma:
   case Intrinsic::fmuladd:
   case Intrinsic::copysign:
+  case Intrinsic::launder_invariant_group:
   case Intrinsic::round:
   case Intrinsic::masked_load:
   case Intrinsic::sadd_with_overflow:
@@ -1594,9 +1595,19 @@ Constant *ConstantFoldScalarCall(StringRef Name, unsigned IntrinsicID, Type *Ty,
       if (IntrinsicID == Intrinsic::cos)
         return Constant::getNullValue(Ty);
       if (IntrinsicID == Intrinsic::bswap ||
-          IntrinsicID == Intrinsic::bitreverse)
+          IntrinsicID == Intrinsic::bitreverse ||
+          IntrinsicID == Intrinsic::launder_invariant_group)
         return Operands[0];
     }
+
+    if (isa<ConstantPointerNull>(Operands[0]) &&
+        Operands[0]->getType()->getPointerAddressSpace() == 0) {
+      // launder(null) == null iff in addrspace 0
+      if (IntrinsicID == Intrinsic::launder_invariant_group)
+        return Operands[0];
+      return nullptr;
+    }
+
     if (auto *Op = dyn_cast<ConstantFP>(Operands[0])) {
       if (IntrinsicID == Intrinsic::convert_to_fp16) {
         APFloat Val(Op->getValueAPF());
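A hedged reading of the two folds added above: an undef operand is treated the same way as for bswap/bitreverse, so the launder simply passes its operand through; a null operand is folded only in address space 0, where null is assumed not to address a valid object, while in other address spaces null may be a usable address, so the barrier is kept. A minimal sketch (names invented, address space 1 chosen arbitrarily):

; Fold fires in addrspace 0 ...
define i8* @laundered_null_as0() {
  %p = call i8* @llvm.launder.invariant.group.p0i8(i8* null)
  ret i8* %p                   ; expected to become: ret i8* null
}

; ... but not in other address spaces.
define i8 addrspace(1)* @laundered_null_as1() {
  %p = call i8 addrspace(1)* @llvm.launder.invariant.group.p1i8(i8 addrspace(1)* null)
  ret i8 addrspace(1)* %p      ; expected to stay as-is
}

declare i8* @llvm.launder.invariant.group.p0i8(i8*)
declare i8 addrspace(1)* @llvm.launder.invariant.group.p1i8(i8 addrspace(1)*)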
@@ -384,8 +384,9 @@ bool llvm::wouldInstructionBeTriviallyDead(Instruction *I,
   // Special case intrinsics that "may have side effects" but can be deleted
   // when dead.
   if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
-    // Safe to delete llvm.stacksave if dead.
-    if (II->getIntrinsicID() == Intrinsic::stacksave)
+    // Safe to delete llvm.stacksave and launder.invariant.group if dead.
+    if (II->getIntrinsicID() == Intrinsic::stacksave ||
+        II->getIntrinsicID() == Intrinsic::launder_invariant_group)
       return true;
 
     // Lifetime intrinsics are dead when their right-hand is undef.
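To make the wouldInstructionBeTriviallyDead change concrete, a hedged sketch (function name invented): a launder whose result has no uses now counts as trivially dead, so passes that consult this helper (e.g. DCE or instcombine) can delete the call.

; The unused launder below is now trivially dead and can be removed.
define void @unused_launder(i8* %p) {
  %q = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
  ret void
}

declare i8* @llvm.launder.invariant.group.p0i8(i8*)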
@@ -0,0 +1,34 @@
+; RUN: opt -instcombine -S < %s | FileCheck %s
+
+; CHECK-LABEL: define i8* @simplifyNullLaunder()
+define i8* @simplifyNullLaunder() {
+; CHECK-NEXT: ret i8* null
+  %b2 = call i8* @llvm.launder.invariant.group.p0i8(i8* null)
+  ret i8* %b2
+}
+
+; CHECK-LABEL: define i8 addrspace(42)* @dontsimplifyNullLaunderForDifferentAddrspace()
+define i8 addrspace(42)* @dontsimplifyNullLaunderForDifferentAddrspace() {
+; CHECK: %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* null)
+; CHECK: ret i8 addrspace(42)* %b2
+  %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* null)
+  ret i8 addrspace(42)* %b2
+}
+
+; CHECK-LABEL: define i8* @simplifyUndefLaunder()
+define i8* @simplifyUndefLaunder() {
+; CHECK-NEXT: ret i8* undef
+  %b2 = call i8* @llvm.launder.invariant.group.p0i8(i8* undef)
+  ret i8* %b2
+}
+
+; CHECK-LABEL: define i8 addrspace(42)* @simplifyUndefLaunder2()
+define i8 addrspace(42)* @simplifyUndefLaunder2() {
+; CHECK-NEXT: ret i8 addrspace(42)* undef
+  %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* undef)
+  ret i8 addrspace(42)* %b2
+}
+
+
+declare i8* @llvm.launder.invariant.group.p0i8(i8*)
+declare i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)*)
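As a usage note (the test file's path is not visible in this view, so the name below is a placeholder): the new checks can be exercised directly with the tools from the RUN line, or through lit once the file is in the test tree.

opt -instcombine -S < launder-fold-test.ll | FileCheck launder-fold-test.ll
llvm-lit launder-fold-test.ll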