[CallSite removal][CodeGen] Replace ImmutableCallSite with CallBase in isInTailCallPosition.

Craig Topper 2020-04-13 23:04:52 -07:00
parent fe8a2ad4a0
commit 3043093822
5 changed files with 18 additions and 21 deletions
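
The caller-side migration is mechanical: code that previously wrapped the call instruction in an ImmutableCallSite (or relied on the implicit conversion from an instruction pointer, as FastISel did) now passes the call by const reference. A minimal sketch of the change in isolation, assuming CI is a const CallInst * the caller already has and TM is the current TargetMachine (variable names here are illustrative, not from the patch):

    // Old interface (declared in the CodeGen Analysis header changed below):
    // the call had to be wrapped in an ImmutableCallSite first.
    bool WasTailEligible =
        CI->isTailCall() && isInTailCallPosition(ImmutableCallSite(CI), TM);

    // New interface: the CallBase (CallInst, InvokeInst, ...) is passed
    // directly by const reference.
    bool IsTailEligible =
        CI->isTailCall() && isInTailCallPosition(*CI, TM);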

llvm/include/llvm/CodeGen/Analysis.h

@@ -119,7 +119,7 @@ ISD::CondCode getICmpCondCode(ICmpInst::Predicate Pred);
 /// between it and the return.
 ///
 /// This function only tests target-independent requirements.
-bool isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM);
+bool isInTailCallPosition(const CallBase &Call, const TargetMachine &TM);
 
 /// Test if given that the input instruction is in the tail call position, if
 /// there is an attribute mismatch between the caller and the callee that will

llvm/lib/CodeGen/Analysis.cpp

@@ -509,9 +509,8 @@ static bool nextRealType(SmallVectorImpl<Type *> &SubTypes,
 /// between it and the return.
 ///
 /// This function only tests target-independent requirements.
-bool llvm::isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM) {
-  const Instruction *I = CS.getInstruction();
-  const BasicBlock *ExitBB = I->getParent();
+bool llvm::isInTailCallPosition(const CallBase &Call, const TargetMachine &TM) {
+  const BasicBlock *ExitBB = Call.getParent();
   const Instruction *Term = ExitBB->getTerminator();
   const ReturnInst *Ret = dyn_cast<ReturnInst>(Term);
 
@@ -525,15 +524,15 @@ bool llvm::isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM) {
   // been fully understood.
   if (!Ret &&
       ((!TM.Options.GuaranteedTailCallOpt &&
-        CS.getCallingConv() != CallingConv::Tail) || !isa<UnreachableInst>(Term)))
+        Call.getCallingConv() != CallingConv::Tail) || !isa<UnreachableInst>(Term)))
     return false;
 
   // If I will have a chain, make sure no other instruction that will have a
   // chain interposes between I and the return.
-  if (I->mayHaveSideEffects() || I->mayReadFromMemory() ||
-      !isSafeToSpeculativelyExecute(I))
+  if (Call.mayHaveSideEffects() || Call.mayReadFromMemory() ||
+      !isSafeToSpeculativelyExecute(&Call))
     for (BasicBlock::const_iterator BBI = std::prev(ExitBB->end(), 2);; --BBI) {
-      if (&*BBI == I)
+      if (&*BBI == &Call)
         break;
       // Debug info intrinsics do not get in the way of tail call optimization.
       if (isa<DbgInfoIntrinsic>(BBI))
@@ -551,7 +550,7 @@ bool llvm::isInTailCallPosition(ImmutableCallSite CS, const TargetMachine &TM) {
 
   const Function *F = ExitBB->getParent();
   return returnTypeIsEligibleForTailCall(
-      F, I, Ret, *TM.getSubtargetImpl(*F)->getTargetLowering());
+      F, &Call, Ret, *TM.getSubtargetImpl(*F)->getTargetLowering());
 }
 
 bool llvm::attributesPermitTailCall(const Function *F, const Instruction *I,

llvm/lib/CodeGen/GlobalISel/CallLowering.cpp

@@ -68,8 +68,7 @@ bool CallLowering::lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &CB,
   Info.SwiftErrorVReg = SwiftErrorVReg;
   Info.IsMustTailCall = CB.isMustTailCall();
   Info.IsTailCall =
-      CB.isTailCall() &&
-      isInTailCallPosition(ImmutableCallSite(&CB), MF.getTarget()) &&
+      CB.isTailCall() && isInTailCallPosition(CB, MF.getTarget()) &&
       (MF.getFunction()
            .getFnAttribute("disable-tail-calls")
            .getValueAsString() != "true");

llvm/lib/CodeGen/SelectionDAG/FastISel.cpp

@@ -1282,7 +1282,7 @@ bool FastISel::lowerCall(const CallInst *CI) {
   // Check if target-independent constraints permit a tail call here.
   // Target-dependent constraints are checked within fastLowerCall.
   bool IsTailCall = CI->isTailCall();
-  if (IsTailCall && !isInTailCallPosition(CI, TM))
+  if (IsTailCall && !isInTailCallPosition(*CI, TM))
     IsTailCall = false;
   if (IsTailCall && MF->getFunction()
                         .getFnAttribute("disable-tail-calls")

llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp

@@ -5698,7 +5698,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     Align SrcAlign = MCI.getSourceAlign().valueOrOne();
     Align Alignment = commonAlignment(DstAlign, SrcAlign);
     bool isVol = MCI.isVolatile();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     // FIXME: Support passing different dest/src alignments to the memcpy DAG
     // node.
     SDValue Root = isVol ? getRoot() : getMemoryRoot();
@@ -5720,7 +5720,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     Align SrcAlign = MCI.getSourceAlign().valueOrOne();
     Align Alignment = commonAlignment(DstAlign, SrcAlign);
     bool isVol = MCI.isVolatile();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     // FIXME: Support passing different dest/src alignments to the memcpy DAG
     // node.
     SDValue MC = DAG.getMemcpy(getRoot(), sdl, Dst, Src, Size, Alignment, isVol,
@@ -5738,7 +5738,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     // @llvm.memset defines 0 and 1 to both mean no alignment.
     Align Alignment = MSI.getDestAlign().valueOrOne();
     bool isVol = MSI.isVolatile();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     SDValue Root = isVol ? getRoot() : getMemoryRoot();
     SDValue MS = DAG.getMemset(Root, sdl, Op1, Op2, Op3, Alignment, isVol, isTC,
                                MachinePointerInfo(I.getArgOperand(0)));
@@ -5755,7 +5755,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     Align SrcAlign = MMI.getSourceAlign().valueOrOne();
     Align Alignment = commonAlignment(DstAlign, SrcAlign);
     bool isVol = MMI.isVolatile();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     // FIXME: Support passing different dest/src alignments to the memmove DAG
     // node.
     SDValue Root = isVol ? getRoot() : getMemoryRoot();
@@ -5775,7 +5775,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     unsigned SrcAlign = MI.getSourceAlignment();
     Type *LengthTy = MI.getLength()->getType();
     unsigned ElemSz = MI.getElementSizeInBytes();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     SDValue MC = DAG.getAtomicMemcpy(getRoot(), sdl, Dst, DstAlign, Src,
                                      SrcAlign, Length, LengthTy, ElemSz, isTC,
                                      MachinePointerInfo(MI.getRawDest()),
@@ -5793,7 +5793,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     unsigned SrcAlign = MI.getSourceAlignment();
     Type *LengthTy = MI.getLength()->getType();
     unsigned ElemSz = MI.getElementSizeInBytes();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     SDValue MC = DAG.getAtomicMemmove(getRoot(), sdl, Dst, DstAlign, Src,
                                       SrcAlign, Length, LengthTy, ElemSz, isTC,
                                       MachinePointerInfo(MI.getRawDest()),
@@ -5810,7 +5810,7 @@ void SelectionDAGBuilder::visitIntrinsicCall(const CallInst &I,
     unsigned DstAlign = MI.getDestAlignment();
     Type *LengthTy = MI.getLength()->getType();
     unsigned ElemSz = MI.getElementSizeInBytes();
-    bool isTC = I.isTailCall() && isInTailCallPosition(&I, DAG.getTarget());
+    bool isTC = I.isTailCall() && isInTailCallPosition(I, DAG.getTarget());
     SDValue MC = DAG.getAtomicMemset(getRoot(), sdl, Dst, DstAlign, Val, Length,
                                      LengthTy, ElemSz, isTC,
                                      MachinePointerInfo(MI.getRawDest()));
@@ -7124,8 +7124,7 @@ void SelectionDAGBuilder::LowerCallTo(const CallBase &CB, SDValue Callee,
 
   // Check if target-independent constraints permit a tail call here.
   // Target-dependent constraints are checked within TLI->LowerCallTo.
-  if (isTailCall &&
-      !isInTailCallPosition(ImmutableCallSite(&CB), DAG.getTarget()))
+  if (isTailCall && !isInTailCallPosition(CB, DAG.getTarget()))
     isTailCall = false;
 
   // Disable tail calls if there is an swifterror argument. Targets have not