[FastISel] Break out intrinsic lowering into a separate function and add a target hook.

Create a separate helper function for target-independent intrinsic lowering. Also
add a target hook that allows targets to call directly into a target-specific
intrinsic lowering method. The hook is currently opt-in and doesn't affect
existing target implementations.

llvm-svn: 212843
Juergen Ributzka, 2014-07-11 20:42:12 +00:00
commit 5dd32136b9 (parent fe6ad97ca8)
2 changed files with 45 additions and 34 deletions
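For context, a backend that opts in would subclass FastISel and override the new hook; the default implementation added at the end of the patch below simply returns false, so targets that don't override it see no change in behavior. The sketch that follows is illustrative only: MyTargetFastISel and the MyTarget::TRAP opcode are invented for the example and are not part of this commit, and the include paths assume the 2014 tree layout.

#include "llvm/CodeGen/FastISel.h"
#include "llvm/CodeGen/FunctionLoweringInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Target/TargetInstrInfo.h"

using namespace llvm;

class MyTargetFastISel : public FastISel {
public:
  MyTargetFastISel(FunctionLoweringInfo &FuncInfo,
                   const TargetLibraryInfo *LibInfo)
      : FastISel(FuncInfo, LibInfo) {}

  // A real target selects most instructions here; stubbed out for the sketch.
  bool TargetSelectInstruction(const Instruction *I) override { return false; }

  // Called from FastISel::SelectIntrinsicCall() after the target-independent
  // intrinsics have been handled. Return true if the call was lowered.
  bool FastLowerIntrinsicCall(const IntrinsicInst *II) override {
    switch (II->getIntrinsicID()) {
    default:
      return false; // Not handled here; fall back to SelectionDAG.
    case Intrinsic::trap:
      // MyTarget::TRAP is a hypothetical target opcode; emit it directly.
      BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
              TII.get(MyTarget::TRAP));
      return true;
    }
  }
};

Because SelectIntrinsicCall falls through to this hook only after its own switch, a target override never has to re-implement the generic debug-info, lifetime, or stackmap handling shown in the diff.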

include/llvm/CodeGen/FastISel.h

@@ -27,6 +27,7 @@ class CallInst;
class DataLayout;
class FunctionLoweringInfo;
class Instruction;
+ class IntrinsicInst;
class LoadInst;
class MVT;
class MachineConstantPool;
@@ -179,6 +180,10 @@ protected:
/// argument lowering. It returns true if it was successful.
virtual bool FastLowerArguments();
+ /// This method is called by target-independent code to do target specific
+ /// intrinsic lowering. It returns true if it was successful.
+ virtual bool FastLowerIntrinsicCall(const IntrinsicInst *II);
/// This method is called by target-independent code to request that an
/// instruction with the given type and opcode be emitted.
virtual unsigned FastEmit_(MVT VT,
@@ -390,6 +395,7 @@ private:
bool SelectStackmap(const CallInst *I);
bool SelectCall(const User *I);
+ bool SelectIntrinsicCall(const IntrinsicInst *II);
bool SelectBitCast(const User *I);

lib/CodeGen/SelectionDAG/FastISel.cpp

@@ -687,26 +687,38 @@ bool FastISel::SelectCall(const User *I) {
MachineModuleInfo &MMI = FuncInfo.MF->getMMI();
ComputeUsesVAFloatArgument(*Call, &MMI);
const Function *F = Call->getCalledFunction();
if (!F) return false;
+ // Handle intrinsic function calls.
+ if (const auto *II = dyn_cast<IntrinsicInst>(Call))
+ return SelectIntrinsicCall(II);
- // Handle selected intrinsic function calls.
- switch (F->getIntrinsicID()) {
+ // Usually, it does not make sense to initialize a value,
+ // make an unrelated function call and use the value, because
+ // it tends to be spilled on the stack. So, we move the pointer
+ // to the last local value to the beginning of the block, so that
+ // all the values which have already been materialized,
+ // appear after the call. It also makes sense to skip intrinsics
+ // since they tend to be inlined.
+ flushLocalValueMap();
+ // An arbitrary call. Bail.
+ return false;
+ }
+ bool FastISel::SelectIntrinsicCall(const IntrinsicInst *II) {
+ switch (II->getIntrinsicID()) {
default: break;
// At -O0 we don't care about the lifetime intrinsics.
case Intrinsic::lifetime_start:
case Intrinsic::lifetime_end:
// The donothing intrinsic does, well, nothing.
case Intrinsic::donothing:
return true;
case Intrinsic::dbg_declare: {
- const DbgDeclareInst *DI = cast<DbgDeclareInst>(Call);
+ const DbgDeclareInst *DI = cast<DbgDeclareInst>(II);
DIVariable DIVar(DI->getVariable());
assert((!DIVar || DIVar.isVariable()) &&
- "Variable in DbgDeclareInst should be either null or a DIVariable.");
- if (!DIVar ||
- !FuncInfo.MF->getMMI().hasDebugInfo()) {
+ "Variable in DbgDeclareInst should be either null or a DIVariable.");
+ if (!DIVar || !FuncInfo.MF->getMMI().hasDebugInfo()) {
DEBUG(dbgs() << "Dropping debug info for " << *DI << "\n");
return true;
}
@@ -723,7 +735,7 @@ bool FastISel::SelectCall(const User *I) {
// Some arguments' frame index is recorded during argument lowering.
Offset = FuncInfo.getArgumentFrameIndex(Arg);
if (Offset)
Op = MachineOperand::CreateFI(Offset);
if (!Op)
if (unsigned Reg = lookUpRegForValue(Address))
Op = MachineOperand::CreateReg(Reg, false);
@@ -754,9 +766,9 @@ bool FastISel::SelectCall(const User *I) {
} else
BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc,
TII.get(TargetOpcode::DBG_VALUE))
.addOperand(*Op)
.addImm(0)
.addMetadata(DI->getVariable());
} else {
// We can't yet handle anything else here because it would require
// generating code, thus altering codegen because of debug info.
@@ -766,7 +778,7 @@ bool FastISel::SelectCall(const User *I) {
}
case Intrinsic::dbg_value: {
// This form of DBG_VALUE is target-independent.
- const DbgValueInst *DI = cast<DbgValueInst>(Call);
+ const DbgValueInst *DI = cast<DbgValueInst>(II);
const MCInstrDesc &II = TII.get(TargetOpcode::DBG_VALUE);
const Value *V = DI->getValue();
if (!V) {
@@ -801,38 +813,27 @@ bool FastISel::SelectCall(const User *I) {
return true;
}
case Intrinsic::objectsize: {
- ConstantInt *CI = cast<ConstantInt>(Call->getArgOperand(1));
+ ConstantInt *CI = cast<ConstantInt>(II->getArgOperand(1));
unsigned long long Res = CI->isZero() ? -1ULL : 0;
- Constant *ResCI = ConstantInt::get(Call->getType(), Res);
+ Constant *ResCI = ConstantInt::get(II->getType(), Res);
unsigned ResultReg = getRegForValue(ResCI);
if (ResultReg == 0)
return false;
- UpdateValueMap(Call, ResultReg);
+ UpdateValueMap(II, ResultReg);
return true;
}
case Intrinsic::expect: {
- unsigned ResultReg = getRegForValue(Call->getArgOperand(0));
+ unsigned ResultReg = getRegForValue(II->getArgOperand(0));
if (ResultReg == 0)
return false;
- UpdateValueMap(Call, ResultReg);
+ UpdateValueMap(II, ResultReg);
return true;
}
case Intrinsic::experimental_stackmap:
- return SelectStackmap(Call);
+ return SelectStackmap(II);
}
- // Usually, it does not make sense to initialize a value,
- // make an unrelated function call and use the value, because
- // it tends to be spilled on the stack. So, we move the pointer
- // to the last local value to the beginning of the block, so that
- // all the values which have already been materialized,
- // appear after the call. It also makes sense to skip intrinsics
- // since they tend to be inlined.
- if (!isa<IntrinsicInst>(Call))
- flushLocalValueMap();
- // An arbitrary call. Bail.
- return false;
+ return FastLowerIntrinsicCall(II);
}
bool FastISel::SelectCast(const User *I, unsigned Opcode) {
@@ -1228,6 +1229,10 @@ bool FastISel::FastLowerArguments() {
return false;
}
+ bool FastISel::FastLowerIntrinsicCall(const IntrinsicInst */*II*/) {
+ return false;
+ }
unsigned FastISel::FastEmit_(MVT, MVT,
unsigned) {
return 0;