forked from OSchip/llvm-project
parent 0003c27f5e
commit 8d62763779
@@ -71,24 +71,24 @@ namespace {
    const Type *JBLinkTy;
    GlobalVariable *JBListHead;
    Constant *SetJmpFn, *LongJmpFn;

    // We peek in TLI to grab the target's jmp_buf size and alignment
    const TargetLowering *TLI;

  public:
    static char ID; // Pass identification, replacement for typeid
    explicit LowerInvoke(const TargetLowering *tli = NULL)
      : FunctionPass(&ID), TLI(tli) { }
    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      // This is a cluster of orthogonal Transforms
      AU.addPreservedID(PromoteMemoryToRegisterID);
      AU.addPreservedID(LowerSwitchID);
      AU.addPreservedID(LowerAllocationsID);
    }

  private:
    void createAbortMessage(Module *M);
    void writeAbortMessage(Instruction *IB);
@@ -107,8 +107,8 @@ X("lowerinvoke", "Lower invoke and unwind, for unwindless code generators");
const PassInfo *const llvm::LowerInvokePassID = &X;

// Public Interface To the LowerInvoke pass.
FunctionPass *llvm::createLowerInvokePass(const TargetLowering *TLI) {
  return new LowerInvoke(TLI);
}

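For orientation (not part of this commit): a minimal sketch of driving the pass through the legacy PassManager of this LLVM era, assuming an already-configured TargetMachine named TM.

#include "llvm/Module.h"
#include "llvm/PassManager.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Scalar.h"

// Lower every invoke/unwind in M for a code generator without unwind support.
// Passing the TargetLowering lets the pass use the target's jmp_buf size and
// alignment instead of conservative defaults.
void lowerInvokesIn(llvm::Module &M, llvm::TargetMachine &TM) {
  llvm::PassManager PM;
  PM.add(llvm::createLowerInvokePass(TM.getTargetLowering()));
  PM.run(M);
}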
// doInitialization - Make sure that there is a prototype for abort in the
@@ -144,7 +144,7 @@ bool LowerInvoke::doInitialization(Module &M) {
                                      "llvm.sjljeh.jblist", &M);
    }

// VisualStudio defines setjmp as _setjmp via #include <csetjmp> / <setjmp.h>,
// so it looks like Intrinsic::_setjmp
#if defined(_MSC_VER) && defined(setjmp)
#define setjmp_undefined_for_visual_studio
@@ -154,9 +154,9 @@ bool LowerInvoke::doInitialization(Module &M) {
    SetJmpFn = Intrinsic::getDeclaration(&M, Intrinsic::setjmp);

#if defined(_MSC_VER) && defined(setjmp_undefined_for_visual_studio)
// let's return it to _setjmp state in case anyone ever needs it after this
// point under VisualStudio
#define setjmp _setjmp
#endif

    LongJmpFn = Intrinsic::getDeclaration(&M, Intrinsic::longjmp);
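As a reading aid, a rough C++ picture of the runtime bookkeeping that doInitialization builds in IR: a linked list of target-sized jump buffers whose head lives in the llvm.sjljeh.jblist global. The struct name, the buffer size, and the field layout below are illustrative assumptions, not quoted from the file.

struct SJLJEHLink {
  void       *JmpBuf[200];  // placeholder size; the pass asks TLI->getJumpBufSize()
  SJLJEHLink *Next;         // the enclosing frame's buffer, if any
};
SJLJEHLink *SJLJEHList = 0; // plays the role of @llvm.sjljeh.jblist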
@@ -270,25 +270,25 @@ void LowerInvoke::rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
  // If the unwind edge has phi nodes, split the edge.
  if (isa<PHINode>(II->getUnwindDest()->begin())) {
    SplitCriticalEdge(II, 1, this);

    // If there are any phi nodes left, they must have a single predecessor.
    while (PHINode *PN = dyn_cast<PHINode>(II->getUnwindDest()->begin())) {
      PN->replaceAllUsesWith(PN->getIncomingValue(0));
      PN->eraseFromParent();
    }
  }

  // Insert a store of the invoke num before the invoke and store zero into the
  // location afterward.
  new StoreInst(InvokeNoC, InvokeNum, true, II); // volatile

  BasicBlock::iterator NI = II->getNormalDest()->getFirstNonPHI();
  // nonvolatile.
  new StoreInst(Constant::getNullValue(Type::Int32Ty), InvokeNum, false, NI);

  // Add a switch case to our unwind block.
  CatchSwitch->addCase(InvokeNoC, II->getUnwindDest());

  // Insert a normal call instruction.
  std::vector<Value*> CallArgs(II->op_begin()+3, II->op_end());
  CallInst *NewCall = CallInst::Create(II->getCalledValue(),
@@ -298,7 +298,7 @@ void LowerInvoke::rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
  NewCall->setCallingConv(II->getCallingConv());
  NewCall->setAttributes(II->getAttributes());
  II->replaceAllUsesWith(NewCall);

  // Replace the invoke with an uncond branch.
  BranchInst::Create(II->getNormalDest(), NewCall->getParent());
  II->eraseFromParent();
@@ -308,9 +308,9 @@ void LowerInvoke::rewriteExpensiveInvoke(InvokeInst *II, unsigned InvokeNo,
/// we reach blocks we've already seen.
static void MarkBlocksLiveIn(BasicBlock *BB, std::set<BasicBlock*> &LiveBBs) {
  if (!LiveBBs.insert(BB).second) return; // already been here.

  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
    MarkBlocksLiveIn(*PI, LiveBBs);
}

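A minimal usage sketch of the helper above, reconstructed from the surrounding hunks rather than quoted verbatim: seeding the set with the defining block bounds the backward walk, and marking each user's block then grows the set.

std::set<BasicBlock*> LiveBBs;
LiveBBs.insert(Inst->getParent());          // the walk stops at the definition
MarkBlocksLiveIn(U->getParent(), LiveBBs);  // U: one non-phi user of Inst
// LiveBBs now over-approximates the blocks Inst is live in; later code asks
// whether any invoke's unwind destination landed in this set.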
// First thing we need to do is scan the whole function for values that are
@@ -331,7 +331,7 @@ splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
  }

  Function *F = Invokes.back()->getParent()->getParent();

  // To avoid having to handle incoming arguments specially, we lower each arg
  // to a copy instruction in the entry block. This ensures that the argument
  // value itself cannot be live across the entry block.
@@ -350,9 +350,9 @@ splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
    // could cause the opcode to reflect an illegal conversion. However, we're
    // replacing it here with the same value it was constructed with to simply
    // make NC its user.
    NC->setOperand(0, AI);
  }

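To make the preceding comments easier to follow without the full file, here is a hedged reconstruction of the argument-copy loop they belong to; it is not the commit's own text, and AfterAllocaInsertPt stands for the insertion point the real function computes earlier.

  for (Function::arg_iterator AI = F->arg_begin(), E = F->arg_end();
       AI != E; ++AI) {
    // A no-op bitcast of the argument to its own type, placed in the entry
    // block, so every later use reads the copy instead of the argument.
    CastInst *NC = new BitCastInst(AI, AI->getType(), AI->getName()+".tmp",
                                   AfterAllocaInsertPt);
    AI->replaceAllUsesWith(NC);
    NC->setOperand(0, AI);  // restore AI as the cast's only remaining use
  }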
  // Finally, scan the code looking for instructions with bad live ranges.
  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E; ++II) {
@@ -364,13 +364,13 @@ splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
      if (Inst->hasOneUse() &&
          cast<Instruction>(Inst->use_back())->getParent() == BB &&
          !isa<PHINode>(Inst->use_back())) continue;

      // If this is an alloca in the entry block, it's not a real register
      // value.
      if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
        if (isa<ConstantInt>(AI->getArraySize()) && BB == F->begin())
          continue;

      // Avoid iterator invalidation by copying users to a temporary vector.
      std::vector<Instruction*> Users;
      for (Value::use_iterator UI = Inst->use_begin(), E = Inst->use_end();
@@ -391,7 +391,7 @@ splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
      while (!Users.empty()) {
        Instruction *U = Users.back();
        Users.pop_back();

        if (!isa<PHINode>(U)) {
          MarkBlocksLiveIn(U->getParent(), LiveBBs);
        } else {
@@ -402,7 +402,7 @@ splitLiveRangesLiveAcrossInvokes(std::vector<InvokeInst*> &Invokes) {
          MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
        }
      }

      // Now that we know all of the blocks that this thing is live in, see if
      // it includes any of the unwind locations.
      bool NeedsSpill = false;
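The check the comment describes continues past the lines shown here; a sketch of its assumed shape, for context:

      for (unsigned i = 0, e = Invokes.size(); i != e; ++i)
        if (LiveBBs.count(Invokes[i]->getUnwindDest()))
          NeedsSpill = true;
      // When a spill is needed, the value is demoted to a stack slot (e.g. via
      // LLVM's DemoteRegToStack utility), so nothing stays in a register across
      // an unwind edge that setjmp/longjmp could clobber.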
@@ -441,7 +441,7 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {

  NumInvokes += Invokes.size();
  NumUnwinds += Unwinds.size();

  // TODO: This is not an optimal way to do this. In particular, this always
  // inserts setjmp calls into the entries of functions with invoke instructions
  // even though there are possibly paths through the function that do not
@@ -460,17 +460,17 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  // we spill into a stack location, guaranteeing that there is nothing live
  // across the unwind edge. This process also splits all critical edges
  // coming out of invoke's.
  splitLiveRangesLiveAcrossInvokes(Invokes);

  BasicBlock *EntryBB = F.begin();

  // Create an alloca for the incoming jump buffer ptr and the new jump buffer
  // that needs to be restored on all exits from the function. This is an
  // alloca because the value needs to be live across invokes.
  unsigned Align = TLI ? TLI->getJumpBufAlignment() : 0;
  AllocaInst *JmpBuf =
    new AllocaInst(JBLinkTy, 0, Align, "jblink", F.begin()->begin());

  std::vector<Value*> Idx;
  Idx.push_back(Constant::getNullValue(Type::Int32Ty));
  Idx.push_back(ConstantInt::get(Type::Int32Ty, 1));
@@ -481,33 +481,33 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  Value *OldBuf = new LoadInst(JBListHead, "oldjmpbufptr", true,
                               EntryBB->getTerminator());
  new StoreInst(OldBuf, OldJmpBufPtr, true, EntryBB->getTerminator());

  // Add the new jumpbuf to the list.
  new StoreInst(JmpBuf, JBListHead, true, EntryBB->getTerminator());

  // Create the catch block. The catch block is basically a big switch
  // statement that goes to all of the invoke catch blocks.
  BasicBlock *CatchBB = BasicBlock::Create("setjmp.catch", &F);

  // Create an alloca which keeps track of which invoke is currently
  // executing. For normal calls it contains zero.
  AllocaInst *InvokeNum = new AllocaInst(Type::Int32Ty, 0, "invokenum",
                                         EntryBB->begin());
  new StoreInst(ConstantInt::get(Type::Int32Ty, 0), InvokeNum, true,
                EntryBB->getTerminator());

  // Insert a load in the Catch block, and a switch on its value. By default,
  // we go to a block that just does an unwind (which is the correct action
  // for a standard call).
  BasicBlock *UnwindBB = BasicBlock::Create("unwindbb", &F);
  Unwinds.push_back(new UnwindInst(UnwindBB));

  Value *CatchLoad = new LoadInst(InvokeNum, "invoke.num", true, CatchBB);
  SwitchInst *CatchSwitch =
    SwitchInst::Create(CatchLoad, UnwindBB, Invokes.size(), CatchBB);

  // Now that things are set up, insert the setjmp call itself.

  // Split the entry block to insert the conditional branch for the setjmp.
  BasicBlock *ContBlock = EntryBB->splitBasicBlock(EntryBB->getTerminator(),
                                                   "setjmp.cont");
@@ -522,12 +522,12 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
                                  EntryBB->getTerminator());

  // Compare the return value to zero.
  Value *IsNormal = new ICmpInst(ICmpInst::ICMP_EQ, SJRet,
                                 Constant::getNullValue(SJRet->getType()),
                                 "notunwind", EntryBB->getTerminator());
  // Nuke the uncond branch.
  EntryBB->getTerminator()->eraseFromParent();

  // Put in a new condbranch in its place.
  BranchInst::Create(ContBlock, CatchBB, IsNormal, EntryBB);
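Stepping back, the scheme this function emits in IR corresponds roughly to the self-contained C++ sketch below. It is illustrative only: names such as SJLJEHLink, EHList, may_unwind, and caller are invented for the example, and the real pass additionally spills values live across invokes and uses target-sized jump buffers.

#include <csetjmp>
#include <cstdio>
#include <cstdlib>

struct SJLJEHLink {
  std::jmp_buf Buf;      // stands in for the target-sized jump buffer
  SJLJEHLink *Next;      // link to the enclosing frame's buffer
};
static SJLJEHLink *EHList = 0;       // plays the role of llvm.sjljeh.jblist

// The lowered form of 'unwind': longjmp through the innermost registered
// buffer, or abort if no frame has registered one.
static void may_unwind(bool doThrow) {
  if (!doThrow) return;
  if (!EHList) { std::fputs("unwind with no handler\n", stderr); std::abort(); }
  std::longjmp(EHList->Buf, 1);
}

static void caller(bool doThrow) {
  SJLJEHLink Link;                   // the "jblink" alloca
  SJLJEHLink *OldBuf = EHList;       // saved list head ("oldjmpbufptr")
  Link.Next = OldBuf;
  EHList = &Link;                    // push our buffer onto the list
  volatile int InvokeNum = 0;        // "invokenum": which invoke is executing

  if (setjmp(Link.Buf) == 0) {       // normal path ("setjmp.cont")
    InvokeNum = 1;                   // about to run the call from invoke #1
    may_unwind(doThrow);             // the invoke, now a plain call
    InvokeNum = 0;
    std::puts("normal return");
  } else {                           // a longjmp landed here ("setjmp.catch")
    switch (InvokeNum) {             // the CatchSwitch
    case 1: std::puts("reached invoke #1's unwind dest"); break;
    default:                         // not ours ("unwindbb"): keep unwinding
      EHList = OldBuf;
      may_unwind(true);
    }
  }
  EHList = OldBuf;                   // restore the list head on every return
}

int main() {
  caller(false);                     // prints "normal return"
  caller(true);                      // prints "reached invoke #1's unwind dest"
  return 0;
}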
@@ -537,7 +537,7 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  }

  // We know that there is at least one unwind.

  // Create three new blocks, the block to load the jmpbuf ptr and compare
  // against null, the block to do the longjmp, and the error block for if it
  // is null. Add them at the end of the function because they are not hot.
@@ -554,13 +554,13 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  } else {
    BufPtr = new LoadInst(JBListHead, "ehlist", UnwindHandler);
  }

  // Load the JBList, if it's null, then there was no catch!
  Value *NotNull = new ICmpInst(ICmpInst::ICMP_NE, BufPtr,
                                Constant::getNullValue(BufPtr->getType()),
                                "notnull", UnwindHandler);
  BranchInst::Create(UnwindBlock, TermBlock, NotNull, UnwindHandler);

  // Create the block to do the longjmp.
  // Get a pointer to the jmpbuf and longjmp.
  std::vector<Value*> Idx;
@@ -573,36 +573,36 @@ bool LowerInvoke::insertExpensiveEHSupport(Function &F) {
  Idx[1] = ConstantInt::get(Type::Int32Ty, 1);
  CallInst::Create(LongJmpFn, Idx.begin(), Idx.end(), "", UnwindBlock);
  new UnreachableInst(UnwindBlock);

  // Set up the term block ("throw without a catch").
  new UnreachableInst(TermBlock);

  // Insert a new call to write(2, AbortMessage, AbortMessageLength);
  writeAbortMessage(TermBlock->getTerminator());

  // Insert a call to abort()
  CallInst::Create(AbortFn, "",
                   TermBlock->getTerminator())->setTailCall();

  // Replace all unwinds with a branch to the unwind handler.
  for (unsigned i = 0, e = Unwinds.size(); i != e; ++i) {
    BranchInst::Create(UnwindHandler, Unwinds[i]);
    Unwinds[i]->eraseFromParent();
  }

  // Finally, for any returns from this function, if this function contains an
  // invoke, restore the old jmpbuf pointer to its input value.
  if (OldJmpBufPtr) {
    for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
      ReturnInst *R = Returns[i];

      // Before the return, insert a copy from the saved value to the new value.
      Value *OldBuf = new LoadInst(OldJmpBufPtr, "oldjmpbufptr", true, R);
      new StoreInst(OldBuf, JBListHead, true, R);
    }
  }

  return true;
}