Support swifterror in coroutine lowering.

The support for swifterror allocas should work in all lowerings.
The support for swifterror arguments only really works in a lowering
with prototypes where you can ensure that the prototype also has a
swifterror argument; I'm not really sure how it could possibly be
made to work in the switch lowering.
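
The argument case can be seen concretely in the retcon test added here, where the coroutine and its resume prototype share the swifterror parameter. Schematically (signatures taken from the test below):

  declare i8* @f_prototype(i8*, i1 zeroext, i8** swifterror)

  define i8* @f(i8* %buffer, i32 %n, i8** swifterror %errorslot) {
    ; suspends via llvm.coro.suspend.retcon; every resume function
    ; produced by splitting inherits the i8** swifterror argument
  }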

llvm-svn: 368795
John McCall 2019-08-14 03:54:05 +00:00
parent dc4668e5cf
commit 2133feec93
4 changed files with 381 additions and 0 deletions

llvm/lib/Transforms/Coroutines/CoroFrame.cpp

@@ -28,6 +28,7 @@
#include "llvm/Support/MathExtras.h" #include "llvm/Support/MathExtras.h"
#include "llvm/Support/circular_raw_ostream.h" #include "llvm/Support/circular_raw_ostream.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h" #include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm; using namespace llvm;
@@ -1110,11 +1111,176 @@ static Instruction *lowerNonLocalAlloca(CoroAllocaAllocInst *AI,
  return cast<Instruction>(Alloc);
}

/// Get the current swifterror value.
static Value *emitGetSwiftErrorValue(IRBuilder<> &Builder, Type *ValueTy,
                                     coro::Shape &Shape) {
  // Make a fake function pointer as a sort of intrinsic.
  auto FnTy = FunctionType::get(ValueTy, {}, false);
  auto Fn = ConstantPointerNull::get(FnTy->getPointerTo());

  auto Call = Builder.CreateCall(Fn, {});
  Shape.SwiftErrorOps.push_back(Call);

  return Call;
}

/// Set the given value as the current swifterror value.
///
/// Returns a slot that can be used as a swifterror slot.
static Value *emitSetSwiftErrorValue(IRBuilder<> &Builder, Value *V,
                                     coro::Shape &Shape) {
  // Make a fake function pointer as a sort of intrinsic.
  auto FnTy = FunctionType::get(V->getType()->getPointerTo(),
                                {V->getType()}, false);
  auto Fn = ConstantPointerNull::get(FnTy->getPointerTo());

  auto Call = Builder.CreateCall(Fn, { V });
  Shape.SwiftErrorOps.push_back(Call);

  return Call;
}
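
Until splitting, these placeholder operations appear in the IR as calls through a null function pointer and are tracked only via Shape.SwiftErrorOps. For an i8* error type they would print roughly as follows (an illustrative sketch, not standalone parseable IR):

  %err  = call i8* null()            ; 'get' the current swifterror value
  %slot = call i8** null(i8* %err)   ; 'set' it; result stands in for the slot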

/// Set the swifterror value from the given alloca before a call,
/// then put it back in the alloca afterwards.
///
/// Returns an address that will stand in for the swifterror slot
/// until splitting.
static Value *emitSetAndGetSwiftErrorValueAround(Instruction *Call,
                                                 AllocaInst *Alloca,
                                                 coro::Shape &Shape) {
  auto ValueTy = Alloca->getAllocatedType();
  IRBuilder<> Builder(Call);

  // Load the current value from the alloca and set it as the
  // swifterror value.
  auto ValueBeforeCall = Builder.CreateLoad(ValueTy, Alloca);
  auto Addr = emitSetSwiftErrorValue(Builder, ValueBeforeCall, Shape);

  // Move to after the call.  Since swifterror only has a guaranteed
  // value on normal exits, we can ignore implicit and explicit unwind
  // edges.
  if (isa<CallInst>(Call)) {
    Builder.SetInsertPoint(Call->getNextNode());
  } else {
    auto Invoke = cast<InvokeInst>(Call);
    Builder.SetInsertPoint(Invoke->getNormalDest()->getFirstNonPHIOrDbg());
  }

  // Get the current swifterror value and store it to the alloca.
  auto ValueAfterCall = emitGetSwiftErrorValue(Builder, ValueTy, Shape);
  Builder.CreateStore(ValueAfterCall, Alloca);

  return Addr;
}
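
For a call that takes the swifterror slot, this brackets the call with the two placeholders; schematically, for a hypothetical alloca %a of type i8* and the test's @maybeThrow (the caller of this helper then redirects the call's slot operand to %slot):

  %v = load i8*, i8** %a
  %slot = call i8** null(i8* %v)                  ; set placeholder
  call void @maybeThrow(i8** swifterror %slot)
  %w = call i8* null()                            ; get placeholder
  store i8* %w, i8** %a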

/// Eliminate a formerly-swifterror alloca by inserting the get/set
/// intrinsics and attempting to MemToReg the alloca away.
static void eliminateSwiftErrorAlloca(Function &F, AllocaInst *Alloca,
                                      coro::Shape &Shape) {
  for (auto UI = Alloca->use_begin(), UE = Alloca->use_end(); UI != UE; ) {
    // We're likely changing the use list, so use a mutation-safe
    // iteration pattern.
    auto &Use = *UI;
    ++UI;

    // swifterror values can only be used in very specific ways.
    // We take advantage of that here.
    auto User = Use.getUser();
    if (isa<LoadInst>(User) || isa<StoreInst>(User))
      continue;

    assert(isa<CallInst>(User) || isa<InvokeInst>(User));
    auto Call = cast<Instruction>(User);

    auto Addr = emitSetAndGetSwiftErrorValueAround(Call, Alloca, Shape);

    // Use the returned slot address as the call argument.
    Use.set(Addr);
  }

  // All the uses should be loads and stores now.
  assert(isAllocaPromotable(Alloca));
}
/// "Eliminate" a swifterror argument by reducing it to the alloca case
/// and then loading and storing in the prologue and epilog.
///
/// The argument keeps the swifterror flag.
static void eliminateSwiftErrorArgument(Function &F, Argument &Arg,
coro::Shape &Shape,
SmallVectorImpl<AllocaInst*> &AllocasToPromote) {
IRBuilder<> Builder(F.getEntryBlock().getFirstNonPHIOrDbg());
auto ArgTy = cast<PointerType>(Arg.getType());
auto ValueTy = ArgTy->getElementType();
// Reduce to the alloca case:
// Create an alloca and replace all uses of the arg with it.
auto Alloca = Builder.CreateAlloca(ValueTy, ArgTy->getAddressSpace());
Arg.replaceAllUsesWith(Alloca);
// Set an initial value in the alloca. swifterror is always null on entry.
auto InitialValue = Constant::getNullValue(ValueTy);
Builder.CreateStore(InitialValue, Alloca);
// Find all the suspends in the function and save and restore around them.
for (auto Suspend : Shape.CoroSuspends) {
(void) emitSetAndGetSwiftErrorValueAround(Suspend, Alloca, Shape);
}
// Find all the coro.ends in the function and restore the error value.
for (auto End : Shape.CoroEnds) {
Builder.SetInsertPoint(End);
auto FinalValue = Builder.CreateLoad(ValueTy, Alloca);
(void) emitSetSwiftErrorValue(Builder, FinalValue, Shape);
}
// Now we can use the alloca logic.
AllocasToPromote.push_back(Alloca);
eliminateSwiftErrorAlloca(F, Alloca, Shape);
}
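
Applied to a function like @f in the test, this leaves a prologue of roughly the following shape (the alloca name is hypothetical):

  entry:
    %errorslot.local = alloca i8*
    store i8* null, i8** %errorslot.local   ; swifterror is null on entry
    ...
  ; each suspend is bracketed with set/get placeholders as above, and
  ; each coro.end is preceded by a 'set' of the alloca's final value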

/// Eliminate all problematic uses of swifterror arguments and allocas
/// from the function.  We'll fix them up later when splitting the function.
static void eliminateSwiftError(Function &F, coro::Shape &Shape) {
  SmallVector<AllocaInst*, 4> AllocasToPromote;

  // Look for a swifterror argument.
  for (auto &Arg : F.args()) {
    if (!Arg.hasSwiftErrorAttr()) continue;

    eliminateSwiftErrorArgument(F, Arg, Shape, AllocasToPromote);
    break;
  }

  // Look for swifterror allocas.
  for (auto &Inst : F.getEntryBlock()) {
    auto Alloca = dyn_cast<AllocaInst>(&Inst);
    if (!Alloca || !Alloca->isSwiftError()) continue;

    // Clear the swifterror flag.
    Alloca->setSwiftError(false);

    AllocasToPromote.push_back(Alloca);
    eliminateSwiftErrorAlloca(F, Alloca, Shape);
  }

  // If we have any allocas to promote, compute a dominator tree and
  // promote them en masse.
  if (!AllocasToPromote.empty()) {
    DominatorTree DT(F);
    PromoteMemToReg(AllocasToPromote, DT);
  }
}

void coro::buildCoroutineFrame(Function &F, Shape &Shape) {
  // Lower coro.dbg.declare to coro.dbg.value, since we are going to rewrite
  // access to local variables.
  LowerDbgDeclare(F);

  eliminateSwiftError(F, Shape);

  if (Shape.ABI == coro::ABI::Switch &&
      Shape.SwitchLowering.PromiseAlloca) {
    Shape.getSwitchCoroId()->clearPromise();

llvm/lib/Transforms/Coroutines/CoroInternal.h

@@ -89,6 +89,7 @@ struct LLVM_LIBRARY_VISIBILITY Shape {
  SmallVector<CoroEndInst *, 4> CoroEnds;
  SmallVector<CoroSizeInst *, 2> CoroSizes;
  SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends;
  SmallVector<CallInst*, 2> SwiftErrorOps;

  // Field indexes for special fields in the switch lowering.
  struct SwitchFieldIndex {
llvm/lib/Transforms/Coroutines/CoroSplit.cpp

@@ -97,6 +97,7 @@ private:
  ValueToValueMapTy VMap;
  IRBuilder<> Builder;
  Value *NewFramePtr = nullptr;
  Value *SwiftErrorSlot = nullptr;

  /// The active suspend instruction; meaningful only for continuation ABIs.
  AnyCoroSuspendInst *ActiveSuspend = nullptr;
@@ -147,6 +148,7 @@ private:
  void replaceRetconSuspendUses();
  void replaceCoroSuspends();
  void replaceCoroEnds();
  void replaceSwiftErrorOps();
  void handleFinalSuspend();
  void maybeFreeContinuationStorage();
};
@@ -490,6 +492,68 @@ void CoroCloner::replaceCoroEnds() {
  }
}

static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape,
                                 ValueToValueMapTy *VMap) {
  Value *CachedSlot = nullptr;
  auto getSwiftErrorSlot = [&](Type *ValueTy) -> Value * {
    if (CachedSlot) {
      assert(CachedSlot->getType()->getPointerElementType() == ValueTy &&
             "multiple swifterror slots in function with different types");
      return CachedSlot;
    }

    // Check if the function has a swifterror argument.
    for (auto &Arg : F.args()) {
      if (Arg.isSwiftError()) {
        CachedSlot = &Arg;
        assert(Arg.getType()->getPointerElementType() == ValueTy &&
               "swifterror argument does not have expected type");
        return &Arg;
      }
    }

    // Create a swifterror alloca.
    IRBuilder<> Builder(F.getEntryBlock().getFirstNonPHIOrDbg());
    auto Alloca = Builder.CreateAlloca(ValueTy);
    Alloca->setSwiftError(true);

    CachedSlot = Alloca;
    return Alloca;
  };

  for (CallInst *Op : Shape.SwiftErrorOps) {
    auto MappedOp = VMap ? cast<CallInst>((*VMap)[Op]) : Op;
    IRBuilder<> Builder(MappedOp);

    // If there are no arguments, this is a 'get' operation.
    Value *MappedResult;
    if (Op->getNumArgOperands() == 0) {
      auto ValueTy = Op->getType();
      auto Slot = getSwiftErrorSlot(ValueTy);
      MappedResult = Builder.CreateLoad(ValueTy, Slot);
    } else {
      assert(Op->getNumArgOperands() == 1);
      auto Value = MappedOp->getArgOperand(0);
      auto ValueTy = Value->getType();
      auto Slot = getSwiftErrorSlot(ValueTy);
      Builder.CreateStore(Value, Slot);
      MappedResult = Slot;
    }

    MappedOp->replaceAllUsesWith(MappedResult);
    MappedOp->eraseFromParent();
  }

  // If we're updating the original function, we've invalidated SwiftErrorOps.
  if (VMap == nullptr) {
    Shape.SwiftErrorOps.clear();
  }
}

void CoroCloner::replaceSwiftErrorOps() {
  ::replaceSwiftErrorOps(*NewF, Shape, &VMap);
}
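
After splitting, each 'get' placeholder becomes a load from the real slot and each 'set' becomes a store to it. In a clone whose swifterror slot is the prototype argument %2, the test below checks for exactly this pattern; an excerpt, with illustrative value names:

  store i8* %error, i8** %2                    ; rewritten 'set'
  call void @maybeThrow(i8** swifterror %2)
  %t2 = load i8*, i8** %2                      ; rewritten 'get'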

void CoroCloner::replaceEntryBlock() {
  // In the original function, the AllocaSpillBlock is a block immediately
  // following the allocation of the frame object which defines GEPs for
@@ -691,6 +755,9 @@ void CoroCloner::create() {
  // Handle suspends.
  replaceCoroSuspends();

  // Handle swifterror.
  replaceSwiftErrorOps();

  // Remove coro.end intrinsics.
  replaceCoroEnds();
@@ -1364,6 +1431,10 @@ static void splitCoroutine(Function &F, CallGraph &CG, CallGraphSCC &SCC) {
    splitCoroutine(F, Shape, Clones);
  }

  // Replace all the swifterror operations in the original function.
  // This invalidates SwiftErrorOps in the Shape.
  replaceSwiftErrorOps(F, Shape, nullptr);

  removeCoroEnds(Shape, &CG);
  postSplitCleanup(F);

llvm/test/Transforms/Coroutines/coro-swifterror.ll

@@ -0,0 +1,143 @@
; RUN: opt < %s -enable-coroutines -O2 -S | FileCheck %s

target datalayout = "E-p:32:32"

define i8* @f(i8* %buffer, i32 %n, i8** swifterror %errorslot) {
entry:
  %id = call token @llvm.coro.id.retcon(i32 8, i32 4, i8* %buffer, i8* bitcast (i8* (i8*, i1, i8**)* @f_prototype to i8*), i8* bitcast (i8* (i32)* @allocate to i8*), i8* bitcast (void (i8*)* @deallocate to i8*))
  %hdl = call i8* @llvm.coro.begin(token %id, i8* null)
  br label %loop

loop:
  %n.val = phi i32 [ %n, %entry ], [ %inc, %resume ]
  call void @print(i32 %n.val)
  call void @maybeThrow(i8** swifterror %errorslot)
  %errorload1 = load i8*, i8** %errorslot
  call void @logError(i8* %errorload1)
  %suspend_result = call { i1, i8** } (...) @llvm.coro.suspend.retcon.i1p0p0i8()
  %unwind0 = extractvalue { i1, i8** } %suspend_result, 0
  br i1 %unwind0, label %cleanup, label %resume

resume:
  %inc = add i32 %n.val, 1
  br label %loop

cleanup:
  call i1 @llvm.coro.end(i8* %hdl, i1 0)
  unreachable
}

; CHECK-LABEL: define i8* @f(i8* %buffer, i32 %n, i8** swifterror %errorslot)
; CHECK-NEXT: entry:
; CHECK-NEXT: [[T0:%.*]] = bitcast i8* %buffer to i32*
; CHECK-NEXT: store i32 %n, i32* [[T0]], align 4
; CHECK-NEXT: call void @print(i32 %n)
; TODO: figure out a way to eliminate this
; CHECK-NEXT: store i8* null, i8** %errorslot
; CHECK-NEXT: call void @maybeThrow(i8** swifterror %errorslot)
; CHECK-NEXT: [[T1:%.*]] = load i8*, i8** %errorslot
; CHECK-NEXT: call void @logError(i8* [[T1]])
; CHECK-NEXT: store i8* [[T1]], i8** %errorslot
; CHECK-NEXT: ret i8* bitcast (i8* (i8*, i1, i8**)* @f.resume.0 to i8*)
; CHECK-NEXT: }

; CHECK-LABEL: define internal i8* @f.resume.0(i8* noalias nonnull %0, i1 zeroext %1, i8** swifterror %2)
; CHECK-NEXT: :
; CHECK-NEXT: br i1 %1,
; CHECK: :
; CHECK-NEXT: [[ERROR:%.*]] = load i8*, i8** %2, align 4
; CHECK-NEXT: [[T0:%.*]] = bitcast i8* %0 to i32*
; CHECK-NEXT: [[T1:%.*]] = load i32, i32* [[T0]], align 4
; CHECK-NEXT: %inc = add i32 [[T1]], 1
; CHECK-NEXT: store i32 %inc, i32* [[T0]], align 4
; CHECK-NEXT: call void @print(i32 %inc)
; CHECK-NEXT: store i8* [[ERROR]], i8** %2
; CHECK-NEXT: call void @maybeThrow(i8** swifterror %2)
; CHECK-NEXT: [[T2:%.*]] = load i8*, i8** %2
; CHECK-NEXT: call void @logError(i8* [[T2]])
; CHECK-NEXT: store i8* [[T2]], i8** %2
; CHECK-NEXT: ret i8* bitcast (i8* (i8*, i1, i8**)* @f.resume.0 to i8*)
; CHECK: :
; CHECK-NEXT: ret i8* null
; CHECK-NEXT: }

define i8* @g(i8* %buffer, i32 %n) {
entry:
  %errorslot = alloca swifterror i8*, align 4
  store i8* null, i8** %errorslot
  %id = call token @llvm.coro.id.retcon(i32 8, i32 4, i8* %buffer, i8* bitcast (i8* (i8*, i1)* @g_prototype to i8*), i8* bitcast (i8* (i32)* @allocate to i8*), i8* bitcast (void (i8*)* @deallocate to i8*))
  %hdl = call i8* @llvm.coro.begin(token %id, i8* null)
  br label %loop

loop:
  %n.val = phi i32 [ %n, %entry ], [ %inc, %resume ]
  call void @print(i32 %n.val)
  call void @maybeThrow(i8** swifterror %errorslot)
  %errorload1 = load i8*, i8** %errorslot
  call void @logError(i8* %errorload1)
  %unwind0 = call i1 (...) @llvm.coro.suspend.retcon.i1()
  br i1 %unwind0, label %cleanup, label %resume

resume:
  %inc = add i32 %n.val, 1
  br label %loop

cleanup:
  call i1 @llvm.coro.end(i8* %hdl, i1 0)
  unreachable
}

; CHECK-LABEL: define i8* @g(i8* %buffer, i32 %n)
; CHECK-NEXT: entry:
; CHECK-NEXT: [[ERRORSLOT:%.*]] = alloca swifterror i8*, align 4
; CHECK-NEXT: [[T0:%.*]] = bitcast i8* %buffer to i32*
; CHECK-NEXT: store i32 %n, i32* [[T0]], align 4
; CHECK-NEXT: call void @print(i32 %n)
; CHECK-NEXT: store i8* null, i8** [[ERRORSLOT]], align 4
; CHECK-NEXT: call void @maybeThrow(i8** nonnull swifterror [[ERRORSLOT]])
; CHECK-NEXT: [[T1:%.*]] = load i8*, i8** [[ERRORSLOT]], align 4
; CHECK-NEXT: [[T2:%.*]] = getelementptr inbounds i8, i8* %buffer, i32 4
; CHECK-NEXT: [[T3:%.*]] = bitcast i8* [[T2]] to i8**
; CHECK-NEXT: store i8* [[T1]], i8** [[T3]], align 4
; CHECK-NEXT: call void @logError(i8* [[T1]])
; CHECK-NEXT: ret i8* bitcast (i8* (i8*, i1)* @g.resume.0 to i8*)
; CHECK-NEXT: }

; CHECK-LABEL: define internal i8* @g.resume.0(i8* noalias nonnull %0, i1 zeroext %1)
; CHECK-NEXT: :
; CHECK-NEXT: [[ERRORSLOT:%.*]] = alloca swifterror i8*, align 4
; CHECK-NEXT: br i1 %1,
; CHECK: :
; CHECK-NEXT: [[T0:%.*]] = bitcast i8* %0 to i32*
; CHECK-NEXT: [[T1:%.*]] = load i32, i32* [[T0]], align 4
; CHECK-NEXT: %inc = add i32 [[T1]], 1
; CHECK-NEXT: [[T2:%.*]] = getelementptr inbounds i8, i8* %0, i32 4
; CHECK-NEXT: [[T3:%.*]] = bitcast i8* [[T2]] to i8**
; CHECK-NEXT: [[T4:%.*]] = load i8*, i8** [[T3]]
; CHECK-NEXT: store i32 %inc, i32* [[T0]], align 4
; CHECK-NEXT: call void @print(i32 %inc)
; CHECK-NEXT: store i8* [[T4]], i8** [[ERRORSLOT]]
; CHECK-NEXT: call void @maybeThrow(i8** nonnull swifterror [[ERRORSLOT]])
; CHECK-NEXT: [[T5:%.*]] = load i8*, i8** [[ERRORSLOT]]
; CHECK-NEXT: store i8* [[T5]], i8** [[T3]], align 4
; CHECK-NEXT: call void @logError(i8* [[T5]])
; CHECK-NEXT: ret i8* bitcast (i8* (i8*, i1)* @g.resume.0 to i8*)
; CHECK: :
; CHECK-NEXT: ret i8* null
; CHECK-NEXT: }

declare token @llvm.coro.id.retcon(i32, i32, i8*, i8*, i8*, i8*)
declare i8* @llvm.coro.begin(token, i8*)
declare { i1, i8** } @llvm.coro.suspend.retcon.i1p0p0i8(...)
declare i1 @llvm.coro.suspend.retcon.i1(...)
declare i1 @llvm.coro.end(i8*, i1)
declare i8* @llvm.coro.prepare.retcon(i8*)

declare i8* @f_prototype(i8*, i1 zeroext, i8** swifterror)
declare i8* @g_prototype(i8*, i1 zeroext)

declare noalias i8* @allocate(i32 %size)
declare void @deallocate(i8* %ptr)
declare void @print(i32)
declare void @maybeThrow(i8** swifterror)
declare void @logError(i8*)