Rename LazyCleanup -> Cleanup. No functionality change for these last three commits.

llvm-svn: 109000
John McCall 2010-07-21 07:22:38 +00:00
parent 20141f2d8c
commit cda666ccd8
10 changed files with 173 additions and 178 deletions
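The rename is purely mechanical: every EHScopeStack::LazyCleanup subclass becomes an EHScopeStack::Cleanup, and every pushLazyCleanup<T>(...) call site becomes pushCleanup<T>(...). A minimal sketch of the resulting usage pattern, where MyCleanup, CGF, and Addr are illustrative names rather than code from this patch:

namespace {
  // Cleanups must stay POD-like: they are placement-new'd into the
  // EHScopeStack buffer, copied bytewise, and never destructed.
  struct MyCleanup : EHScopeStack::Cleanup {
    llvm::Value *Addr;
    MyCleanup(llvm::Value *Addr) : Addr(Addr) {}
    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // emit the IR that tears down whatever Addr refers to
    }
  };
}

// before this commit:
//   CGF.EHStack.pushLazyCleanup<MyCleanup>(NormalAndEHCleanup, Addr);
// after this commit:
CGF.EHStack.pushCleanup<MyCleanup>(NormalAndEHCleanup, Addr);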

View File

@@ -312,7 +312,7 @@ static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
 namespace {
   /// Call the destructor for a direct base class.
-  struct CallBaseDtor : EHScopeStack::LazyCleanup {
+  struct CallBaseDtor : EHScopeStack::Cleanup {
     const CXXRecordDecl *BaseClass;
     bool BaseIsVirtual;
     CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
@@ -361,8 +361,8 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,
   CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);
   if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
-    CGF.EHStack.pushLazyCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
-                                              isBaseVirtual);
+    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
+                                          isBaseVirtual);
 }
 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
@@ -452,7 +452,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF,
 }
 namespace {
-  struct CallMemberDtor : EHScopeStack::LazyCleanup {
+  struct CallMemberDtor : EHScopeStack::Cleanup {
     FieldDecl *Field;
     CXXDestructorDecl *Dtor;
@@ -570,8 +570,8 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
       CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
       if (!RD->hasTrivialDestructor())
-        CGF.EHStack.pushLazyCleanup<CallMemberDtor>(EHCleanup, Field,
-                                                    RD->getDestructor());
+        CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
+                                                RD->getDestructor());
     }
   }
@@ -761,7 +761,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
 namespace {
   /// Call the operator delete associated with the current destructor.
-  struct CallDtorDelete : EHScopeStack::LazyCleanup {
+  struct CallDtorDelete : EHScopeStack::Cleanup {
     CallDtorDelete() {}
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
@@ -772,7 +772,7 @@ namespace {
     }
   };
-  struct CallArrayFieldDtor : EHScopeStack::LazyCleanup {
+  struct CallArrayFieldDtor : EHScopeStack::Cleanup {
     const FieldDecl *Field;
     CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
@@ -798,7 +798,7 @@ namespace {
     }
   };
-  struct CallFieldDtor : EHScopeStack::LazyCleanup {
+  struct CallFieldDtor : EHScopeStack::Cleanup {
     const FieldDecl *Field;
     CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
@@ -831,7 +831,7 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
   if (DtorType == Dtor_Deleting) {
     assert(DD->getOperatorDelete() &&
            "operator delete missing - EmitDtorEpilogue");
-    EHStack.pushLazyCleanup<CallDtorDelete>(NormalAndEHCleanup);
+    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
     return;
   }
@@ -853,9 +853,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
       if (BaseClassDecl->hasTrivialDestructor())
        continue;
-      EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
-                                            BaseClassDecl,
-                                            /*BaseIsVirtual*/ true);
+      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+                                        BaseClassDecl,
+                                        /*BaseIsVirtual*/ true);
     }
     return;
@@ -878,9 +878,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
     if (BaseClassDecl->hasTrivialDestructor())
       continue;
-    EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
-                                          BaseClassDecl,
-                                          /*BaseIsVirtual*/ false);
+    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+                                      BaseClassDecl,
+                                      /*BaseIsVirtual*/ false);
   }
   // Destroy direct fields.
@@ -904,9 +904,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
       continue;
     if (Array)
-      EHStack.pushLazyCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
+      EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
     else
-      EHStack.pushLazyCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
+      EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
   }
 }
@@ -1164,7 +1164,7 @@ void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
 }
 namespace {
-  struct CallLocalDtor : EHScopeStack::LazyCleanup {
+  struct CallLocalDtor : EHScopeStack::Cleanup {
     const CXXDestructorDecl *Dtor;
     llvm::Value *Addr;
@@ -1180,7 +1180,7 @@ namespace {
 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                             llvm::Value *Addr) {
-  EHStack.pushLazyCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
+  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
 }
 void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {

View File

@@ -389,7 +389,7 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
 }
 namespace {
-  struct CallArrayDtor : EHScopeStack::LazyCleanup {
+  struct CallArrayDtor : EHScopeStack::Cleanup {
     CallArrayDtor(const CXXDestructorDecl *Dtor,
                   const ConstantArrayType *Type,
                   llvm::Value *Loc)
@@ -408,7 +408,7 @@ namespace {
     }
   };
-  struct CallVarDtor : EHScopeStack::LazyCleanup {
+  struct CallVarDtor : EHScopeStack::Cleanup {
     CallVarDtor(const CXXDestructorDecl *Dtor,
                 llvm::Value *NRVOFlag,
                 llvm::Value *Loc)
@@ -441,7 +441,7 @@ namespace {
 }
 namespace {
-  struct CallStackRestore : EHScopeStack::LazyCleanup {
+  struct CallStackRestore : EHScopeStack::Cleanup {
     llvm::Value *Stack;
     CallStackRestore(llvm::Value *Stack) : Stack(Stack) {}
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
@@ -451,7 +451,7 @@ namespace {
     }
   };
-  struct CallCleanupFunction : EHScopeStack::LazyCleanup {
+  struct CallCleanupFunction : EHScopeStack::Cleanup {
     llvm::Constant *CleanupFn;
     const CGFunctionInfo &FnInfo;
     llvm::Value *Addr;
@@ -479,7 +479,7 @@ namespace {
     }
   };
-  struct CallBlockRelease : EHScopeStack::LazyCleanup {
+  struct CallBlockRelease : EHScopeStack::Cleanup {
     llvm::Value *Addr;
     CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {}
@@ -592,7 +592,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
       DidCallStackSave = true;
       // Push a cleanup block and restore the stack there.
-      EHStack.pushLazyCleanup<CallStackRestore>(NormalCleanup, Stack);
+      EHStack.pushCleanup<CallStackRestore>(NormalCleanup, Stack);
     }
     // Get the element type.
@@ -783,11 +783,11 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
         if (const ConstantArrayType *Array =
               getContext().getAsConstantArrayType(Ty)) {
-          EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup,
-                                                 D, Array, Loc);
+          EHStack.pushCleanup<CallArrayDtor>(NormalAndEHCleanup,
+                                             D, Array, Loc);
         } else {
-          EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup,
-                                               D, NRVOFlag, Loc);
+          EHStack.pushCleanup<CallVarDtor>(NormalAndEHCleanup,
+                                           D, NRVOFlag, Loc);
         }
       }
     }
@@ -800,12 +800,12 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
     assert(F && "Could not find function!");
     const CGFunctionInfo &Info = CGM.getTypes().getFunctionInfo(FD);
-    EHStack.pushLazyCleanup<CallCleanupFunction>(NormalAndEHCleanup,
-                                                 F, &Info, DeclPtr, &D);
+    EHStack.pushCleanup<CallCleanupFunction>(NormalAndEHCleanup,
+                                             F, &Info, DeclPtr, &D);
   }
   if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly)
-    EHStack.pushLazyCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
+    EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
 }
 /// Emit an alloca (or GlobalValue depending on target)

View File

@@ -330,7 +330,7 @@ static llvm::Constant *getGuardAbortFn(CodeGenFunction &CGF) {
 }
 namespace {
-  struct CallGuardAbort : EHScopeStack::LazyCleanup {
+  struct CallGuardAbort : EHScopeStack::Cleanup {
     llvm::GlobalVariable *Guard;
     CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}
@@ -388,7 +388,7 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
   // Call __cxa_guard_abort along the exceptional edge.
   if (Exceptions)
-    EHStack.pushLazyCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
+    EHStack.pushCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
   EmitBlock(InitBlock);
 }

View File

@@ -58,10 +58,10 @@ EHScopeStack::stable_iterator
 EHScopeStack::getEnclosingEHCleanup(iterator it) const {
   assert(it != end());
   do {
-    if (isa<EHLazyCleanupScope>(*it)) {
-      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+    if (isa<EHCleanupScope>(*it)) {
+      if (cast<EHCleanupScope>(*it).isEHCleanup())
         return stabilize(it);
-      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
     }
     ++it;
   } while (it != end());
@@ -69,18 +69,18 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const {
 }
-void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
   assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
-  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
   bool IsNormalCleanup = Kind != EHCleanup;
   bool IsEHCleanup = Kind != NormalCleanup;
-  EHLazyCleanupScope *Scope =
-    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
-                                    IsEHCleanup,
-                                    Size,
-                                    BranchFixups.size(),
-                                    InnermostNormalCleanup,
-                                    InnermostEHCleanup);
+  EHCleanupScope *Scope =
+    new (Buffer) EHCleanupScope(IsNormalCleanup,
+                                IsEHCleanup,
+                                Size,
+                                BranchFixups.size(),
+                                InnermostNormalCleanup,
+                                InnermostEHCleanup);
   if (IsNormalCleanup)
     InnermostNormalCleanup = stable_begin();
   if (IsEHCleanup)
@@ -92,8 +92,8 @@ void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");
-  assert(isa<EHLazyCleanupScope>(*begin()));
-  EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+  assert(isa<EHCleanupScope>(*begin()));
+  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
   InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
   StartOfData += Cleanup.getAllocatedSize();
@@ -150,7 +150,7 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());
   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
   while (BranchFixups.size() > MinSize &&
@@ -387,7 +387,7 @@ static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) {
 namespace {
   /// A cleanup to free the exception object if its initialization
   /// throws.
-  struct FreeExceptionCleanup : EHScopeStack::LazyCleanup {
+  struct FreeExceptionCleanup : EHScopeStack::Cleanup {
     FreeExceptionCleanup(llvm::Value *ShouldFreeVar,
                          llvm::Value *ExnLocVar)
       : ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {}
@@ -437,9 +437,9 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
   // exception during initialization.
   // FIXME: stmt expressions might require this to be a normal
   // cleanup, too.
-  CGF.EHStack.pushLazyCleanup<FreeExceptionCleanup>(EHCleanup,
-                                                    ShouldFreeVar,
-                                                    ExnLocVar);
+  CGF.EHStack.pushCleanup<FreeExceptionCleanup>(EHCleanup,
+                                                ShouldFreeVar,
+                                                ExnLocVar);
   EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin();
   CGF.Builder.CreateStore(ExnLoc, ExnLocVar);
@@ -635,8 +635,8 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
   switch (S.getKind()) {
-  case EHScope::LazyCleanup:
-    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+  case EHScope::Cleanup:
+    return !cast<EHCleanupScope>(S).isEHCleanup();
   case EHScope::Filter:
   case EHScope::Catch:
   case EHScope::Terminate:
@@ -765,9 +765,9 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
        I != E; ++I) {
     switch (I->getKind()) {
-    case EHScope::LazyCleanup:
+    case EHScope::Cleanup:
       if (!HasEHCleanup)
-        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+        HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
       // We otherwise don't care about cleanups.
       continue;
@@ -1016,7 +1016,7 @@ namespace {
   /// of the caught type, so we have to assume the actual thrown
   /// exception type might have a throwing destructor, even if the
   /// caught type's destructor is trivial or nothrow.
-  struct CallEndCatch : EHScopeStack::LazyCleanup {
+  struct CallEndCatch : EHScopeStack::Cleanup {
     CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
     bool MightThrow;
@@ -1041,7 +1041,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
   llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn);
   Call->setDoesNotThrow();
-  CGF.EHStack.pushLazyCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
+  CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
   return Call;
 }
@@ -1232,7 +1232,7 @@ static void BeginCatch(CodeGenFunction &CGF,
 }
 namespace {
-  struct CallRethrow : EHScopeStack::LazyCleanup {
+  struct CallRethrow : EHScopeStack::Cleanup {
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
       CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0);
     }
@@ -1282,7 +1282,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
     // _cxa_rethrow. This needs to happen before __cxa_end_catch is
     // called, and so it is pushed after BeginCatch.
     if (ImplicitRethrow)
-      EHStack.pushLazyCleanup<CallRethrow>(NormalCleanup);
+      EHStack.pushCleanup<CallRethrow>(NormalCleanup);
     // Perform the body of the catch.
     EmitStmt(C->getHandlerBlock());
@@ -1299,7 +1299,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 }
 namespace {
-  struct CallEndCatchForFinally : EHScopeStack::LazyCleanup {
+  struct CallEndCatchForFinally : EHScopeStack::Cleanup {
     llvm::Value *ForEHVar;
     llvm::Value *EndCatchFn;
     CallEndCatchForFinally(llvm::Value *ForEHVar, llvm::Value *EndCatchFn)
@@ -1319,7 +1319,7 @@ namespace {
     }
   };
-  struct PerformFinally : EHScopeStack::LazyCleanup {
+  struct PerformFinally : EHScopeStack::Cleanup {
     const Stmt *Body;
     llvm::Value *ForEHVar;
     llvm::Value *EndCatchFn;
@@ -1335,8 +1335,8 @@ namespace {
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
       // Enter a cleanup to call the end-catch function if one was provided.
       if (EndCatchFn)
-        CGF.EHStack.pushLazyCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
-                                                            ForEHVar, EndCatchFn);
+        CGF.EHStack.pushCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
+                                                        ForEHVar, EndCatchFn);
       // Emit the finally block.
       CGF.EmitStmt(Body);
@@ -1430,9 +1430,9 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
   InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext()));
   // Enter a normal cleanup which will perform the @finally block.
-  EHStack.pushLazyCleanup<PerformFinally>(NormalCleanup, Body,
-                                          ForEHVar, EndCatchFn,
-                                          RethrowFn, SavedExnVar);
+  EHStack.pushCleanup<PerformFinally>(NormalCleanup, Body,
+                                      ForEHVar, EndCatchFn,
+                                      RethrowFn, SavedExnVar);
   // Enter a catch-all scope.
   llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall");
@@ -1537,6 +1537,6 @@ llvm::BasicBlock *CodeGenFunction::getTerminateHandler() {
   return TerminateHandler;
 }
-EHScopeStack::LazyCleanup::~LazyCleanup() {
-  llvm_unreachable("LazyCleanup is indestructable");
+EHScopeStack::Cleanup::~Cleanup() {
+  llvm_unreachable("Cleanup is indestructable");
 }

View File

@@ -63,7 +63,7 @@ protected:
   enum { BitsRemaining = 30 };
 public:
-  enum Kind { LazyCleanup, Catch, Terminate, Filter };
+  enum Kind { Cleanup, Catch, Terminate, Filter };
   EHScope(Kind K) : CachedLandingPad(0), K(K) {}
@@ -154,14 +154,14 @@ public:
 };
 /// A cleanup scope which generates the cleanup blocks lazily.
-class EHLazyCleanupScope : public EHScope {
+class EHCleanupScope : public EHScope {
   /// Whether this cleanup needs to be run along normal edges.
   bool IsNormalCleanup : 1;
   /// Whether this cleanup needs to be run along exception edges.
   bool IsEHCleanup : 1;
-  /// The amount of extra storage needed by the LazyCleanup.
+  /// The amount of extra storage needed by the Cleanup.
   /// Always a multiple of the scope-stack alignment.
   unsigned CleanupSize : 12;
@@ -188,18 +188,18 @@ public:
   /// Gets the size required for a lazy cleanup scope with the given
   /// cleanup-data requirements.
   static size_t getSizeForCleanupSize(size_t Size) {
-    return sizeof(EHLazyCleanupScope) + Size;
+    return sizeof(EHCleanupScope) + Size;
   }
   size_t getAllocatedSize() const {
-    return sizeof(EHLazyCleanupScope) + CleanupSize;
+    return sizeof(EHCleanupScope) + CleanupSize;
   }
-  EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
-                     unsigned FixupDepth,
-                     EHScopeStack::stable_iterator EnclosingNormal,
-                     EHScopeStack::stable_iterator EnclosingEH)
-    : EHScope(EHScope::LazyCleanup),
+  EHCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
+                 unsigned FixupDepth,
+                 EHScopeStack::stable_iterator EnclosingNormal,
+                 EHScopeStack::stable_iterator EnclosingEH)
+    : EHScope(EHScope::Cleanup),
       IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
       CleanupSize(CleanupSize), FixupDepth(FixupDepth),
       EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
@@ -225,12 +225,12 @@ public:
   size_t getCleanupSize() const { return CleanupSize; }
   void *getCleanupBuffer() { return this + 1; }
-  EHScopeStack::LazyCleanup *getCleanup() {
-    return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer());
+  EHScopeStack::Cleanup *getCleanup() {
+    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
   }
   static bool classof(const EHScope *Scope) {
-    return (Scope->getKind() == LazyCleanup);
+    return (Scope->getKind() == Cleanup);
   }
 };
@@ -319,8 +319,8 @@ public:
         static_cast<const EHFilterScope*>(get())->getNumFilters());
       break;
-    case EHScope::LazyCleanup:
-      Ptr += static_cast<const EHLazyCleanupScope*>(get())
+    case EHScope::Cleanup:
+      Ptr += static_cast<const EHCleanupScope*>(get())
               ->getAllocatedSize();
      break;

View File

@@ -1856,7 +1856,7 @@ llvm::Constant *CGObjCGNU::EnumerationMutationFunction() {
 }
 namespace {
-  struct CallSyncExit : EHScopeStack::LazyCleanup {
+  struct CallSyncExit : EHScopeStack::Cleanup {
     llvm::Value *SyncExitFn;
     llvm::Value *SyncArg;
     CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
@@ -1885,8 +1885,7 @@ void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
   // Register an all-paths cleanup to release the lock.
   llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
-  CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
-                                            SyncExit, SyncArg);
+  CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, SyncExit, SyncArg);
   // Emit the body of the statement.
   CGF.EmitStmt(S.getSynchBody());

View File

@@ -2542,7 +2542,7 @@ void CGObjCMac::EmitSynchronizedStmt(CodeGenFunction &CGF,
 }
 namespace {
-  struct PerformFragileFinally : EHScopeStack::LazyCleanup {
+  struct PerformFragileFinally : EHScopeStack::Cleanup {
     const Stmt &S;
     llvm::Value *SyncArg;
     llvm::Value *CallTryExitVar;
@@ -2745,11 +2745,11 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                       CallTryExitVar);
   // Push a normal cleanup to leave the try scope.
-  CGF.EHStack.pushLazyCleanup<PerformFragileFinally>(NormalCleanup, &S,
-                                                     SyncArg,
-                                                     CallTryExitVar,
-                                                     ExceptionData,
-                                                     &ObjCTypes);
+  CGF.EHStack.pushCleanup<PerformFragileFinally>(NormalCleanup, &S,
+                                                 SyncArg,
+                                                 CallTryExitVar,
+                                                 ExceptionData,
+                                                 &ObjCTypes);
   // Enter a try block:
   //  - Call objc_exception_try_enter to push ExceptionData on top of
@@ -5717,7 +5717,7 @@ void CGObjCNonFragileABIMac::EmitObjCGlobalAssign(CodeGen::CodeGenFunction &CGF,
 }
 namespace {
-  struct CallSyncExit : EHScopeStack::LazyCleanup {
+  struct CallSyncExit : EHScopeStack::Cleanup {
     llvm::Value *SyncExitFn;
     llvm::Value *SyncArg;
     CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
@@ -5741,9 +5741,9 @@ CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
     ->setDoesNotThrow();
   // Register an all-paths cleanup to release the lock.
-  CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
-                                            ObjCTypes.getSyncExitFn(),
-                                            SyncArg);
+  CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup,
+                                        ObjCTypes.getSyncExitFn(),
+                                        SyncArg);
   // Emit the body of the statement.
   CGF.EmitStmt(S.getSynchBody());
@@ -5760,7 +5760,7 @@ namespace {
     llvm::Value *TypeInfo;
   };
-  struct CallObjCEndCatch : EHScopeStack::LazyCleanup {
+  struct CallObjCEndCatch : EHScopeStack::Cleanup {
     CallObjCEndCatch(bool MightThrow, llvm::Value *Fn) :
       MightThrow(MightThrow), Fn(Fn) {}
     bool MightThrow;
@@ -5865,9 +5865,9 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
     // Add a cleanup to leave the catch.
     bool EndCatchMightThrow = (Handler.Variable == 0);
-    CGF.EHStack.pushLazyCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
-                                                  EndCatchMightThrow,
-                                                  ObjCTypes.getObjCEndCatchFn());
+    CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
+                                              EndCatchMightThrow,
+                                              ObjCTypes.getObjCEndCatchFn());
     // Bind the catch parameter if it exists.
     if (const VarDecl *CatchParam = Handler.Variable) {

View File

@@ -16,7 +16,7 @@ using namespace clang;
 using namespace CodeGen;
 namespace {
-  struct DestroyTemporary : EHScopeStack::LazyCleanup {
+  struct DestroyTemporary : EHScopeStack::Cleanup {
     const CXXTemporary *Temporary;
     llvm::Value *Addr;
     llvm::Value *CondPtr;
@@ -71,8 +71,8 @@ void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
     Builder.CreateStore(Builder.getTrue(), CondPtr);
   }
-  EHStack.pushLazyCleanup<DestroyTemporary>(NormalAndEHCleanup,
-                                            Temporary, Ptr, CondPtr);
+  EHStack.pushCleanup<DestroyTemporary>(NormalAndEHCleanup,
+                                        Temporary, Ptr, CondPtr);
 }
 RValue

View File

@@ -796,19 +796,19 @@ static void SimplifyCleanupEdges(CodeGenFunction &CGF,
   SimplifyCleanupEntry(CGF, Entry);
 }
-static void EmitLazyCleanup(CodeGenFunction &CGF,
-                            EHScopeStack::LazyCleanup *Fn,
-                            bool ForEH) {
+static void EmitCleanup(CodeGenFunction &CGF,
+                        EHScopeStack::Cleanup *Fn,
+                        bool ForEH) {
   if (ForEH) CGF.EHStack.pushTerminate();
   Fn->Emit(CGF, ForEH);
   if (ForEH) CGF.EHStack.popTerminate();
   assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
 }
-static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
-                                    EHScopeStack::LazyCleanup *Fn,
-                                    bool ForEH,
-                                    llvm::BasicBlock *Entry) {
+static void SplitAndEmitCleanup(CodeGenFunction &CGF,
+                                EHScopeStack::Cleanup *Fn,
+                                bool ForEH,
+                                llvm::BasicBlock *Entry) {
   assert(Entry && "no entry block for cleanup");
   // Remove the switch and load from the end of the entry block.
@@ -824,7 +824,7 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
   // Emit the actual cleanup at the end of the entry block.
   CGF.Builder.SetInsertPoint(Entry);
-  EmitLazyCleanup(CGF, Fn, ForEH);
+  EmitCleanup(CGF, Fn, ForEH);
   // Put the load and switch at the end of the exit block.
   llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
@@ -837,10 +837,14 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
   CGF.Builder.ClearInsertionPoint();
 }
-static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
-  assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
-  EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
-  assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());
+/// Pops a cleanup block. If the block includes a normal cleanup, the
+/// current insertion point is threaded through the cleanup, as are
+/// any branch fixups on the cleanup.
+void CodeGenFunction::PopCleanupBlock() {
+  assert(!EHStack.empty() && "cleanup stack is empty!");
+  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
+  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
+  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
   // Check whether we need an EH cleanup. This is only true if we've
   // generated a lazy EH cleanup block.
@@ -851,14 +855,14 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
   //  - whether there are branch fix-ups through this cleanup
   unsigned FixupDepth = Scope.getFixupDepth();
-  bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;
+  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
   //  - whether control has already been threaded through this cleanup
   llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
   bool HasExistingBranches = (NormalEntry != 0);
   //  - whether there's a fallthrough
-  llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
+  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
   bool HasFallthrough = (FallthroughSource != 0);
   bool RequiresNormalCleanup = false;
@@ -869,9 +873,9 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
   // If we don't need the cleanup at all, we're done.
   if (!RequiresNormalCleanup && !RequiresEHCleanup) {
-    CGF.EHStack.popCleanup();
-    assert(CGF.EHStack.getNumBranchFixups() == 0 ||
-           CGF.EHStack.hasNormalCleanups());
+    EHStack.popCleanup();
+    assert(EHStack.getNumBranchFixups() == 0 ||
+           EHStack.hasNormalCleanups());
     return;
   }
@@ -883,35 +887,35 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
   memcpy(CleanupBuffer.data(),
          Scope.getCleanupBuffer(), Scope.getCleanupSize());
   CleanupBuffer.set_size(Scope.getCleanupSize());
-  EHScopeStack::LazyCleanup *Fn =
-    reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());
+  EHScopeStack::Cleanup *Fn =
+    reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());
   // We're done with the scope; pop it off so we can emit the cleanups.
-  CGF.EHStack.popCleanup();
+  EHStack.popCleanup();
   if (RequiresNormalCleanup) {
     // If we have a fallthrough and no other need for the cleanup,
     // emit it directly.
     if (HasFallthrough && !HasFixups && !HasExistingBranches) {
-      EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);
+      EmitCleanup(*this, Fn, /*ForEH*/ false);
     // Otherwise, the best approach is to thread everything through
     // the cleanup block and then try to clean up after ourselves.
     } else {
       // Force the entry block to exist.
       if (!HasExistingBranches) {
-        NormalEntry = CGF.createBasicBlock("cleanup");
-        CreateCleanupSwitch(CGF, NormalEntry);
+        NormalEntry = createBasicBlock("cleanup");
+        CreateCleanupSwitch(*this, NormalEntry);
       }
-      CGF.EmitBlock(NormalEntry);
+      EmitBlock(NormalEntry);
      // Thread the fallthrough edge through the (momentarily trivial)
      // cleanup.
      llvm::BasicBlock *FallthroughDestination = 0;
      if (HasFallthrough) {
        assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
-        FallthroughDestination = CGF.createBasicBlock("cleanup.cont");
+        FallthroughDestination = createBasicBlock("cleanup.cont");
        BranchFixup Fix;
        Fix.Destination = FallthroughDestination;
@@ -924,41 +928,32 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
        cast<llvm::BranchInst>(Fix.LatestBranch)
          ->setSuccessor(0, Fix.Destination);
-        ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
+        ThreadFixupThroughCleanup(*this, Fix, NormalEntry, NormalEntry);
      }
      // Thread any "real" fixups we need to thread.
-      for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
+      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I != E; ++I)
-        if (CGF.EHStack.getBranchFixup(I).Destination)
-          ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
+        if (EHStack.getBranchFixup(I).Destination)
+          ThreadFixupThroughCleanup(*this, EHStack.getBranchFixup(I),
                                    NormalEntry, NormalEntry);
-      SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);
+      SplitAndEmitCleanup(*this, Fn, /*ForEH*/ false, NormalEntry);
      if (HasFallthrough)
-        CGF.EmitBlock(FallthroughDestination);
+        EmitBlock(FallthroughDestination);
    }
  }
  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
-    CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
-    CGF.EmitBlock(EHEntry);
-    SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
-    CGF.Builder.restoreIP(SavedIP);
+    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
+    EmitBlock(EHEntry);
+    SplitAndEmitCleanup(*this, Fn, /*ForEH*/ true, EHEntry);
+    Builder.restoreIP(SavedIP);
  }
 }
-/// Pops a cleanup block. If the block includes a normal cleanup, the
-/// current insertion point is threaded through the cleanup, as are
-/// any branch fixups on the cleanup.
-void CodeGenFunction::PopCleanupBlock() {
-  assert(!EHStack.empty() && "cleanup stack is empty!");
-  assert(isa<EHLazyCleanupScope>(*EHStack.begin()));
-  return PopLazyCleanupBlock(*this);
-}
 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
   if (!HaveInsertPoint())
     return;
@@ -990,8 +985,8 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
   for (EHScopeStack::iterator I = EHStack.begin(),
          E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHLazyCleanupScope>(*I)) {
-      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+    if (isa<EHCleanupScope>(*I)) {
+      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
       if (Scope.isNormalCleanup()) {
         llvm::BasicBlock *Block = Scope.getNormalBlock();
         if (!Block) {
@@ -1034,8 +1029,8 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
   for (EHScopeStack::iterator I = EHStack.begin(),
         E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHLazyCleanupScope>(*I)) {
-      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+    if (isa<EHCleanupScope>(*I)) {
+      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
       if (Scope.isEHCleanup()) {
        llvm::BasicBlock *Block = Scope.getEHBlock();
        if (!Block) {

View File

@@ -129,13 +129,14 @@ public:
     }
   };
-  /// A lazy cleanup. Subclasses must be POD-like: cleanups will
-  /// not be destructed, and they will be allocated on the cleanup
-  /// stack and freely copied and moved around.
+  /// Information for lazily generating a cleanup. Subclasses must be
+  /// POD-like: cleanups will not be destructed, and they will be
+  /// allocated on the cleanup stack and freely copied and moved
+  /// around.
   ///
-  /// LazyCleanup implementations should generally be declared in an
+  /// Cleanup implementations should generally be declared in an
   /// anonymous namespace.
-  class LazyCleanup {
+  class Cleanup {
   public:
     // Anchor the construction vtable. We use the destructor because
     // gcc gives an obnoxious warning if there are virtual methods
@@ -144,7 +145,7 @@ public:
     // doesn't seem to be any other way around this warning.
     //
     // This destructor will never be called.
-    virtual ~LazyCleanup();
+    virtual ~Cleanup();
     /// Emit the cleanup. For normal cleanups, this is run in the
     /// same EH context as when the cleanup was pushed, i.e. the
@@ -204,7 +205,7 @@ private:
   void popNullFixups();
-  void *pushLazyCleanup(CleanupKind K, size_t DataSize);
+  void *pushCleanup(CleanupKind K, size_t DataSize);
 public:
   EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
@@ -217,49 +218,49 @@ public:
   /// Push a lazily-created cleanup on the stack.
   template <class T>
-  void pushLazyCleanup(CleanupKind Kind) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T();
+  void pushCleanup(CleanupKind Kind) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T();
     (void) Obj;
   }
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0);
+  void pushCleanup(CleanupKind Kind, A0 a0) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0);
     (void) Obj;
   }
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1);
     (void) Obj;
   }
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
     (void) Obj;
   }
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2, class A3>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
     (void) Obj;
   }
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2, class A3, class A4>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
     (void) Obj;
   }
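Each pushCleanup<T>(Kind, ...) overload above forwards its arguments into a placement new over raw storage obtained from the private pushCleanup(CleanupKind, size_t), which is why Cleanup implementations must stay POD-like and why ~Cleanup() is never run. A rough standalone sketch of that storage pattern, using invented names (ToyScopeStack is illustrative only, not Clang code):

#include <cstddef>
#include <new>
#include <vector>

// Toy illustration of the EHScopeStack storage scheme: cleanups are
// constructed in place inside a raw byte buffer and later discarded
// without their destructors ever being invoked.
class ToyScopeStack {
  std::vector<char> Buffer;

  void *allocate(std::size_t Size) {
    std::size_t Offset = Buffer.size();
    Buffer.resize(Offset + Size);
    return &Buffer[Offset];
  }

public:
  template <class T, class A0, class A1>
  void pushCleanup(A0 a0, A1 a1) {
    void *Mem = allocate(sizeof(T));
    new (Mem) T(a0, a1); // constructed in place; ~T() is intentionally never called
  }
};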