Rename LazyCleanup -> Cleanup. No functionality change for these last three
commits.

llvm-svn: 109000
John McCall 2010-07-21 07:22:38 +00:00
parent 20141f2d8c
commit cda666ccd8
10 changed files with 173 additions and 178 deletions
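
The rename is mechanical: EHScopeStack::LazyCleanup becomes EHScopeStack::Cleanup, pushLazyCleanup becomes pushCleanup, and EHLazyCleanupScope becomes EHCleanupScope. As an illustration (not part of the diff), this is the shape of the API after the commit; CallMyDtor and its members are hypothetical stand-ins for the real cleanups below:

namespace {
  // Cleanup subclasses must be POD-like: they are placement-new'd into
  // the EHScopeStack's buffer and their destructors are never run.
  struct CallMyDtor : EHScopeStack::Cleanup {
    llvm::Value *Addr;   // object to destroy (hypothetical field)
    CallMyDtor(llvm::Value *Addr) : Addr(Addr) {}

    // Emits the cleanup code; IsForEH distinguishes the exception
    // path from the normal fall-through path.
    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // ... emit the destructor call on Addr here ...
    }
  };
}

// Call sites simply drop the "Lazy": the template overload allocates
// sizeof(CallMyDtor) bytes on the cleanup stack and constructs the
// object in place.
CGF.EHStack.pushCleanup<CallMyDtor>(NormalAndEHCleanup, Addr);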


@@ -312,7 +312,7 @@ static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD,
 namespace {
   /// Call the destructor for a direct base class.
-  struct CallBaseDtor : EHScopeStack::LazyCleanup {
+  struct CallBaseDtor : EHScopeStack::Cleanup {
     const CXXRecordDecl *BaseClass;
     bool BaseIsVirtual;
     CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
@@ -361,8 +361,8 @@ static void EmitBaseInitializer(CodeGenFunction &CGF,
   CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true);
 
   if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor())
-    CGF.EHStack.pushLazyCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
-                                              isBaseVirtual);
+    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
+                                          isBaseVirtual);
 }
 
 static void EmitAggMemberInitializer(CodeGenFunction &CGF,
@@ -452,7 +452,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF,
 }
 
 namespace {
-  struct CallMemberDtor : EHScopeStack::LazyCleanup {
+  struct CallMemberDtor : EHScopeStack::Cleanup {
     FieldDecl *Field;
     CXXDestructorDecl *Dtor;
 
@@ -570,8 +570,8 @@ static void EmitMemberInitializer(CodeGenFunction &CGF,
     CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
     if (!RD->hasTrivialDestructor())
-      CGF.EHStack.pushLazyCleanup<CallMemberDtor>(EHCleanup, Field,
-                                                  RD->getDestructor());
+      CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field,
+                                              RD->getDestructor());
   }
 }
@@ -761,7 +761,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
 namespace {
   /// Call the operator delete associated with the current destructor.
-  struct CallDtorDelete : EHScopeStack::LazyCleanup {
+  struct CallDtorDelete : EHScopeStack::Cleanup {
     CallDtorDelete() {}
 
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
@@ -772,7 +772,7 @@ namespace {
     }
   };
 
-  struct CallArrayFieldDtor : EHScopeStack::LazyCleanup {
+  struct CallArrayFieldDtor : EHScopeStack::Cleanup {
     const FieldDecl *Field;
     CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {}
 
@@ -798,7 +798,7 @@ namespace {
     }
   };
 
-  struct CallFieldDtor : EHScopeStack::LazyCleanup {
+  struct CallFieldDtor : EHScopeStack::Cleanup {
     const FieldDecl *Field;
     CallFieldDtor(const FieldDecl *Field) : Field(Field) {}
 
@@ -831,7 +831,7 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
   if (DtorType == Dtor_Deleting) {
     assert(DD->getOperatorDelete() &&
            "operator delete missing - EmitDtorEpilogue");
-    EHStack.pushLazyCleanup<CallDtorDelete>(NormalAndEHCleanup);
+    EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
     return;
   }
@@ -853,9 +853,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
       if (BaseClassDecl->hasTrivialDestructor())
        continue;
 
-      EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
-                                            BaseClassDecl,
-                                            /*BaseIsVirtual*/ true);
+      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+                                        BaseClassDecl,
+                                        /*BaseIsVirtual*/ true);
     }
 
     return;
@@ -878,9 +878,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
     if (BaseClassDecl->hasTrivialDestructor())
       continue;
 
-    EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup,
-                                          BaseClassDecl,
-                                          /*BaseIsVirtual*/ false);
+    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
+                                      BaseClassDecl,
+                                      /*BaseIsVirtual*/ false);
   }
 
   // Destroy direct fields.
@@ -904,9 +904,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
      continue;
 
    if (Array)
-      EHStack.pushLazyCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
+      EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field);
    else
-      EHStack.pushLazyCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
+      EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field);
  }
}
@@ -1164,7 +1164,7 @@ void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
 }
 
 namespace {
-  struct CallLocalDtor : EHScopeStack::LazyCleanup {
+  struct CallLocalDtor : EHScopeStack::Cleanup {
     const CXXDestructorDecl *Dtor;
     llvm::Value *Addr;
 
@@ -1180,7 +1180,7 @@ namespace {
 
 void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                             llvm::Value *Addr) {
-  EHStack.pushLazyCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
+  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr);
 }
 
 void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) {


@@ -389,7 +389,7 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
 }
 
 namespace {
-  struct CallArrayDtor : EHScopeStack::LazyCleanup {
+  struct CallArrayDtor : EHScopeStack::Cleanup {
     CallArrayDtor(const CXXDestructorDecl *Dtor,
                   const ConstantArrayType *Type,
                   llvm::Value *Loc)
@@ -408,7 +408,7 @@ namespace {
     }
   };
 
-  struct CallVarDtor : EHScopeStack::LazyCleanup {
+  struct CallVarDtor : EHScopeStack::Cleanup {
     CallVarDtor(const CXXDestructorDecl *Dtor,
                 llvm::Value *NRVOFlag,
                 llvm::Value *Loc)
@@ -441,7 +441,7 @@ namespace {
 }
 
 namespace {
-  struct CallStackRestore : EHScopeStack::LazyCleanup {
+  struct CallStackRestore : EHScopeStack::Cleanup {
     llvm::Value *Stack;
     CallStackRestore(llvm::Value *Stack) : Stack(Stack) {}
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
@@ -451,7 +451,7 @@ namespace {
     }
   };
 
-  struct CallCleanupFunction : EHScopeStack::LazyCleanup {
+  struct CallCleanupFunction : EHScopeStack::Cleanup {
     llvm::Constant *CleanupFn;
     const CGFunctionInfo &FnInfo;
     llvm::Value *Addr;
@@ -479,7 +479,7 @@ namespace {
     }
   };
 
-  struct CallBlockRelease : EHScopeStack::LazyCleanup {
+  struct CallBlockRelease : EHScopeStack::Cleanup {
     llvm::Value *Addr;
     CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {}
 
@@ -592,7 +592,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
       DidCallStackSave = true;
 
       // Push a cleanup block and restore the stack there.
-      EHStack.pushLazyCleanup<CallStackRestore>(NormalCleanup, Stack);
+      EHStack.pushCleanup<CallStackRestore>(NormalCleanup, Stack);
     }
 
     // Get the element type.
@@ -783,11 +783,11 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
 
       if (const ConstantArrayType *Array =
             getContext().getAsConstantArrayType(Ty)) {
-        EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup,
-                                               D, Array, Loc);
+        EHStack.pushCleanup<CallArrayDtor>(NormalAndEHCleanup,
+                                           D, Array, Loc);
      } else {
-        EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup,
-                                             D, NRVOFlag, Loc);
+        EHStack.pushCleanup<CallVarDtor>(NormalAndEHCleanup,
+                                         D, NRVOFlag, Loc);
      }
    }
  }
@@ -800,12 +800,12 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
    assert(F && "Could not find function!");
 
    const CGFunctionInfo &Info = CGM.getTypes().getFunctionInfo(FD);
-    EHStack.pushLazyCleanup<CallCleanupFunction>(NormalAndEHCleanup,
-                                                 F, &Info, DeclPtr, &D);
+    EHStack.pushCleanup<CallCleanupFunction>(NormalAndEHCleanup,
+                                             F, &Info, DeclPtr, &D);
  }
 
  if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly)
-    EHStack.pushLazyCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
+    EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr);
}
 
/// Emit an alloca (or GlobalValue depending on target)


@@ -330,7 +330,7 @@ static llvm::Constant *getGuardAbortFn(CodeGenFunction &CGF) {
 }
 
 namespace {
-  struct CallGuardAbort : EHScopeStack::LazyCleanup {
+  struct CallGuardAbort : EHScopeStack::Cleanup {
     llvm::GlobalVariable *Guard;
     CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}
 
@@ -388,7 +388,7 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D,
 
   // Call __cxa_guard_abort along the exceptional edge.
   if (Exceptions)
-    EHStack.pushLazyCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
+    EHStack.pushCleanup<CallGuardAbort>(EHCleanup, GuardVariable);
 
   EmitBlock(InitBlock);
 }


@@ -58,10 +58,10 @@ EHScopeStack::stable_iterator
 EHScopeStack::getEnclosingEHCleanup(iterator it) const {
   assert(it != end());
   do {
-    if (isa<EHLazyCleanupScope>(*it)) {
-      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+    if (isa<EHCleanupScope>(*it)) {
+      if (cast<EHCleanupScope>(*it).isEHCleanup())
         return stabilize(it);
-      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
     }
     ++it;
   } while (it != end());
@@ -69,18 +69,18 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const {
 }
 
-void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
   assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
-  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
   bool IsNormalCleanup = Kind != EHCleanup;
   bool IsEHCleanup = Kind != NormalCleanup;
-  EHLazyCleanupScope *Scope =
-    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
-                                    IsEHCleanup,
-                                    Size,
-                                    BranchFixups.size(),
-                                    InnermostNormalCleanup,
-                                    InnermostEHCleanup);
+  EHCleanupScope *Scope =
+    new (Buffer) EHCleanupScope(IsNormalCleanup,
+                                IsEHCleanup,
+                                Size,
+                                BranchFixups.size(),
+                                InnermostNormalCleanup,
+                                InnermostEHCleanup);
   if (IsNormalCleanup)
     InnermostNormalCleanup = stable_begin();
   if (IsEHCleanup)
@@ -92,8 +92,8 @@ void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
 
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");
-  assert(isa<EHLazyCleanupScope>(*begin()));
-  EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+  assert(isa<EHCleanupScope>(*begin()));
+  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
   InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
   InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
   StartOfData += Cleanup.getAllocatedSize();
@@ -150,7 +150,7 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());
 
   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
 
   while (BranchFixups.size() > MinSize &&
@@ -387,7 +387,7 @@ static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) {
 namespace {
   /// A cleanup to free the exception object if its initialization
   /// throws.
-  struct FreeExceptionCleanup : EHScopeStack::LazyCleanup {
+  struct FreeExceptionCleanup : EHScopeStack::Cleanup {
     FreeExceptionCleanup(llvm::Value *ShouldFreeVar,
                          llvm::Value *ExnLocVar)
       : ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {}
@@ -437,9 +437,9 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
   // exception during initialization.
   // FIXME: stmt expressions might require this to be a normal
   // cleanup, too.
-  CGF.EHStack.pushLazyCleanup<FreeExceptionCleanup>(EHCleanup,
-                                                    ShouldFreeVar,
-                                                    ExnLocVar);
+  CGF.EHStack.pushCleanup<FreeExceptionCleanup>(EHCleanup,
+                                                ShouldFreeVar,
+                                                ExnLocVar);
   EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin();
 
   CGF.Builder.CreateStore(ExnLoc, ExnLocVar);
@@ -635,8 +635,8 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
   switch (S.getKind()) {
-  case EHScope::LazyCleanup:
-    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+  case EHScope::Cleanup:
+    return !cast<EHCleanupScope>(S).isEHCleanup();
   case EHScope::Filter:
   case EHScope::Catch:
   case EHScope::Terminate:
@@ -765,9 +765,9 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
        I != E; ++I) {
 
     switch (I->getKind()) {
-    case EHScope::LazyCleanup:
+    case EHScope::Cleanup:
       if (!HasEHCleanup)
-        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+        HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
       // We otherwise don't care about cleanups.
       continue;
@@ -1016,7 +1016,7 @@ namespace {
   /// of the caught type, so we have to assume the actual thrown
   /// exception type might have a throwing destructor, even if the
   /// caught type's destructor is trivial or nothrow.
-  struct CallEndCatch : EHScopeStack::LazyCleanup {
+  struct CallEndCatch : EHScopeStack::Cleanup {
     CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
     bool MightThrow;
 
@@ -1041,7 +1041,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
   llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn);
   Call->setDoesNotThrow();
 
-  CGF.EHStack.pushLazyCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
+  CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
 
   return Call;
 }
@@ -1232,7 +1232,7 @@ static void BeginCatch(CodeGenFunction &CGF,
 }
 
 namespace {
-  struct CallRethrow : EHScopeStack::LazyCleanup {
+  struct CallRethrow : EHScopeStack::Cleanup {
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
       CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0);
     }
@@ -1282,7 +1282,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
     // _cxa_rethrow.  This needs to happen before __cxa_end_catch is
     // called, and so it is pushed after BeginCatch.
     if (ImplicitRethrow)
-      EHStack.pushLazyCleanup<CallRethrow>(NormalCleanup);
+      EHStack.pushCleanup<CallRethrow>(NormalCleanup);
 
     // Perform the body of the catch.
     EmitStmt(C->getHandlerBlock());
@@ -1299,7 +1299,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 }
 
 namespace {
-  struct CallEndCatchForFinally : EHScopeStack::LazyCleanup {
+  struct CallEndCatchForFinally : EHScopeStack::Cleanup {
     llvm::Value *ForEHVar;
     llvm::Value *EndCatchFn;
     CallEndCatchForFinally(llvm::Value *ForEHVar, llvm::Value *EndCatchFn)
@@ -1319,7 +1319,7 @@ namespace {
     }
   };
 
-  struct PerformFinally : EHScopeStack::LazyCleanup {
+  struct PerformFinally : EHScopeStack::Cleanup {
     const Stmt *Body;
     llvm::Value *ForEHVar;
     llvm::Value *EndCatchFn;
@@ -1335,8 +1335,8 @@ namespace {
     void Emit(CodeGenFunction &CGF, bool IsForEH) {
       // Enter a cleanup to call the end-catch function if one was provided.
       if (EndCatchFn)
-        CGF.EHStack.pushLazyCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
-                                                            ForEHVar, EndCatchFn);
+        CGF.EHStack.pushCleanup<CallEndCatchForFinally>(NormalAndEHCleanup,
+                                                        ForEHVar, EndCatchFn);
 
       // Emit the finally block.
       CGF.EmitStmt(Body);
@@ -1430,9 +1430,9 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
   InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext()));
 
   // Enter a normal cleanup which will perform the @finally block.
-  EHStack.pushLazyCleanup<PerformFinally>(NormalCleanup, Body,
-                                          ForEHVar, EndCatchFn,
-                                          RethrowFn, SavedExnVar);
+  EHStack.pushCleanup<PerformFinally>(NormalCleanup, Body,
+                                      ForEHVar, EndCatchFn,
+                                      RethrowFn, SavedExnVar);
 
   // Enter a catch-all scope.
   llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall");
@@ -1537,6 +1537,6 @@ llvm::BasicBlock *CodeGenFunction::getTerminateHandler() {
   return TerminateHandler;
 }
 
-EHScopeStack::LazyCleanup::~LazyCleanup() {
-  llvm_unreachable("LazyCleanup is indestructable");
+EHScopeStack::Cleanup::~Cleanup() {
+  llvm_unreachable("Cleanup is indestructable");
 }


@@ -63,7 +63,7 @@ protected:
   enum { BitsRemaining = 30 };
 
 public:
-  enum Kind { LazyCleanup, Catch, Terminate, Filter };
+  enum Kind { Cleanup, Catch, Terminate, Filter };
 
   EHScope(Kind K) : CachedLandingPad(0), K(K) {}
 
@@ -154,14 +154,14 @@ public:
 };
 
 /// A cleanup scope which generates the cleanup blocks lazily.
-class EHLazyCleanupScope : public EHScope {
+class EHCleanupScope : public EHScope {
   /// Whether this cleanup needs to be run along normal edges.
   bool IsNormalCleanup : 1;
 
   /// Whether this cleanup needs to be run along exception edges.
   bool IsEHCleanup : 1;
 
-  /// The amount of extra storage needed by the LazyCleanup.
+  /// The amount of extra storage needed by the Cleanup.
   /// Always a multiple of the scope-stack alignment.
   unsigned CleanupSize : 12;
 
@@ -188,18 +188,18 @@ public:
   /// Gets the size required for a lazy cleanup scope with the given
   /// cleanup-data requirements.
   static size_t getSizeForCleanupSize(size_t Size) {
-    return sizeof(EHLazyCleanupScope) + Size;
+    return sizeof(EHCleanupScope) + Size;
   }
 
   size_t getAllocatedSize() const {
-    return sizeof(EHLazyCleanupScope) + CleanupSize;
+    return sizeof(EHCleanupScope) + CleanupSize;
   }
 
-  EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
-                     unsigned FixupDepth,
-                     EHScopeStack::stable_iterator EnclosingNormal,
-                     EHScopeStack::stable_iterator EnclosingEH)
-    : EHScope(EHScope::LazyCleanup),
+  EHCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize,
+                 unsigned FixupDepth,
+                 EHScopeStack::stable_iterator EnclosingNormal,
+                 EHScopeStack::stable_iterator EnclosingEH)
+    : EHScope(EHScope::Cleanup),
       IsNormalCleanup(IsNormal), IsEHCleanup(IsEH),
       CleanupSize(CleanupSize), FixupDepth(FixupDepth),
       EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
@@ -225,12 +225,12 @@ public:
   size_t getCleanupSize() const { return CleanupSize; }
   void *getCleanupBuffer() { return this + 1; }
 
-  EHScopeStack::LazyCleanup *getCleanup() {
-    return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer());
+  EHScopeStack::Cleanup *getCleanup() {
+    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
   }
 
   static bool classof(const EHScope *Scope) {
-    return (Scope->getKind() == LazyCleanup);
+    return (Scope->getKind() == Cleanup);
   }
 };
 
@@ -319,8 +319,8 @@ public:
               static_cast<const EHFilterScope*>(get())->getNumFilters());
       break;
 
-    case EHScope::LazyCleanup:
-      Ptr += static_cast<const EHLazyCleanupScope*>(get())
+    case EHScope::Cleanup:
+      Ptr += static_cast<const EHCleanupScope*>(get())
                ->getAllocatedSize();
       break;
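
The getSizeForCleanupSize/getCleanupBuffer pair above implements a variable-length record: the cleanup object's bytes are stored immediately after the EHCleanupScope header in a single allocation. A simplified standalone sketch of that layout trick (hypothetical names, not the clang code):

#include <cstddef>

struct ScopeHeader {
  unsigned CleanupSize;

  // One allocation holds the header plus the cleanup's own storage.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(ScopeHeader) + Size;
  }

  // The cleanup's bytes begin immediately after the header object.
  void *getCleanupBuffer() { return this + 1; }
};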


@@ -1856,7 +1856,7 @@ llvm::Constant *CGObjCGNU::EnumerationMutationFunction() {
 }
 
 namespace {
-  struct CallSyncExit : EHScopeStack::LazyCleanup {
+  struct CallSyncExit : EHScopeStack::Cleanup {
     llvm::Value *SyncExitFn;
     llvm::Value *SyncArg;
     CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
@@ -1885,8 +1885,7 @@ void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
 
   // Register an all-paths cleanup to release the lock.
   llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit");
-  CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
-                                            SyncExit, SyncArg);
+  CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, SyncExit, SyncArg);
 
   // Emit the body of the statement.
   CGF.EmitStmt(S.getSynchBody());


@@ -2542,7 +2542,7 @@ void CGObjCMac::EmitSynchronizedStmt(CodeGenFunction &CGF,
 }
 
 namespace {
-  struct PerformFragileFinally : EHScopeStack::LazyCleanup {
+  struct PerformFragileFinally : EHScopeStack::Cleanup {
     const Stmt &S;
     llvm::Value *SyncArg;
     llvm::Value *CallTryExitVar;
@@ -2745,11 +2745,11 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
                       CallTryExitVar);
 
   // Push a normal cleanup to leave the try scope.
-  CGF.EHStack.pushLazyCleanup<PerformFragileFinally>(NormalCleanup, &S,
-                                                     SyncArg,
-                                                     CallTryExitVar,
-                                                     ExceptionData,
-                                                     &ObjCTypes);
+  CGF.EHStack.pushCleanup<PerformFragileFinally>(NormalCleanup, &S,
+                                                 SyncArg,
+                                                 CallTryExitVar,
+                                                 ExceptionData,
+                                                 &ObjCTypes);
 
   // Enter a try block:
   //  - Call objc_exception_try_enter to push ExceptionData on top of
@@ -5717,7 +5717,7 @@ void CGObjCNonFragileABIMac::EmitObjCGlobalAssign(CodeGen::CodeGenFunction &CGF,
 }
 
 namespace {
-  struct CallSyncExit : EHScopeStack::LazyCleanup {
+  struct CallSyncExit : EHScopeStack::Cleanup {
     llvm::Value *SyncExitFn;
     llvm::Value *SyncArg;
     CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg)
@@ -5741,9 +5741,9 @@ CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF,
     ->setDoesNotThrow();
 
   // Register an all-paths cleanup to release the lock.
-  CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup,
-                                            ObjCTypes.getSyncExitFn(),
-                                            SyncArg);
+  CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup,
+                                        ObjCTypes.getSyncExitFn(),
+                                        SyncArg);
 
   // Emit the body of the statement.
   CGF.EmitStmt(S.getSynchBody());
@@ -5760,7 +5760,7 @@ namespace {
     llvm::Value *TypeInfo;
   };
 
-  struct CallObjCEndCatch : EHScopeStack::LazyCleanup {
+  struct CallObjCEndCatch : EHScopeStack::Cleanup {
     CallObjCEndCatch(bool MightThrow, llvm::Value *Fn) :
       MightThrow(MightThrow), Fn(Fn) {}
     bool MightThrow;
@@ -5865,9 +5865,9 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF,
 
     // Add a cleanup to leave the catch.
     bool EndCatchMightThrow = (Handler.Variable == 0);
-    CGF.EHStack.pushLazyCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
-                                                  EndCatchMightThrow,
-                                                  ObjCTypes.getObjCEndCatchFn());
+    CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup,
+                                              EndCatchMightThrow,
+                                              ObjCTypes.getObjCEndCatchFn());
 
     // Bind the catch parameter if it exists.
     if (const VarDecl *CatchParam = Handler.Variable) {


@@ -16,7 +16,7 @@ using namespace clang;
 using namespace CodeGen;
 
 namespace {
-  struct DestroyTemporary : EHScopeStack::LazyCleanup {
+  struct DestroyTemporary : EHScopeStack::Cleanup {
     const CXXTemporary *Temporary;
     llvm::Value *Addr;
     llvm::Value *CondPtr;
@@ -71,8 +71,8 @@ void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
     Builder.CreateStore(Builder.getTrue(), CondPtr);
   }
 
-  EHStack.pushLazyCleanup<DestroyTemporary>(NormalAndEHCleanup,
-                                            Temporary, Ptr, CondPtr);
+  EHStack.pushCleanup<DestroyTemporary>(NormalAndEHCleanup,
+                                        Temporary, Ptr, CondPtr);
 }
 
 RValue


@@ -796,19 +796,19 @@ static void SimplifyCleanupEdges(CodeGenFunction &CGF,
   SimplifyCleanupEntry(CGF, Entry);
 }
 
-static void EmitLazyCleanup(CodeGenFunction &CGF,
-                            EHScopeStack::LazyCleanup *Fn,
-                            bool ForEH) {
+static void EmitCleanup(CodeGenFunction &CGF,
+                        EHScopeStack::Cleanup *Fn,
+                        bool ForEH) {
   if (ForEH) CGF.EHStack.pushTerminate();
   Fn->Emit(CGF, ForEH);
   if (ForEH) CGF.EHStack.popTerminate();
   assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");
 }
 
-static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
-                                    EHScopeStack::LazyCleanup *Fn,
-                                    bool ForEH,
-                                    llvm::BasicBlock *Entry) {
+static void SplitAndEmitCleanup(CodeGenFunction &CGF,
+                                EHScopeStack::Cleanup *Fn,
+                                bool ForEH,
+                                llvm::BasicBlock *Entry) {
   assert(Entry && "no entry block for cleanup");
 
   // Remove the switch and load from the end of the entry block.
@@ -824,7 +824,7 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
 
   // Emit the actual cleanup at the end of the entry block.
   CGF.Builder.SetInsertPoint(Entry);
-  EmitLazyCleanup(CGF, Fn, ForEH);
+  EmitCleanup(CGF, Fn, ForEH);
 
   // Put the load and switch at the end of the exit block.
   llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock();
@@ -837,10 +837,14 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF,
   CGF.Builder.ClearInsertionPoint();
 }
 
-static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
-  assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!");
-  EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin());
-  assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups());
+/// Pops a cleanup block.  If the block includes a normal cleanup, the
+/// current insertion point is threaded through the cleanup, as are
+/// any branch fixups on the cleanup.
+void CodeGenFunction::PopCleanupBlock() {
+  assert(!EHStack.empty() && "cleanup stack is empty!");
+  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
+  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
+  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
 
   // Check whether we need an EH cleanup.  This is only true if we've
   // generated a lazy EH cleanup block.
@@ -851,14 +855,14 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
 
   //  - whether there are branch fix-ups through this cleanup
   unsigned FixupDepth = Scope.getFixupDepth();
-  bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth;
+  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
 
   //  - whether control has already been threaded through this cleanup
   llvm::BasicBlock *NormalEntry = Scope.getNormalBlock();
   bool HasExistingBranches = (NormalEntry != 0);
 
   //  - whether there's a fallthrough
-  llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock();
+  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
   bool HasFallthrough = (FallthroughSource != 0);
 
   bool RequiresNormalCleanup = false;
@@ -869,9 +873,9 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
 
   // If we don't need the cleanup at all, we're done.
   if (!RequiresNormalCleanup && !RequiresEHCleanup) {
-    CGF.EHStack.popCleanup();
-    assert(CGF.EHStack.getNumBranchFixups() == 0 ||
-           CGF.EHStack.hasNormalCleanups());
+    EHStack.popCleanup();
+    assert(EHStack.getNumBranchFixups() == 0 ||
+           EHStack.hasNormalCleanups());
     return;
   }
 
@@ -883,35 +887,35 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
   memcpy(CleanupBuffer.data(),
          Scope.getCleanupBuffer(), Scope.getCleanupSize());
   CleanupBuffer.set_size(Scope.getCleanupSize());
-  EHScopeStack::LazyCleanup *Fn =
-    reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data());
+  EHScopeStack::Cleanup *Fn =
+    reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());
 
   // We're done with the scope; pop it off so we can emit the cleanups.
-  CGF.EHStack.popCleanup();
+  EHStack.popCleanup();
 
   if (RequiresNormalCleanup) {
     // If we have a fallthrough and no other need for the cleanup,
     // emit it directly.
     if (HasFallthrough && !HasFixups && !HasExistingBranches) {
-      EmitLazyCleanup(CGF, Fn, /*ForEH*/ false);
+      EmitCleanup(*this, Fn, /*ForEH*/ false);
 
     // Otherwise, the best approach is to thread everything through
     // the cleanup block and then try to clean up after ourselves.
     } else {
       // Force the entry block to exist.
       if (!HasExistingBranches) {
-        NormalEntry = CGF.createBasicBlock("cleanup");
-        CreateCleanupSwitch(CGF, NormalEntry);
+        NormalEntry = createBasicBlock("cleanup");
+        CreateCleanupSwitch(*this, NormalEntry);
       }
 
-      CGF.EmitBlock(NormalEntry);
+      EmitBlock(NormalEntry);
 
       // Thread the fallthrough edge through the (momentarily trivial)
       // cleanup.
       llvm::BasicBlock *FallthroughDestination = 0;
       if (HasFallthrough) {
         assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator()));
-        FallthroughDestination = CGF.createBasicBlock("cleanup.cont");
+        FallthroughDestination = createBasicBlock("cleanup.cont");
 
         BranchFixup Fix;
         Fix.Destination = FallthroughDestination;
@@ -924,41 +928,32 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
         cast<llvm::BranchInst>(Fix.LatestBranch)
           ->setSuccessor(0, Fix.Destination);
 
-        ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry);
+        ThreadFixupThroughCleanup(*this, Fix, NormalEntry, NormalEntry);
       }
 
       // Thread any "real" fixups we need to thread.
-      for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups();
+      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
            I != E; ++I)
-        if (CGF.EHStack.getBranchFixup(I).Destination)
-          ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I),
+        if (EHStack.getBranchFixup(I).Destination)
+          ThreadFixupThroughCleanup(*this, EHStack.getBranchFixup(I),
                                     NormalEntry, NormalEntry);
 
-      SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry);
+      SplitAndEmitCleanup(*this, Fn, /*ForEH*/ false, NormalEntry);
 
       if (HasFallthrough)
-        CGF.EmitBlock(FallthroughDestination);
+        EmitBlock(FallthroughDestination);
     }
   }
 
   // Emit the EH cleanup if required.
   if (RequiresEHCleanup) {
-    CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP();
-    CGF.EmitBlock(EHEntry);
-    SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry);
-    CGF.Builder.restoreIP(SavedIP);
+    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
+    EmitBlock(EHEntry);
+    SplitAndEmitCleanup(*this, Fn, /*ForEH*/ true, EHEntry);
+    Builder.restoreIP(SavedIP);
   }
 }
 
-/// Pops a cleanup block.  If the block includes a normal cleanup, the
-/// current insertion point is threaded through the cleanup, as are
-/// any branch fixups on the cleanup.
-void CodeGenFunction::PopCleanupBlock() {
-  assert(!EHStack.empty() && "cleanup stack is empty!");
-  assert(isa<EHLazyCleanupScope>(*EHStack.begin()));
-  return PopLazyCleanupBlock(*this);
-}
-
 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
   if (!HaveInsertPoint())
     return;
@@ -990,8 +985,8 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
 
   for (EHScopeStack::iterator I = EHStack.begin(),
          E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHLazyCleanupScope>(*I)) {
-      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+    if (isa<EHCleanupScope>(*I)) {
+      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
       if (Scope.isNormalCleanup()) {
         llvm::BasicBlock *Block = Scope.getNormalBlock();
         if (!Block) {
@@ -1034,8 +1029,8 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {
 
   for (EHScopeStack::iterator I = EHStack.begin(),
          E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHLazyCleanupScope>(*I)) {
-      EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
+    if (isa<EHCleanupScope>(*I)) {
+      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
       if (Scope.isEHCleanup()) {
         llvm::BasicBlock *Block = Scope.getEHBlock();
         if (!Block) {


@@ -129,13 +129,14 @@ public:
   }
 };
 
-/// A lazy cleanup.  Subclasses must be POD-like: cleanups will
-/// not be destructed, and they will be allocated on the cleanup
-/// stack and freely copied and moved around.
+/// Information for lazily generating a cleanup.  Subclasses must be
+/// POD-like: cleanups will not be destructed, and they will be
+/// allocated on the cleanup stack and freely copied and moved
+/// around.
 ///
-/// LazyCleanup implementations should generally be declared in an
+/// Cleanup implementations should generally be declared in an
 /// anonymous namespace.
-class LazyCleanup {
+class Cleanup {
 public:
   // Anchor the construction vtable.  We use the destructor because
   // gcc gives an obnoxious warning if there are virtual methods
@@ -144,7 +145,7 @@ public:
   // doesn't seem to be any other way around this warning.
   //
   // This destructor will never be called.
-  virtual ~LazyCleanup();
+  virtual ~Cleanup();
 
   /// Emit the cleanup.  For normal cleanups, this is run in the
   /// same EH context as when the cleanup was pushed, i.e. the
@@ -204,7 +205,7 @@ private:
 
   void popNullFixups();
 
-  void *pushLazyCleanup(CleanupKind K, size_t DataSize);
+  void *pushCleanup(CleanupKind K, size_t DataSize);
 
 public:
   EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
@@ -217,49 +218,49 @@ public:
 
   /// Push a lazily-created cleanup on the stack.
   template <class T>
-  void pushLazyCleanup(CleanupKind Kind) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T();
+  void pushCleanup(CleanupKind Kind) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T();
     (void) Obj;
   }
 
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0);
+  void pushCleanup(CleanupKind Kind, A0 a0) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0);
     (void) Obj;
   }
 
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1);
     (void) Obj;
   }
 
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
    (void) Obj;
  }
 
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2, class A3>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
     (void) Obj;
   }
 
   /// Push a lazily-created cleanup on the stack.
   template <class T, class A0, class A1, class A2, class A3, class A4>
-  void pushLazyCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
-    void *Buffer = pushLazyCleanup(Kind, sizeof(T));
-    LazyCleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
+  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
+    void *Buffer = pushCleanup(Kind, sizeof(T));
+    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
     (void) Obj;
   }