//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"
#include "Address.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
class AllocaInst;
}

namespace clang {
class FunctionDecl;
namespace CodeGen {
class CodeGenModule;
class CodeGenFunction;

/// The MS C++ ABI needs a pointer to RTTI data plus some flags to describe the
/// type of a catch handler, so we use this wrapper.
struct CatchTypeInfo {
  llvm::Constant *RTTI;
  unsigned Flags;
};
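
// For illustration: a catch-all clause is modeled as a CatchTypeInfo whose
// RTTI pointer is null and whose flags are zero, e.g.
//
//   CatchTypeInfo CatchAll{/*RTTI=*/nullptr, /*Flags=*/0};
//
// which is what EHCatchScope::setCatchAllHandler (below) forwards to
// setHandler and what EHCatchScope::Handler::isCatchAll tests for.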

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 3;
  };
  enum { NumCommonBits = 3 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether this cleanup is a lifetime marker
    unsigned IsLifetimeMarker : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter, PadEnd };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    CatchTypeInfo Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type.RTTI == nullptr; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
    assert(CatchBits.NumHandlers == numHandlers && "NumHandlers overflow?");
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, CatchTypeInfo{nullptr, 0}, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = CatchTypeInfo{Type, 0};
    getHandlers()[I].Block = Block;
  }

  void setHandler(unsigned I, CatchTypeInfo Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
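
// Example sketch (illustrative only; `catchScope`, `Int32RTTI`, and the basic
// blocks are placeholder names): a catch scope sized for two handlers can be
// filled in with one typed handler and one catch-all, and then walked with the
// iterator interface above.
//
//   catchScope.setHandler(0, Int32RTTI, CatchIntBlock); // catch (int)
//   catchScope.setCatchAllHandler(1, CatchAllBlock);    // catch (...)
//
//   for (EHCatchScope::iterator I = catchScope.begin(), E = catchScope.end();
//        I != E; ++I)
//     if (I->isCatchAll()) {
//       // unconditional dispatch; no type check needed
//     }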

/// A cleanup scope which generates the cleanup blocks lazily.
class LLVM_ALIGNAS(/*alignof(uint64_t)*/ 8) EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them. This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  /// The number of fixups required by enclosing scopes (not including
  /// this one). If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
      : EHScope(EHScope::Cleanup, enclosingEH),
        EnclosingNormal(enclosingNormal), NormalBlock(nullptr),
        ActiveFlag(nullptr), ExtInfo(nullptr), FixupDepth(fixupDepth) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.IsLifetimeMarker = false;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  void Destroy() {
    delete ExtInfo;
  }
  // Objects of EHCleanupScope are not destructed. Use Destroy().
  ~EHCleanupScope() = delete;

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  bool isLifetimeMarker() const { return CleanupBits.IsLifetimeMarker; }
  void setLifetimeMarker() { CleanupBits.IsLifetimeMarker = true; }

  bool hasActiveFlag() const { return ActiveFlag != nullptr; }
  Address getActiveFlag() const {
    return Address(ActiveFlag, CharUnits::One());
  }
  void setActiveFlag(Address Var) {
    assert(Var.getAlignment().isOne());
    ActiveFlag = cast<llvm::AllocaInst>(Var.getPointer());
  }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope. A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it. Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block).second)
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope. A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through. Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block).second;
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
// NOTE: there's a bunch of different data classes tacked on after an
// EHCleanupScope. It is asserted (in EHScopeStack::pushCleanup*) that
// they don't require greater alignment than ScopeStackAlignment. So,
// EHCleanupScope ought to have alignment equal to that -- not more
// (would be misaligned by the stack allocator), and not less (would
// break the appended classes).
static_assert(alignof(EHCleanupScope) == EHScopeStack::ScopeStackAlignment,
              "EHCleanupScope expected alignment");
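
// Worked example (a sketch; `AScope`, `BScope`, `BreakDest`, and `DestIndex`
// are placeholder names): for the loop from the comments above,
//
//   for (;;) { A a; B b; break; }
//
// the `break` destination lies outside both cleanup scopes. B's scope (the
// innermost) records it as a branch-through, while A's scope, whose enclosing
// normal scope contains the destination, records it as a branch-after keyed
// by the switch index chosen for that destination:
//
//   BScope.addBranchThrough(BreakDest);
//   AScope.addBranchAfter(DestIndex, BreakDest);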

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
    assert(FilterBits.NumFilters == numFilters && "NumFilters overflow");
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
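
// Example sketch (placeholder names): for a C++ exception specification like
//   void f() throw(int, float);
// IR generation can push a filter scope with two slots and fill each slot
// with the corresponding type's RTTI value:
//
//   filterScope.setFilter(0, IntRTTI);
//   filterScope.setFilter(1, FloatRTTI);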

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

class EHPadEndScope : public EHScope {
public:
  EHPadEndScope(EHScopeStack::stable_iterator enclosingEHScope)
      : EHScope(PadEnd, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHPadEndScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == PadEnd;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    size_t Size;
    switch (get()->getKind()) {
    case EHScope::Catch:
      Size = EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope *>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Size = EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope *>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Size = static_cast<const EHCleanupScope *>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Size = EHTerminateScope::getSize();
      break;

    case EHScope::PadEnd:
      Size = EHPadEndScope::getSize();
      break;
    }
    Ptr += llvm::alignTo(Size, ScopeStackAlignment);
    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers()));
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  deallocate(EHTerminateScope::getSize());
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
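
// Example sketch (illustrative; `EHStack` stands for whatever EHScopeStack
// instance the caller owns): the iterator above pairs with the classof hooks
// on each scope class to support the usual LLVM RTTI utilities while walking
// the stack from innermost to outermost scope.
//
//   for (EHScopeStack::iterator it = EHStack.begin(), end = EHStack.end();
//        it != end; ++it) {
//     if (auto *cleanup = llvm::dyn_cast<EHCleanupScope>(&*it)) {
//       // e.g. skip cleanups that are not currently active
//       if (!cleanup->isActive()) continue;
//     } else if (llvm::isa<EHCatchScope>(*it)) {
//       // handler dispatch, etc.
//     }
//   }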

/// The exceptions personality for a function.
struct EHPersonality {
  const char *PersonalityFn;

  // If this is non-null, this personality requires a non-standard
  // function for rethrowing an exception after a catchall cleanup.
  // This function must have prototype void(void*).
  const char *CatchallRethrowFn;

  static const EHPersonality &get(CodeGenModule &CGM, const FunctionDecl *FD);
  static const EHPersonality &get(CodeGenFunction &CGF);

  static const EHPersonality GNU_C;
  static const EHPersonality GNU_C_SJLJ;
  static const EHPersonality GNU_C_SEH;
  static const EHPersonality GNU_ObjC;
  static const EHPersonality GNU_ObjC_SJLJ;
  static const EHPersonality GNU_ObjC_SEH;
  static const EHPersonality GNUstep_ObjC;
  static const EHPersonality GNU_ObjCXX;
  static const EHPersonality NeXT_ObjC;
  static const EHPersonality GNU_CPlusPlus;
  static const EHPersonality GNU_CPlusPlus_SJLJ;
  static const EHPersonality GNU_CPlusPlus_SEH;
  static const EHPersonality MSVC_except_handler;
  static const EHPersonality MSVC_C_specific_handler;
  static const EHPersonality MSVC_CxxFrameHandler3;

  /// Does this personality use landingpads or the family of pad instructions
  /// designed to form funclets?
  bool usesFuncletPads() const { return isMSVCPersonality(); }

  bool isMSVCPersonality() const {
    return this == &MSVC_except_handler || this == &MSVC_C_specific_handler ||
           this == &MSVC_CxxFrameHandler3;
  }

  bool isMSVCXXPersonality() const { return this == &MSVC_CxxFrameHandler3; }
};
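
// Example sketch: a caller typically looks the personality up once and then
// branches on its flavor (`CGF` here is a placeholder CodeGenFunction &):
//
//   const EHPersonality &Personality = EHPersonality::get(CGF);
//   if (Personality.usesFuncletPads()) {
//     // emit funclet-style pads (cleanuppad/catchpad)
//   } else {
//     // emit landingpad-based unwinding
//   }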

}
}

#endif