forked from OSchip/llvm-project
Move the vtable builder to CGVtable.cpp, general cleanup.
llvm-svn: 83798
This commit is contained in:
parent
6e6ac47125
commit
2bb27f53e0
|
@ -41,6 +41,7 @@
|
|||
1A6FE7090FD6F85800E00CA9 /* CGCXXTemp.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1A6FE7080FD6F85800E00CA9 /* CGCXXTemp.cpp */; };
|
||||
1A701B640F7C8FE400FEC4D1 /* SemaAccess.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1A701B630F7C8FE400FEC4D1 /* SemaAccess.cpp */; };
|
||||
1A7342480C7B57D500122F56 /* CGObjC.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1A7342470C7B57D500122F56 /* CGObjC.cpp */; };
|
||||
1A81AA19108144F40094E50B /* CGVtable.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1A81AA18108144F40094E50B /* CGVtable.cpp */; };
|
||||
1A869A700BA2164C008DA07A /* LiteralSupport.h in CopyFiles */ = {isa = PBXBuildFile; fileRef = 1A869A6E0BA2164C008DA07A /* LiteralSupport.h */; };
|
||||
1A869AA80BA21ABA008DA07A /* LiteralSupport.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1A869AA70BA21ABA008DA07A /* LiteralSupport.cpp */; };
|
||||
1AA1D91810125DE30078DEBC /* RecordLayoutBuilder.cpp in Sources */ = {isa = PBXBuildFile; fileRef = 1AA1D91610125DE30078DEBC /* RecordLayoutBuilder.cpp */; };
|
||||
|
@ -386,6 +387,8 @@
|
|||
1A701B630F7C8FE400FEC4D1 /* SemaAccess.cpp */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.cpp.cpp; name = SemaAccess.cpp; path = lib/Sema/SemaAccess.cpp; sourceTree = "<group>"; tabWidth = 2; };
|
||||
1A72BEAC0D641E9400B085E9 /* Attr.h */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.c.h; name = Attr.h; path = clang/AST/Attr.h; sourceTree = "<group>"; tabWidth = 2; };
|
||||
1A7342470C7B57D500122F56 /* CGObjC.cpp */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.cpp.cpp; name = CGObjC.cpp; path = lib/CodeGen/CGObjC.cpp; sourceTree = "<group>"; tabWidth = 2; };
|
||||
1A81AA18108144F40094E50B /* CGVtable.cpp */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.cpp.cpp; name = CGVtable.cpp; path = lib/CodeGen/CGVtable.cpp; sourceTree = "<group>"; tabWidth = 2; };
|
||||
1A81AA5D108278A20094E50B /* CGVtable.h */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.c.h; name = CGVtable.h; path = lib/CodeGen/CGVtable.h; sourceTree = "<group>"; tabWidth = 2; };
|
||||
1A869A6E0BA2164C008DA07A /* LiteralSupport.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LiteralSupport.h; sourceTree = "<group>"; };
|
||||
1A869AA70BA21ABA008DA07A /* LiteralSupport.cpp */ = {isa = PBXFileReference; fileEncoding = 30; lastKnownFileType = sourcecode.cpp.cpp; path = LiteralSupport.cpp; sourceTree = "<group>"; };
|
||||
1AA1D91610125DE30078DEBC /* RecordLayoutBuilder.cpp */ = {isa = PBXFileReference; fileEncoding = 4; indentWidth = 2; lastKnownFileType = sourcecode.cpp.cpp; name = RecordLayoutBuilder.cpp; path = lib/AST/RecordLayoutBuilder.cpp; sourceTree = "<group>"; tabWidth = 2; };
|
||||
|
@ -1268,6 +1271,8 @@
|
|||
1A6C01F6108128710072DEE4 /* CGRtti.cpp */,
|
||||
DE4772F90C10EAE5002239E8 /* CGStmt.cpp */,
|
||||
35475B230E7997680000BFE4 /* CGValue.h */,
|
||||
1A81AA18108144F40094E50B /* CGVtable.cpp */,
|
||||
1A81AA5D108278A20094E50B /* CGVtable.h */,
|
||||
DE928B800C0A615B00231DA4 /* CodeGenFunction.h */,
|
||||
DE928B820C0A616000231DA4 /* CodeGenFunction.cpp */,
|
||||
DE928B7C0C0A615100231DA4 /* CodeGenModule.h */,
|
||||
|
@ -1895,6 +1900,7 @@
|
|||
1A6B6E9A1069833600BB4A8F /* CGCXXExpr.cpp in Sources */,
|
||||
1A535ED9107BC45E000C3AE7 /* CXXInheritance.cpp in Sources */,
|
||||
1A6C01F7108128710072DEE4 /* CGRtti.cpp in Sources */,
|
||||
1A81AA19108144F40094E50B /* CGVtable.cpp in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
|
|
|
@ -649,538 +649,6 @@ const char *CodeGenModule::getMangledCXXDtorName(const CXXDestructorDecl *D,
|
|||
return UniqueMangledName(Name.begin(), Name.end());
|
||||
}
|
||||
|
||||
class VtableBuilder {
|
||||
public:
|
||||
/// Index_t - Vtable index type.
|
||||
typedef uint64_t Index_t;
|
||||
private:
|
||||
std::vector<llvm::Constant *> &methods;
|
||||
std::vector<llvm::Constant *> submethods;
|
||||
llvm::Type *Ptr8Ty;
|
||||
/// Class - The most derived class that this vtable is being built for.
|
||||
const CXXRecordDecl *Class;
|
||||
/// BLayout - Layout for the most derived class that this vtable is being
|
||||
/// built for.
|
||||
const ASTRecordLayout &BLayout;
|
||||
llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
|
||||
llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
|
||||
llvm::Constant *rtti;
|
||||
llvm::LLVMContext &VMContext;
|
||||
CodeGenModule &CGM; // Per-module state.
|
||||
/// Index - Maps a method decl into a vtable index. Useful for virtual
|
||||
/// dispatch codegen.
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
|
||||
llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
|
||||
typedef std::pair<Index_t, Index_t> CallOffset;
|
||||
typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
|
||||
Thunks_t Thunks;
|
||||
typedef llvm::DenseMap<const CXXMethodDecl *,
|
||||
std::pair<CallOffset, CallOffset> > CovariantThunks_t;
|
||||
CovariantThunks_t CovariantThunks;
|
||||
std::vector<Index_t> VCalls;
|
||||
typedef CXXRecordDecl::method_iterator method_iter;
|
||||
// FIXME: Linkage should follow vtable
|
||||
const bool Extern;
|
||||
const uint32_t LLVMPointerWidth;
|
||||
Index_t extra;
|
||||
public:
|
||||
VtableBuilder(std::vector<llvm::Constant *> &meth,
|
||||
const CXXRecordDecl *c,
|
||||
CodeGenModule &cgm)
|
||||
: methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
|
||||
rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
|
||||
CGM(cgm), Extern(true),
|
||||
LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
|
||||
Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
|
||||
}
|
||||
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
|
||||
llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
|
||||
{ return VBIndex; }
|
||||
|
||||
llvm::Constant *wrap(Index_t i) {
|
||||
llvm::Constant *m;
|
||||
m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
|
||||
return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
|
||||
}
|
||||
|
||||
llvm::Constant *wrap(llvm::Constant *m) {
|
||||
return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
|
||||
}
|
||||
|
||||
void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
|
||||
const CXXRecordDecl *RD, uint64_t Offset) {
|
||||
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
|
||||
e = RD->bases_end(); i != e; ++i) {
|
||||
const CXXRecordDecl *Base =
|
||||
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
|
||||
if (i->isVirtual() && !SeenVBase.count(Base)) {
|
||||
SeenVBase.insert(Base);
|
||||
int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
|
||||
llvm::Constant *m = wrap(BaseOffset);
|
||||
m = wrap((0?700:0) + BaseOffset);
|
||||
VBIndex[Base] = -(offsets.size()*LLVMPointerWidth/8)
|
||||
- 3*LLVMPointerWidth/8;
|
||||
offsets.push_back(m);
|
||||
}
|
||||
GenerateVBaseOffsets(offsets, Base, Offset);
|
||||
}
|
||||
}
|
||||
|
||||
void StartNewTable() {
|
||||
SeenVBase.clear();
|
||||
}
|
||||
|
||||
Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);
|
||||
|
||||
/// getVbaseOffset - Returns the index into the vtable for the virtual base
|
||||
/// offset for the given (B) virtual base of the derived class D.
|
||||
Index_t getVbaseOffset(QualType qB, QualType qD) {
|
||||
qD = qD->getAs<PointerType>()->getPointeeType();
|
||||
qB = qB->getAs<PointerType>()->getPointeeType();
|
||||
CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
|
||||
CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
|
||||
if (D != Class)
|
||||
return VBlookup(D, B);
|
||||
llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
|
||||
i = VBIndex.find(B);
|
||||
if (i != VBIndex.end())
|
||||
return i->second;
|
||||
// FIXME: temporal botch, is this data here, by the time we need it?
|
||||
|
||||
assert(false && "FIXME: Locate the containing virtual base first");
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
|
||||
bool MorallyVirtual, Index_t Offset) {
|
||||
typedef CXXMethodDecl::method_iterator meth_iter;
|
||||
|
||||
// FIXME: Don't like the nested loops. For very large inheritance
|
||||
// heirarchies we could have a table on the side with the final overridder
|
||||
// and just replace each instance of an overridden method once. Would be
|
||||
// nice to measure the cost/benefit on real code.
|
||||
|
||||
for (meth_iter mi = MD->begin_overridden_methods(),
|
||||
e = MD->end_overridden_methods();
|
||||
mi != e; ++mi) {
|
||||
const CXXMethodDecl *OMD = *mi;
|
||||
llvm::Constant *om;
|
||||
om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
|
||||
om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);
|
||||
|
||||
for (Index_t i = 0, e = submethods.size();
|
||||
i != e; ++i) {
|
||||
// FIXME: begin_overridden_methods might be too lax, covariance */
|
||||
if (submethods[i] != om)
|
||||
continue;
|
||||
QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
|
||||
CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
|
||||
QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
|
||||
CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
|
||||
CallOffset ReturnOffset = std::make_pair(0, 0);
|
||||
if (oret != ret) {
|
||||
// FIXME: calculate offsets for covariance
|
||||
ReturnOffset = std::make_pair(42,getVbaseOffset(oret, ret));
|
||||
}
|
||||
Index[MD] = i;
|
||||
submethods[i] = m;
|
||||
|
||||
Thunks.erase(OMD);
|
||||
if (MorallyVirtual) {
|
||||
Index_t &idx = VCall[OMD];
|
||||
if (idx == 0) {
|
||||
VCallOffset[MD] = Offset/8;
|
||||
idx = VCalls.size()+1;
|
||||
VCalls.push_back(0);
|
||||
} else {
|
||||
VCallOffset[MD] = VCallOffset[OMD];
|
||||
VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
|
||||
}
|
||||
VCall[MD] = idx;
|
||||
CallOffset ThisOffset;
|
||||
// FIXME: calculate non-virtual offset
|
||||
ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
|
||||
if (ReturnOffset.first || ReturnOffset.second)
|
||||
CovariantThunks[MD] = std::make_pair(ThisOffset, ReturnOffset);
|
||||
else
|
||||
Thunks[MD] = ThisOffset;
|
||||
return true;
|
||||
}
|
||||
#if 0
|
||||
// FIXME: finish off
|
||||
int64_t O = VCallOffset[OMD] - Offset/8;
|
||||
if (O) {
|
||||
Thunks[MD] = std::make_pair(O, 0);
|
||||
}
|
||||
#endif
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void InstallThunks() {
|
||||
for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
|
||||
i != e; ++i) {
|
||||
const CXXMethodDecl *MD = i->first;
|
||||
Index_t idx = Index[MD];
|
||||
Index_t nv_O = i->second.first;
|
||||
Index_t v_O = i->second.second;
|
||||
submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
|
||||
}
|
||||
Thunks.clear();
|
||||
for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
|
||||
e = CovariantThunks.end();
|
||||
i != e; ++i) {
|
||||
const CXXMethodDecl *MD = i->first;
|
||||
Index_t idx = Index[MD];
|
||||
Index_t nv_t = i->second.first.first;
|
||||
Index_t v_t = i->second.first.second;
|
||||
Index_t nv_r = i->second.second.first;
|
||||
Index_t v_r = i->second.second.second;
|
||||
submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
|
||||
v_r);
|
||||
}
|
||||
CovariantThunks.clear();
|
||||
}
|
||||
|
||||
void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> > *Path, bool MorallyVirtual) {
|
||||
for (std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> >::reverse_iterator i =Path->rbegin(),
|
||||
e = Path->rend(); i != e; ++i) {
|
||||
const CXXRecordDecl *RD = i->first;
|
||||
int64_t Offset = i->second;
|
||||
for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
|
||||
++mi) {
|
||||
if (!mi->isVirtual())
|
||||
continue;
|
||||
|
||||
const CXXMethodDecl *MD = *mi;
|
||||
llvm::Constant *m = 0;
|
||||
if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
|
||||
m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
|
||||
else {
|
||||
const FunctionProtoType *FPT =
|
||||
MD->getType()->getAs<FunctionProtoType>();
|
||||
const llvm::Type *Ty =
|
||||
CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
|
||||
FPT->isVariadic());
|
||||
|
||||
m = wrap(CGM.GetAddrOfFunction(MD, Ty));
|
||||
}
|
||||
|
||||
OverrideMethod(MD, m, MorallyVirtual, Offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
|
||||
llvm::Constant *m = 0;
|
||||
if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
|
||||
m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
|
||||
else {
|
||||
const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
|
||||
const llvm::Type *Ty =
|
||||
CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
|
||||
FPT->isVariadic());
|
||||
|
||||
m = wrap(CGM.GetAddrOfFunction(MD, Ty));
|
||||
}
|
||||
|
||||
// If we can find a previously allocated slot for this, reuse it.
|
||||
if (OverrideMethod(MD, m, MorallyVirtual, Offset))
|
||||
return;
|
||||
|
||||
// else allocate a new slot.
|
||||
Index[MD] = submethods.size();
|
||||
submethods.push_back(m);
|
||||
if (MorallyVirtual) {
|
||||
VCallOffset[MD] = Offset/8;
|
||||
Index_t &idx = VCall[MD];
|
||||
// Allocate the first one, after that, we reuse the previous one.
|
||||
if (idx == 0) {
|
||||
idx = VCalls.size()+1;
|
||||
VCalls.push_back(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
|
||||
Index_t Offset) {
|
||||
for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
|
||||
++mi)
|
||||
if (mi->isVirtual())
|
||||
AddMethod(*mi, MorallyVirtual, Offset);
|
||||
}
|
||||
|
||||
void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
|
||||
const CXXRecordDecl *PrimaryBase,
|
||||
bool PrimaryBaseWasVirtual, bool MorallyVirtual,
|
||||
int64_t Offset) {
|
||||
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
|
||||
e = RD->bases_end(); i != e; ++i) {
|
||||
if (i->isVirtual())
|
||||
continue;
|
||||
const CXXRecordDecl *Base =
|
||||
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
|
||||
if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
|
||||
uint64_t o = Offset + Layout.getBaseClassOffset(Base);
|
||||
StartNewTable();
|
||||
std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> > S;
|
||||
S.push_back(std::make_pair(RD, Offset));
|
||||
GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
|
||||
const ASTRecordLayout &Layout,
|
||||
const CXXRecordDecl *PrimaryBase,
|
||||
bool PrimaryBaseWasVirtual, bool MorallyVirtual,
|
||||
int64_t Offset, bool ForVirtualBase) {
|
||||
StartNewTable();
|
||||
extra = 0;
|
||||
// FIXME: Cleanup.
|
||||
if (!ForVirtualBase) {
|
||||
// then virtual base offsets...
|
||||
for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
|
||||
e = offsets.rend(); i != e; ++i)
|
||||
methods.push_back(*i);
|
||||
}
|
||||
|
||||
// The vcalls come first...
|
||||
for (std::vector<Index_t>::reverse_iterator i=VCalls.rbegin(),
|
||||
e=VCalls.rend();
|
||||
i != e; ++i)
|
||||
methods.push_back(wrap((0?600:0) + *i));
|
||||
VCalls.clear();
|
||||
|
||||
if (ForVirtualBase) {
|
||||
// then virtual base offsets...
|
||||
for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
|
||||
e = offsets.rend(); i != e; ++i)
|
||||
methods.push_back(*i);
|
||||
}
|
||||
|
||||
methods.push_back(wrap(-(Offset/8)));
|
||||
methods.push_back(rtti);
|
||||
Index_t AddressPoint = methods.size();
|
||||
|
||||
InstallThunks();
|
||||
methods.insert(methods.end(), submethods.begin(), submethods.end());
|
||||
submethods.clear();
|
||||
|
||||
// and then the non-virtual bases.
|
||||
NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
|
||||
MorallyVirtual, Offset);
|
||||
return AddressPoint;
|
||||
}
|
||||
|
||||
void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
|
||||
if (!RD->isDynamicClass())
|
||||
return;
|
||||
|
||||
const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
|
||||
const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
|
||||
const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
|
||||
|
||||
// vtables are composed from the chain of primaries.
|
||||
if (PrimaryBase) {
|
||||
if (PrimaryBaseWasVirtual)
|
||||
IndirectPrimary.insert(PrimaryBase);
|
||||
Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
|
||||
}
|
||||
|
||||
// And add the virtuals for the class to the primary vtable.
|
||||
AddMethods(RD, MorallyVirtual, Offset);
|
||||
}
|
||||
|
||||
int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
|
||||
bool MorallyVirtual = false, int64_t Offset = 0,
|
||||
bool ForVirtualBase = false,
|
||||
std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> > *Path = 0) {
|
||||
if (!RD->isDynamicClass())
|
||||
return 0;
|
||||
|
||||
const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
|
||||
const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
|
||||
const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
|
||||
|
||||
std::vector<llvm::Constant *> offsets;
|
||||
extra = 0;
|
||||
GenerateVBaseOffsets(offsets, RD, Offset);
|
||||
if (ForVirtualBase)
|
||||
extra = offsets.size();
|
||||
|
||||
// vtables are composed from the chain of primaries.
|
||||
if (PrimaryBase) {
|
||||
if (PrimaryBaseWasVirtual)
|
||||
IndirectPrimary.insert(PrimaryBase);
|
||||
Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
|
||||
}
|
||||
|
||||
// And add the virtuals for the class to the primary vtable.
|
||||
AddMethods(RD, MorallyVirtual, Offset);
|
||||
|
||||
if (Path)
|
||||
OverrideMethods(Path, MorallyVirtual);
|
||||
|
||||
return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
|
||||
MorallyVirtual, Offset, ForVirtualBase);
|
||||
}
|
||||
|
||||
void GenerateVtableForVBases(const CXXRecordDecl *RD,
|
||||
int64_t Offset = 0,
|
||||
std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> > *Path = 0) {
|
||||
bool alloc = false;
|
||||
if (Path == 0) {
|
||||
alloc = true;
|
||||
Path = new std::vector<std::pair<const CXXRecordDecl *,
|
||||
int64_t> >;
|
||||
}
|
||||
// FIXME: We also need to override using all paths to a virtual base,
|
||||
// right now, we just process the first path
|
||||
Path->push_back(std::make_pair(RD, Offset));
|
||||
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
|
||||
e = RD->bases_end(); i != e; ++i) {
|
||||
const CXXRecordDecl *Base =
|
||||
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
|
||||
if (i->isVirtual() && !IndirectPrimary.count(Base)) {
|
||||
// Mark it so we don't output it twice.
|
||||
IndirectPrimary.insert(Base);
|
||||
StartNewTable();
|
||||
int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
|
||||
GenerateVtableForBase(Base, true, BaseOffset, true, Path);
|
||||
}
|
||||
int64_t BaseOffset = Offset;
|
||||
if (i->isVirtual())
|
||||
BaseOffset = BLayout.getVBaseClassOffset(Base);
|
||||
if (Base->getNumVBases())
|
||||
GenerateVtableForVBases(Base, BaseOffset, Path);
|
||||
}
|
||||
Path->pop_back();
|
||||
if (alloc)
|
||||
delete Path;
|
||||
}
|
||||
};
|
||||
|
||||
class VtableInfo {
|
||||
public:
|
||||
typedef VtableBuilder::Index_t Index_t;
|
||||
private:
|
||||
CodeGenModule &CGM; // Per-module state.
|
||||
/// Index_t - Vtable index type.
|
||||
typedef llvm::DenseMap<const CXXMethodDecl *, Index_t> ElTy;
|
||||
typedef llvm::DenseMap<const CXXRecordDecl *, ElTy *> MapTy;
|
||||
// FIXME: Move to Context.
|
||||
static MapTy IndexFor;
|
||||
|
||||
typedef llvm::DenseMap<const CXXRecordDecl *, Index_t> VBElTy;
|
||||
typedef llvm::DenseMap<const CXXRecordDecl *, VBElTy *> VBMapTy;
|
||||
// FIXME: Move to Context.
|
||||
static VBMapTy VBIndexFor;
|
||||
public:
|
||||
VtableInfo(CodeGenModule &cgm) : CGM(cgm) { }
|
||||
void RegisterIndex(const CXXRecordDecl *RD, const ElTy &e) {
|
||||
assert(IndexFor.find(RD) == IndexFor.end() && "Don't compute vtbl twice");
|
||||
// We own a copy of this, it will go away shortly.
|
||||
IndexFor[RD] = new ElTy (e);
|
||||
}
|
||||
void RegisterVBIndex(const CXXRecordDecl *RD, const VBElTy &e) {
|
||||
assert(VBIndexFor.find(RD) == VBIndexFor.end() && "Don't compute vtbl twice");
|
||||
// We own a copy of this, it will go away shortly.
|
||||
VBIndexFor[RD] = new VBElTy (e);
|
||||
}
|
||||
Index_t lookup(const CXXMethodDecl *MD) {
|
||||
const CXXRecordDecl *RD = MD->getParent();
|
||||
MapTy::iterator I = IndexFor.find(RD);
|
||||
if (I == IndexFor.end()) {
|
||||
std::vector<llvm::Constant *> methods;
|
||||
// FIXME: This seems expensive. Can we do a partial job to get
|
||||
// just this data.
|
||||
VtableBuilder b(methods, RD, CGM);
|
||||
b.GenerateVtableForBase(RD);
|
||||
b.GenerateVtableForVBases(RD);
|
||||
RegisterIndex(RD, b.getIndex());
|
||||
I = IndexFor.find(RD);
|
||||
}
|
||||
assert(I->second->find(MD)!=I->second->end() && "Can't find vtable index");
|
||||
return (*I->second)[MD];
|
||||
}
|
||||
Index_t VBlookup(const CXXRecordDecl *RD, const CXXRecordDecl *BD) {
|
||||
VBMapTy::iterator I = VBIndexFor.find(RD);
|
||||
if (I == VBIndexFor.end()) {
|
||||
std::vector<llvm::Constant *> methods;
|
||||
// FIXME: This seems expensive. Can we do a partial job to get
|
||||
// just this data.
|
||||
VtableBuilder b(methods, RD, CGM);
|
||||
b.GenerateVtableForBase(RD);
|
||||
b.GenerateVtableForVBases(RD);
|
||||
RegisterVBIndex(RD, b.getVBIndex());
|
||||
I = VBIndexFor.find(RD);
|
||||
}
|
||||
assert(I->second->find(BD)!=I->second->end() && "Can't find vtable index");
|
||||
return (*I->second)[BD];
|
||||
}
|
||||
};
|
||||
|
||||
// FIXME: move to Context
|
||||
static VtableInfo *vtableinfo;
|
||||
|
||||
VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
|
||||
CXXRecordDecl *B) {
|
||||
if (vtableinfo == 0)
|
||||
vtableinfo = new VtableInfo(CGM);
|
||||
|
||||
return vtableinfo->VBlookup(D, B);
|
||||
}
|
||||
|
||||
|
||||
// FIXME: Move to Context.
|
||||
VtableInfo::MapTy VtableInfo::IndexFor;
|
||||
|
||||
// FIXME: Move to Context.
|
||||
VtableInfo::VBMapTy VtableInfo::VBIndexFor;
|
||||
|
||||
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
|
||||
llvm::SmallString<256> OutName;
|
||||
llvm::raw_svector_ostream Out(OutName);
|
||||
QualType ClassTy;
|
||||
mangleCXXVtable(CGM.getMangleContext(), RD, Out);
|
||||
llvm::GlobalVariable::LinkageTypes linktype;
|
||||
linktype = llvm::GlobalValue::WeakAnyLinkage;
|
||||
std::vector<llvm::Constant *> methods;
|
||||
llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
|
||||
int64_t AddressPoint;
|
||||
|
||||
VtableBuilder b(methods, RD, CGM);
|
||||
|
||||
// First comes the vtables for all the non-virtual bases...
|
||||
AddressPoint = b.GenerateVtableForBase(RD);
|
||||
|
||||
// then the vtables for all the virtual bases.
|
||||
b.GenerateVtableForVBases(RD);
|
||||
|
||||
llvm::Constant *C;
|
||||
llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
|
||||
C = llvm::ConstantArray::get(type, methods);
|
||||
llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
|
||||
linktype, C, Out.str());
|
||||
vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
|
||||
vtable = Builder.CreateGEP(vtable,
|
||||
llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
|
||||
AddressPoint*LLVMPointerWidth/8));
|
||||
return vtable;
|
||||
}
|
||||
|
||||
llvm::Constant *CodeGenFunction::GenerateThunk(llvm::Function *Fn,
|
||||
const CXXMethodDecl *MD,
|
||||
bool Extern, int64_t nv,
|
||||
|
@ -1301,10 +769,6 @@ llvm::Value *
|
|||
CodeGenFunction::GetVirtualCXXBaseClassOffset(llvm::Value *This,
|
||||
const CXXRecordDecl *ClassDecl,
|
||||
const CXXRecordDecl *BaseClassDecl) {
|
||||
// FIXME: move to Context
|
||||
if (vtableinfo == 0)
|
||||
vtableinfo = new VtableInfo(CGM);
|
||||
|
||||
const llvm::Type *Int8PtrTy =
|
||||
llvm::Type::getInt8Ty(VMContext)->getPointerTo();
|
||||
|
||||
|
@ -1312,10 +776,11 @@ CodeGenFunction::GetVirtualCXXBaseClassOffset(llvm::Value *This,
|
|||
Int8PtrTy->getPointerTo());
|
||||
VTablePtr = Builder.CreateLoad(VTablePtr, "vtable");
|
||||
|
||||
int64_t VBaseOffsetIndex =
|
||||
CGM.getVtableInfo().getVirtualBaseOffsetIndex(ClassDecl, BaseClassDecl);
|
||||
|
||||
llvm::Value *VBaseOffsetPtr =
|
||||
Builder.CreateConstGEP1_64(VTablePtr,
|
||||
vtableinfo->VBlookup(ClassDecl, BaseClassDecl),
|
||||
"vbase.offset.ptr");
|
||||
Builder.CreateConstGEP1_64(VTablePtr, VBaseOffsetIndex, "vbase.offset.ptr");
|
||||
const llvm::Type *PtrDiffTy =
|
||||
ConvertType(getContext().getPointerDiffType());
|
||||
|
||||
|
@ -1332,7 +797,7 @@ CodeGenFunction::BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This,
|
|||
const llvm::Type *Ty) {
|
||||
// FIXME: If we know the dynamic type, we don't have to do a virtual dispatch.
|
||||
|
||||
uint64_t Index = CGM.GetVtableIndex(MD);
|
||||
int64_t Index = CGM.getVtableInfo().getMethodVtableIndex(MD);
|
||||
|
||||
Ty = llvm::PointerType::get(Ty, 0);
|
||||
Ty = llvm::PointerType::get(Ty, 0);
|
||||
|
@ -1345,14 +810,6 @@ CodeGenFunction::BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This,
|
|||
return vfn;
|
||||
}
|
||||
|
||||
uint64_t CodeGenModule::GetVtableIndex(const CXXMethodDecl *MD) {
|
||||
// FIXME: move to CodeGenModule.
|
||||
if (vtableinfo == 0)
|
||||
vtableinfo = new VtableInfo(*this);
|
||||
|
||||
return vtableinfo->lookup(MD);
|
||||
}
|
||||
|
||||
/// EmitClassAggrMemberwiseCopy - This routine generates code to copy a class
|
||||
/// array of objects from SrcValue to DestValue. Copying can be either a bitwise
|
||||
/// copy or via a copy constructor call.
|
||||
|
|
|
@ -298,7 +298,8 @@ void AggExprEmitter::VisitUnaryAddrOf(const UnaryOperator *E) {
|
|||
llvm::Value *FuncPtr;
|
||||
|
||||
if (MD->isVirtual()) {
|
||||
uint64_t Index = CGF.CGM.GetVtableIndex(MD->getCanonicalDecl());
|
||||
int64_t Index =
|
||||
CGF.CGM.getVtableInfo().getMethodVtableIndex(MD);
|
||||
|
||||
FuncPtr = llvm::ConstantInt::get(PtrDiffTy, Index + 1);
|
||||
} else {
|
||||
|
|
|
@ -413,7 +413,7 @@ public:
|
|||
|
||||
// Get the function pointer (or index if this is a virtual function).
|
||||
if (MD->isVirtual()) {
|
||||
uint64_t Index = CGM.GetVtableIndex(MD->getCanonicalDecl());
|
||||
int64_t Index = CGM.getVtableInfo().getMethodVtableIndex(MD);
|
||||
|
||||
Values[0] = llvm::ConstantInt::get(PtrDiffTy, Index + 1);
|
||||
} else {
|
||||
|
|
|
@ -0,0 +1,534 @@
|
|||
//===--- CGVtable.cpp - Emit LLVM Code for C++ vtables --------------------===//
|
||||
//
|
||||
// The LLVM Compiler Infrastructure
|
||||
//
|
||||
// This file is distributed under the University of Illinois Open Source
|
||||
// License. See LICENSE.TXT for details.
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
//
|
||||
// This contains code dealing with C++ code generation of virtual tables.
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "CodeGenModule.h"
|
||||
#include "CodeGenFunction.h"
|
||||
|
||||
#include "clang/AST/RecordLayout.h"
|
||||
|
||||
using namespace clang;
|
||||
using namespace CodeGen;
|
||||
|
||||
class VtableBuilder {
|
||||
public:
|
||||
/// Index_t - Vtable index type.
|
||||
typedef uint64_t Index_t;
|
||||
private:
|
||||
std::vector<llvm::Constant *> &methods;
|
||||
std::vector<llvm::Constant *> submethods;
|
||||
llvm::Type *Ptr8Ty;
|
||||
/// Class - The most derived class that this vtable is being built for.
|
||||
const CXXRecordDecl *Class;
|
||||
/// BLayout - Layout for the most derived class that this vtable is being
|
||||
/// built for.
|
||||
const ASTRecordLayout &BLayout;
|
||||
llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
|
||||
llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
|
||||
llvm::Constant *rtti;
|
||||
llvm::LLVMContext &VMContext;
|
||||
CodeGenModule &CGM; // Per-module state.
|
||||
/// Index - Maps a method decl into a vtable index. Useful for virtual
|
||||
/// dispatch codegen.
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
|
||||
llvm::DenseMap<const CXXRecordDecl *, Index_t> VBIndex;
|
||||
typedef std::pair<Index_t, Index_t> CallOffset;
|
||||
typedef llvm::DenseMap<const CXXMethodDecl *, CallOffset> Thunks_t;
|
||||
Thunks_t Thunks;
|
||||
typedef llvm::DenseMap<const CXXMethodDecl *,
|
||||
std::pair<CallOffset, CallOffset> > CovariantThunks_t;
|
||||
CovariantThunks_t CovariantThunks;
|
||||
std::vector<Index_t> VCalls;
|
||||
typedef CXXRecordDecl::method_iterator method_iter;
|
||||
// FIXME: Linkage should follow vtable
|
||||
const bool Extern;
|
||||
const uint32_t LLVMPointerWidth;
|
||||
Index_t extra;
|
||||
public:
|
||||
VtableBuilder(std::vector<llvm::Constant *> &meth,
|
||||
const CXXRecordDecl *c,
|
||||
CodeGenModule &cgm)
|
||||
: methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
|
||||
rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
|
||||
CGM(cgm), Extern(true),
|
||||
LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
|
||||
Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
|
||||
}
|
||||
|
||||
llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
|
||||
llvm::DenseMap<const CXXRecordDecl *, Index_t> &getVBIndex()
|
||||
{ return VBIndex; }
|
||||
|
||||
llvm::Constant *wrap(Index_t i) {
|
||||
llvm::Constant *m;
|
||||
m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), i);
|
||||
return llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
|
||||
}
|
||||
|
||||
llvm::Constant *wrap(llvm::Constant *m) {
|
||||
return llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
|
||||
}
|
||||
|
||||
void GenerateVBaseOffsets(std::vector<llvm::Constant *> &offsets,
|
||||
const CXXRecordDecl *RD, uint64_t Offset) {
|
||||
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
|
||||
e = RD->bases_end(); i != e; ++i) {
|
||||
const CXXRecordDecl *Base =
|
||||
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
|
||||
if (i->isVirtual() && !SeenVBase.count(Base)) {
|
||||
SeenVBase.insert(Base);
|
||||
int64_t BaseOffset = -(Offset/8) + BLayout.getVBaseClassOffset(Base)/8;
|
||||
llvm::Constant *m = wrap(BaseOffset);
|
||||
m = wrap((0?700:0) + BaseOffset);
|
||||
VBIndex[Base] = -(offsets.size()*LLVMPointerWidth/8)
|
||||
- 3*LLVMPointerWidth/8;
|
||||
offsets.push_back(m);
|
||||
}
|
||||
GenerateVBaseOffsets(offsets, Base, Offset);
|
||||
}
|
||||
}
|
||||
|
||||
void StartNewTable() {
|
||||
SeenVBase.clear();
|
||||
}
|
||||
|
||||
Index_t VBlookup(CXXRecordDecl *D, CXXRecordDecl *B);
|
||||
|
||||
/// getVbaseOffset - Returns the index into the vtable for the virtual base
/// offset for the given (B) virtual base of the derived class D.
/// Both arguments are pointer types; the pointee record decls are looked up.
Index_t getVbaseOffset(QualType qB, QualType qD) {
  qD = qD->getAs<PointerType>()->getPointeeType();
  qB = qB->getAs<PointerType>()->getPointeeType();
  CXXRecordDecl *D = cast<CXXRecordDecl>(qD->getAs<RecordType>()->getDecl());
  CXXRecordDecl *B = cast<CXXRecordDecl>(qB->getAs<RecordType>()->getDecl());
  // If D is not the class this builder is laying out, defer to the
  // module-wide cache (VBlookup) instead of our local VBIndex map.
  if (D != Class)
    return VBlookup(D, B);
  llvm::DenseMap<const CXXRecordDecl *, Index_t>::iterator i;
  i = VBIndex.find(B);
  if (i != VBIndex.end())
    return i->second;
  // FIXME: temporal botch, is this data here, by the time we need it?

  // NOTE(review): B may be a vbase of a *containing* virtual base rather
  // than of Class itself; that case is not handled yet and asserts.
  assert(false && "FIXME: Locate the containing virtual base first");
  return 0;
}
|
||||
|
||||
/// OverrideMethod - If MD overrides a method that already occupies a slot in
/// 'submethods', replace that slot with 'm' and set up any required thunks.
/// \param MD             the (possibly overriding) virtual method.
/// \param m              the i8*-wrapped address of MD's code.
/// \param MorallyVirtual true when this table region is reached through a
///                       virtual base, so vcall offsets are needed.
/// \param Offset         offset (in bits) of the current subobject.
/// \return true if an existing slot was reused, false if MD needs a new one.
bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
                    bool MorallyVirtual, Index_t Offset) {
  typedef CXXMethodDecl::method_iterator meth_iter;

  // FIXME: Don't like the nested loops. For very large inheritance
  // heirarchies we could have a table on the side with the final overridder
  // and just replace each instance of an overridden method once. Would be
  // nice to measure the cost/benefit on real code.

  for (meth_iter mi = MD->begin_overridden_methods(),
         e = MD->end_overridden_methods();
       mi != e; ++mi) {
    const CXXMethodDecl *OMD = *mi;
    // Wrap the overridden method's address the same way slots are stored,
    // so it can be compared against submethods entries below.
    llvm::Constant *om;
    om = CGM.GetAddrOfFunction(OMD, Ptr8Ty);
    om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);

    for (Index_t i = 0, e = submethods.size();
         i != e; ++i) {
      // FIXME: begin_overridden_methods might be too lax, covariance */
      if (submethods[i] != om)
        continue;
      // Compare canonical return types to detect covariant returns.
      QualType nc_oret = OMD->getType()->getAs<FunctionType>()->getResultType();
      CanQualType oret = CGM.getContext().getCanonicalType(nc_oret);
      QualType nc_ret = MD->getType()->getAs<FunctionType>()->getResultType();
      CanQualType ret = CGM.getContext().getCanonicalType(nc_ret);
      CallOffset ReturnOffset = std::make_pair(0, 0);
      if (oret != ret) {
        // FIXME: calculate offsets for covariance
        ReturnOffset = std::make_pair(42,getVbaseOffset(oret, ret));
      }
      // Reuse the overridden method's slot for MD.
      Index[MD] = i;
      submethods[i] = m;

      // The overridder supersedes any thunk queued for the old method.
      Thunks.erase(OMD);
      if (MorallyVirtual) {
        // Allocate (or reuse) a vcall offset slot for this method.
        Index_t &idx = VCall[OMD];
        if (idx == 0) {
          VCallOffset[MD] = Offset/8;
          idx = VCalls.size()+1;
          VCalls.push_back(0);
        } else {
          VCallOffset[MD] = VCallOffset[OMD];
          VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
        }
        VCall[MD] = idx;
        CallOffset ThisOffset;
        // FIXME: calculate non-virtual offset
        // The virtual part of the this-adjustment points back at the vcall
        // slot, which sits 'idx + extra + 2' pointers above the address
        // point.
        ThisOffset = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
        // Covariant returns need a return-adjusting thunk; otherwise a
        // plain this-adjusting thunk suffices.
        if (ReturnOffset.first || ReturnOffset.second)
          CovariantThunks[MD] = std::make_pair(ThisOffset, ReturnOffset);
        else
          Thunks[MD] = ThisOffset;
        return true;
      }
#if 0
      // FIXME: finish off
      int64_t O = VCallOffset[OMD] - Offset/8;
      if (O) {
        Thunks[MD] = std::make_pair(O, 0);
      }
#endif
      return true;
    }
  }

  return false;
}
|
||||
|
||||
void InstallThunks() {
|
||||
for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
|
||||
i != e; ++i) {
|
||||
const CXXMethodDecl *MD = i->first;
|
||||
Index_t idx = Index[MD];
|
||||
Index_t nv_O = i->second.first;
|
||||
Index_t v_O = i->second.second;
|
||||
submethods[idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
|
||||
}
|
||||
Thunks.clear();
|
||||
for (CovariantThunks_t::iterator i = CovariantThunks.begin(),
|
||||
e = CovariantThunks.end();
|
||||
i != e; ++i) {
|
||||
const CXXMethodDecl *MD = i->first;
|
||||
Index_t idx = Index[MD];
|
||||
Index_t nv_t = i->second.first.first;
|
||||
Index_t v_t = i->second.first.second;
|
||||
Index_t nv_r = i->second.second.first;
|
||||
Index_t v_r = i->second.second.second;
|
||||
submethods[idx] = CGM.BuildCovariantThunk(MD, Extern, nv_t, v_t, nv_r,
|
||||
v_r);
|
||||
}
|
||||
CovariantThunks.clear();
|
||||
}
|
||||
|
||||
/// OverrideMethods - Replay the virtual methods of every class on 'Path'
/// (a chain of (class, bit-offset) pairs, visited most-derived first via
/// the reverse iteration) so their overrides land in this table's slots.
void OverrideMethods(std::vector<std::pair<const CXXRecordDecl *,
                     int64_t> > *Path, bool MorallyVirtual) {
  for (std::vector<std::pair<const CXXRecordDecl *,
         int64_t> >::reverse_iterator i =Path->rbegin(),
       e = Path->rend(); i != e; ++i) {
    const CXXRecordDecl *RD = i->first;
    int64_t Offset = i->second;
    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
         ++mi) {
      if (!mi->isVirtual())
        continue;

      const CXXMethodDecl *MD = *mi;
      llvm::Constant *m = 0;
      // Destructor slots use the complete-object variant; ordinary methods
      // are taken at their declared function type.  (Same wrapping as in
      // AddMethod.)
      if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
        m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
      else {
        const FunctionProtoType *FPT =
          MD->getType()->getAs<FunctionProtoType>();
        const llvm::Type *Ty =
          CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                         FPT->isVariadic());

        m = wrap(CGM.GetAddrOfFunction(MD, Ty));
      }

      // Only replaces existing slots; the return value is ignored because
      // methods on the path never allocate new slots here.
      OverrideMethod(MD, m, MorallyVirtual, Offset);
    }
  }
}
|
||||
|
||||
/// AddMethod - Place the virtual method MD into the vtable being built,
/// reusing an overridden method's slot when possible, otherwise appending
/// a new slot (and, for morally-virtual regions, a vcall offset slot).
void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
  llvm::Constant *m = 0;
  // Destructor slots use the complete-object variant; ordinary methods are
  // taken at their declared function type.
  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(MD))
    m = wrap(CGM.GetAddrOfCXXDestructor(Dtor, Dtor_Complete));
  else {
    const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
    const llvm::Type *Ty =
      CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
                                     FPT->isVariadic());

    m = wrap(CGM.GetAddrOfFunction(MD, Ty));
  }

  // If we can find a previously allocated slot for this, reuse it.
  if (OverrideMethod(MD, m, MorallyVirtual, Offset))
    return;

  // else allocate a new slot.
  Index[MD] = submethods.size();
  submethods.push_back(m);
  if (MorallyVirtual) {
    VCallOffset[MD] = Offset/8;
    Index_t &idx = VCall[MD];
    // Allocate the first one, after that, we reuse the previous one.
    if (idx == 0) {
      idx = VCalls.size()+1;
      VCalls.push_back(0);
    }
  }
}
|
||||
|
||||
void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
|
||||
Index_t Offset) {
|
||||
for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
|
||||
++mi)
|
||||
if (mi->isVirtual())
|
||||
AddMethod(*mi, MorallyVirtual, Offset);
|
||||
}
|
||||
|
||||
/// NonVirtualBases - Generate sub-vtables for the non-virtual bases of RD.
/// The primary base is skipped (it shares this vtable) unless it was
/// virtual, in which case it still needs its own table here.
void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
                     const CXXRecordDecl *PrimaryBase,
                     bool PrimaryBaseWasVirtual, bool MorallyVirtual,
                     int64_t Offset) {
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    if (i->isVirtual())
      continue;
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
      uint64_t o = Offset + Layout.getBaseClassOffset(Base);
      StartNewTable();
      // Seed the override path with the derived class, so methods it
      // overrides get replayed into the base's sub-vtable.
      std::vector<std::pair<const CXXRecordDecl *,
        int64_t> > S;
      S.push_back(std::make_pair(RD, Offset));
      GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
    }
  }
}
|
||||
|
||||
/// end - Finish the current (sub-)vtable: emit the vbase offsets, vcall
/// offsets, offset-to-top and RTTI slots, install thunks, flush the method
/// slots, then lay out the non-virtual bases.
/// Fix: the vcall slot was emitted as wrap((0?600:0) + *i); the conditional
/// is always zero (debug residue), so it is dropped with no behavior change.
/// \return the index of the address point within 'methods'.
Index_t end(const CXXRecordDecl *RD, std::vector<llvm::Constant *> &offsets,
            const ASTRecordLayout &Layout,
            const CXXRecordDecl *PrimaryBase,
            bool PrimaryBaseWasVirtual, bool MorallyVirtual,
            int64_t Offset, bool ForVirtualBase) {
  StartNewTable();
  extra = 0;
  // FIXME: Cleanup.
  if (!ForVirtualBase) {
    // then virtual base offsets...
    for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
           e = offsets.rend(); i != e; ++i)
      methods.push_back(*i);
  }

  // The vcalls come first...
  for (std::vector<Index_t>::reverse_iterator i=VCalls.rbegin(),
         e=VCalls.rend();
       i != e; ++i)
    methods.push_back(wrap(*i));
  VCalls.clear();

  if (ForVirtualBase) {
    // then virtual base offsets...
    for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
           e = offsets.rend(); i != e; ++i)
      methods.push_back(*i);
  }

  // Offset-to-top (negated byte offset of this subobject) and RTTI sit
  // immediately before the address point.
  methods.push_back(wrap(-(Offset/8)));
  methods.push_back(rtti);
  Index_t AddressPoint = methods.size();

  InstallThunks();
  methods.insert(methods.end(), submethods.begin(), submethods.end());
  submethods.clear();

  // and then the non-virtual bases.
  NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                  MorallyVirtual, Offset);
  return AddressPoint;
}
|
||||
|
||||
/// Primaries - Recursively add the virtual methods of RD's primary base
/// chain, then RD's own, to the current vtable: primary bases share the
/// derived class's vtable rather than getting their own.
void Primaries(const CXXRecordDecl *RD, bool MorallyVirtual, int64_t Offset) {
  if (!RD->isDynamicClass())
    return;

  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

  // vtables are composed from the chain of primaries.
  if (PrimaryBase) {
    if (PrimaryBaseWasVirtual)
      // Remember it so GenerateVtableForVBases doesn't emit it again.
      IndirectPrimary.insert(PrimaryBase);
    Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
  }

  // And add the virtuals for the class to the primary vtable.
  AddMethods(RD, MorallyVirtual, Offset);
}
|
||||
|
||||
/// GenerateVtableForBase - Emit the complete (sub-)vtable for RD at the
/// given bit Offset into the enclosing object.
/// \param MorallyVirtual true when RD is reached through a virtual base.
/// \param ForVirtualBase true when this table is a virtual base's own table
///        (changes where the vbase offsets are placed relative to vcalls).
/// \param Path optional chain of derived (class, offset) pairs whose
///        overrides must be replayed into this table.
/// \return the address point index within 'methods' (0 for non-dynamic
///         classes, which get no vtable).
int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                              bool MorallyVirtual = false, int64_t Offset = 0,
                              bool ForVirtualBase = false,
                              std::vector<std::pair<const CXXRecordDecl *,
                              int64_t> > *Path = 0) {
  if (!RD->isDynamicClass())
    return 0;

  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();

  std::vector<llvm::Constant *> offsets;
  extra = 0;
  GenerateVBaseOffsets(offsets, RD, Offset);
  if (ForVirtualBase)
    extra = offsets.size();

  // vtables are composed from the chain of primaries.
  if (PrimaryBase) {
    if (PrimaryBaseWasVirtual)
      IndirectPrimary.insert(PrimaryBase);
    Primaries(PrimaryBase, PrimaryBaseWasVirtual|MorallyVirtual, Offset);
  }

  // And add the virtuals for the class to the primary vtable.
  AddMethods(RD, MorallyVirtual, Offset);

  if (Path)
    OverrideMethods(Path, MorallyVirtual);

  // Flush the accumulated slots and lay out non-virtual bases.
  return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
             MorallyVirtual, Offset, ForVirtualBase);
}
|
||||
|
||||
/// GenerateVtableForVBases - Recursively emit vtables for every virtual
/// base of RD that has not already been emitted as an indirect primary.
/// 'Path' accumulates the (class, offset) chain from the most-derived
/// class down, so overrides can be replayed into each vbase's table.
void GenerateVtableForVBases(const CXXRecordDecl *RD,
                             int64_t Offset = 0,
                             std::vector<std::pair<const CXXRecordDecl *,
                             int64_t> > *Path = 0) {
  // The topmost call owns (and frees) the path vector.
  bool alloc = false;
  if (Path == 0) {
    alloc = true;
    Path = new std::vector<std::pair<const CXXRecordDecl *,
      int64_t> >;
  }
  // FIXME: We also need to override using all paths to a virtual base,
  // right now, we just process the first path
  Path->push_back(std::make_pair(RD, Offset));
  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
         e = RD->bases_end(); i != e; ++i) {
    const CXXRecordDecl *Base =
      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
    if (i->isVirtual() && !IndirectPrimary.count(Base)) {
      // Mark it so we don't output it twice.
      IndirectPrimary.insert(Base);
      StartNewTable();
      int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
      GenerateVtableForBase(Base, true, BaseOffset, true, Path);
    }
    int64_t BaseOffset = Offset;
    if (i->isVirtual())
      BaseOffset = BLayout.getVBaseClassOffset(Base);
    // Recurse only where further virtual bases can still be found.
    if (Base->getNumVBases())
      GenerateVtableForVBases(Base, BaseOffset, Path);
  }
  Path->pop_back();
  if (alloc)
    delete Path;
}
|
||||
};
|
||||
|
||||
|
||||
/// VBlookup - Resolve a virtual base offset index for a class other than
/// the one this builder is laying out, via the module-wide cache.
VtableBuilder::Index_t VtableBuilder::VBlookup(CXXRecordDecl *D,
                                               CXXRecordDecl *B) {
  return CGM.getVtableInfo().getVirtualBaseOffsetIndex(D, B);
}
|
||||
|
||||
/// getMethodVtableIndex - Return the index (relative to the vtable address
/// point) where the function pointer for the given virtual function is
/// stored, building and caching the whole class's layout on a cache miss.
int64_t CGVtableInfo::getMethodVtableIndex(const CXXMethodDecl *MD) {
  MD = MD->getCanonicalDecl();

  MethodVtableIndicesTy::iterator Entry = MethodVtableIndices.find(MD);
  if (Entry != MethodVtableIndices.end())
    return Entry->second;

  // Cache miss: lay out the full vtable of the parent class and record
  // every slot index the builder computes.
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data.
  const CXXRecordDecl *RD = MD->getParent();
  std::vector<llvm::Constant *> Slots;
  VtableBuilder Builder(Slots, RD, CGM);
  Builder.GenerateVtableForBase(RD);
  Builder.GenerateVtableForVBases(RD);

  MethodVtableIndices.insert(Builder.getIndex().begin(),
                             Builder.getIndex().end());

  Entry = MethodVtableIndices.find(MD);
  assert(Entry != MethodVtableIndices.end() && "Did not find index!");
  return Entry->second;
}
|
||||
|
||||
/// getVirtualBaseOffsetIndex - Return the index (relative to the vtable
/// address point) where the offset of VBase within RD is stored, building
/// and caching RD's full layout on a cache miss.
int64_t CGVtableInfo::getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                                const CXXRecordDecl *VBase) {
  ClassPairTy ClassPair(RD, VBase);

  VirtualBaseClassIndiciesTy::iterator I =
    VirtualBaseClassIndicies.find(ClassPair);
  if (I != VirtualBaseClassIndicies.end())
    return I->second;

  std::vector<llvm::Constant *> methods;
  // FIXME: This seems expensive. Can we do a partial job to get
  // just this data.
  VtableBuilder b(methods, RD, CGM);
  b.GenerateVtableForBase(RD);
  b.GenerateVtableForVBases(RD);

  // NOTE(review): this iterator names uint64_t as the mapped type while
  // getVBIndex() is declared over VtableBuilder::Index_t -- presumably the
  // same type, but worth confirming against the class definition.
  for (llvm::DenseMap<const CXXRecordDecl *, uint64_t>::iterator I =
         b.getVBIndex().begin(), E = b.getVBIndex().end(); I != E; ++I) {
    // Insert all types.
    ClassPairTy ClassPair(RD, I->first);

    VirtualBaseClassIndicies.insert(std::make_pair(ClassPair, I->second));
  }

  I = VirtualBaseClassIndicies.find(ClassPair);
  assert(I != VirtualBaseClassIndicies.end() && "Did not find index!");

  return I->second;
}
|
||||
|
||||
/// GenerateVtable - Emit the global vtable for RD (mangled name, weak-any
/// linkage) and return a pointer adjusted to its address point.
/// Fix: removed the local 'QualType ClassTy;' which was declared but never
/// used anywhere in the function.
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
  llvm::SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  mangleCXXVtable(CGM.getMangleContext(), RD, Out);
  llvm::GlobalVariable::LinkageTypes linktype;
  linktype = llvm::GlobalValue::WeakAnyLinkage;
  std::vector<llvm::Constant *> methods;
  llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
  int64_t AddressPoint;

  VtableBuilder b(methods, RD, CGM);

  // First comes the vtables for all the non-virtual bases...
  AddressPoint = b.GenerateVtableForBase(RD);

  // then the vtables for all the virtual bases.
  b.GenerateVtableForVBases(RD);

  // Materialize the accumulated slots as a constant [N x i8*] global.
  llvm::Constant *C;
  llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
  C = llvm::ConstantArray::get(type, methods);
  llvm::Value *vtable = new llvm::GlobalVariable(CGM.getModule(), type, true,
                                                 linktype, C, Out.str());
  // Return a pointer to the address point, not to slot 0.
  vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
  vtable = Builder.CreateGEP(vtable,
               llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                      AddressPoint*LLVMPointerWidth/8));
  return vtable;
}
|
|
@ -0,0 +1,59 @@
|
|||
//===--- CGVtable.h - Emit LLVM Code for C++ vtables ----------------------===//
|
||||
//
|
||||
// The LLVM Compiler Infrastructure
|
||||
//
|
||||
// This file is distributed under the University of Illinois Open Source
|
||||
// License. See LICENSE.TXT for details.
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
//
|
||||
// This contains code dealing with C++ code generation of virtual tables.
|
||||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#ifndef CLANG_CODEGEN_CGVTABLE_H
|
||||
#define CLANG_CODEGEN_CGVTABLE_H
|
||||
|
||||
#include "llvm/ADT/DenseMap.h"
|
||||
|
||||
namespace clang {
|
||||
class CXXMethodDecl;
|
||||
class CXXRecordDecl;
|
||||
|
||||
namespace CodeGen {
|
||||
class CodeGenModule;
|
||||
|
||||
/// CGVtableInfo - Computes and caches vtable layout facts (method slot
/// indices and virtual base offset indices) for C++ classes.
class CGVtableInfo {
  CodeGenModule &CGM;

  /// MethodVtableIndices - Contains the index (relative to the vtable address
  /// point) where the function pointer for a virtual function is stored.
  typedef llvm::DenseMap<const CXXMethodDecl *, int64_t> MethodVtableIndicesTy;
  MethodVtableIndicesTy MethodVtableIndices;

  /// ClassPairTy - A (derived class, virtual base class) pair used as the
  /// cache key for virtual base offset indices.
  typedef std::pair<const CXXRecordDecl *,
                    const CXXRecordDecl *> ClassPairTy;

  /// VirtualBaseClassIndicies - Contains the index into the vtable where the
  /// offsets for virtual bases of a class are stored.
  typedef llvm::DenseMap<ClassPairTy, int64_t> VirtualBaseClassIndiciesTy;
  VirtualBaseClassIndiciesTy VirtualBaseClassIndicies;
public:
  CGVtableInfo(CodeGenModule &CGM)
    : CGM(CGM) { }

  /// getMethodVtableIndex - Return the index (relative to the vtable address
  /// point) where the function pointer for the given virtual function is
  /// stored.
  int64_t getMethodVtableIndex(const CXXMethodDecl *MD);

  /// getVirtualBaseOffsetIndex - Return the index (relative to the vtable
  /// address point) where the offset of the given virtual base of the given
  /// class is stored.
  int64_t getVirtualBaseOffsetIndex(const CXXRecordDecl *RD,
                                    const CXXRecordDecl *VBase);
};
|
||||
|
||||
}
|
||||
}
|
||||
#endif
|
|
@ -39,7 +39,8 @@ CodeGenModule::CodeGenModule(ASTContext &C, const CompileOptions &compileOpts,
|
|||
Diagnostic &diags)
|
||||
: BlockModule(C, M, TD, Types, *this), Context(C),
|
||||
Features(C.getLangOptions()), CompileOpts(compileOpts), TheModule(M),
|
||||
TheTargetData(TD), Diags(diags), Types(C, M, TD), MangleCtx(C), Runtime(0),
|
||||
TheTargetData(TD), Diags(diags), Types(C, M, TD), MangleCtx(C),
|
||||
VtableInfo(*this), Runtime(0),
|
||||
MemCpyFn(0), MemMoveFn(0), MemSetFn(0), CFConstantStringClassRef(0),
|
||||
VMContext(M.getContext()) {
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
#include "CGBlocks.h"
|
||||
#include "CGCall.h"
|
||||
#include "CGCXX.h"
|
||||
#include "CGVtable.h"
|
||||
#include "CodeGenTypes.h"
|
||||
#include "Mangle.h"
|
||||
#include "llvm/Module.h"
|
||||
|
@ -126,6 +127,9 @@ class CodeGenModule : public BlockModule {
|
|||
CodeGenTypes Types;
|
||||
MangleContext MangleCtx;
|
||||
|
||||
/// VtableInfo - Holds information about C++ vtables.
|
||||
CGVtableInfo VtableInfo;
|
||||
|
||||
CGObjCRuntime* Runtime;
|
||||
CGDebugInfo* DebugInfo;
|
||||
|
||||
|
@ -221,6 +225,7 @@ public:
|
|||
llvm::Module &getModule() const { return TheModule; }
|
||||
CodeGenTypes &getTypes() { return Types; }
|
||||
MangleContext &getMangleContext() { return MangleCtx; }
|
||||
CGVtableInfo &getVtableInfo() { return VtableInfo; }
|
||||
Diagnostic &getDiags() const { return Diags; }
|
||||
const llvm::TargetData &getTargetData() const { return TheTargetData; }
|
||||
llvm::LLVMContext &getLLVMContext() { return VMContext; }
|
||||
|
@ -256,9 +261,6 @@ public:
|
|||
int64_t nv_t, int64_t v_t,
|
||||
int64_t nv_r, int64_t v_r);
|
||||
|
||||
/// GetVtableIndex - Return the vtable index for a virtual member function.
|
||||
uint64_t GetVtableIndex(const CXXMethodDecl *MD);
|
||||
|
||||
/// GetCXXBaseClassOffset - Returns the offset from a derived class to its
|
||||
/// base class. Returns null if the offset is 0.
|
||||
llvm::Constant *GetCXXBaseClassOffset(const CXXRecordDecl *ClassDecl,
|
||||
|
|
Loading…
Reference in New Issue