Install thunks later to fix up overrides. Track the space taken by vbase
offsets better for thunk refinements. Cleanups. WIP.

llvm-svn: 81067
This commit is contained in:
parent 1309a852d3
commit 453fe4285d
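The message above describes the core change visible in the diff below: instead of building a thunk the moment an override with a non-trivial this-adjustment is found, the builder now records the required (non-virtual, virtual) adjustment in a Thunks map and patches the vtable slots in a later InstallThunks pass, so a later override can still cancel or refine a pending thunk (Thunks.erase(OMD)) and the space taken by vbase offsets can be accounted for first. The following is a minimal standalone sketch of that record-now/install-later pattern, using plain std::map and std::string stand-ins; the names VTable, recordThunk, installThunks, and makeThunk are hypothetical and are not clang's VtableBuilder or CGM.BuildThunk.

// Minimal sketch of the "record now, install later" thunk pattern this patch
// moves to: override processing only records the required this-adjustment,
// and the thunk pointers are patched into the table once slot layout is final.
// All names here are illustrative, not taken from the patch.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Adjustment {
  int64_t NonVirtual; // static offset applied to 'this'
  int64_t Virtual;    // offset of a vcall slot, 0 if none
};

struct VTable {
  std::vector<std::string> Methods;                // emitted entries
  std::map<std::string, size_t> Index;             // method -> slot index
  std::map<std::string, Adjustment> PendingThunks; // method -> adjustment

  // Called while resolving an override: remember that this slot will need a
  // thunk, but do not build it yet (a later override may cancel or change it).
  void recordThunk(const std::string &Method, Adjustment A) {
    PendingThunks[Method] = A;
  }

  // Called once the address point and slot indices are final.
  void installThunks(size_t AddressPoint) {
    for (const auto &Entry : PendingThunks) {
      size_t Slot = AddressPoint + Index[Entry.first];
      Methods[Slot] = makeThunk(Entry.first, Entry.second);
    }
    PendingThunks.clear();
  }

  static std::string makeThunk(const std::string &Method, Adjustment A) {
    return "thunk(" + Method + ", nv=" + std::to_string(A.NonVirtual) +
           ", v=" + std::to_string(A.Virtual) + ")";
  }
};

int main() {
  VTable VT;
  VT.Methods = {"A::f", "A::g"};
  VT.Index = {{"A::f", 0}, {"A::g", 1}};
  VT.recordThunk("A::g", {0, -16}); // e.g. a vcall-based adjustment
  VT.installThunks(/*AddressPoint=*/0);
  for (const auto &M : VT.Methods)
    std::cout << M << "\n"; // A::f, then thunk(A::g, nv=0, v=-16)
  return 0;
}

In the real patch the map key is the CXXMethodDecl and the stored pair is the non-virtual offset plus the negated vcall-slot offset, -((idx+extra+2)*LLVMPointerWidth/8), where extra counts the vbase offset entries already emitted.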
@@ -855,17 +855,23 @@ private:
llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCall;
llvm::DenseMap<const CXXMethodDecl *, Index_t> VCallOffset;
typedef llvm::DenseMap<const CXXMethodDecl *,
std::pair<Index_t, Index_t> > Thunks_t;
Thunks_t Thunks;
std::vector<Index_t> VCalls;
typedef CXXRecordDecl::method_iterator method_iter;
// FIXME: Linkage should follow vtable
const bool Extern;
const uint32_t LLVMPointerWidth;
Index_t extra;
public:
VtableBuilder(std::vector<llvm::Constant *> &meth,
const CXXRecordDecl *c,
CodeGenModule &cgm)
: methods(meth), Class(c), BLayout(cgm.getContext().getASTRecordLayout(c)),
rtti(cgm.GenerateRtti(c)), VMContext(cgm.getModule().getContext()),
CGM(cgm), Extern(true) {
CGM(cgm), Extern(true),
LLVMPointerWidth(cgm.getContext().Target.getPointerWidth(0)) {
Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
}
@@ -913,7 +919,6 @@ public:
// and just replace each instance of an overridden method once. Would be
// nice to measure the cost/benefit on real code.

// If we can find a previously allocated slot for this, reuse it.
for (meth_iter mi = MD->begin_overridden_methods(),
e = MD->end_overridden_methods();
mi != e; ++mi) {
@@ -925,28 +930,51 @@ public:
for (Index_t i = AddressPoint, e = submethods.size();
i != e; ++i) {
// FIXME: begin_overridden_methods might be too lax, covariance */
if (submethods[i] == om) {
int64_t O = VCallOffset[OMD] - Offset/8;
// FIXME: thunks
if (O) {
submethods[i] = CGM.BuildThunk(MD, Extern, true, 0, O);
} else
submethods[i] = m;
// FIXME: audit
Index[MD] = i - AddressPoint;
if (MorallyVirtual) {
VCallOffset[MD] = Offset/8;
VCalls[VCall[OMD]] = Offset/8 - VCallOffset[OMD];
if (submethods[i] != om)
continue;
submethods[i] = m;
Index[MD] = i - AddressPoint;

Thunks.erase(OMD);
if (MorallyVirtual) {
VCallOffset[MD] = Offset/8;
Index_t &idx = VCall[OMD];
if (idx == 0) {
idx = VCalls.size()+1;
VCalls.push_back(0);
}
// submethods[VCall[OMD]] = wrap(Offset/8 - VCallOffset[OMD]);
VCalls[idx] = Offset/8 - VCallOffset[OMD];
VCall[MD] = idx;
// FIXME: 0?
Thunks[MD] = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
return true;
}
#if 0
// FIXME: finish off
int64_t O = VCallOffset[OMD] - Offset/8;
if (O) {
Thunks[MD] = std::make_pair(O, 0);
}
#endif
return true;
}
}

return false;
}

void InstallThunks(Index_t AddressPoint) {
for (Thunks_t::iterator i = Thunks.begin(), e = Thunks.end();
i != e; ++i) {
const CXXMethodDecl *MD = i->first;
Index_t idx = Index[MD];
Index_t nv_O = i->second.first;
Index_t v_O = i->second.second;
methods[AddressPoint + idx] = CGM.BuildThunk(MD, Extern, nv_O, v_O);
}
Thunks.clear();
}

void OverrideMethods(const CXXRecordDecl *RD, Index_t AddressPoint,
bool MorallyVirtual, Index_t Offset) {
for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
@@ -961,19 +989,18 @@ public:
void AddMethod(const CXXMethodDecl *MD, Index_t AddressPoint,
bool MorallyVirtual, Index_t Offset) {
llvm::Constant *m = wrap(CGM.GetAddrOfFunction(GlobalDecl(MD), Ptr8Ty));
// If we can find a previously allocated slot for this, reuse it.
if (OverrideMethod(MD, m, MorallyVirtual, Offset, submethods, 0))
return;

// else allocate a new slot.
Index[MD] = submethods.size();
// VCall[MD] = Offset;
if (MorallyVirtual) {
VCallOffset[MD] = Offset/8;
Index_t &idx = VCall[MD];
// Allocate the first one, after that, we reuse the previous one.
if (idx == 0) {
idx = VCalls.size()+1;
VCallOffset[MD] = Offset/8;
VCalls.push_back(0);
}
}
@@ -988,6 +1015,27 @@ public:
AddMethod(*mi, AddressPoint, MorallyVirtual, Offset);
}

void NonVirtualBases(const CXXRecordDecl *RD, const ASTRecordLayout &Layout,
const CXXRecordDecl *PrimaryBase,
bool PrimaryBaseWasVirtual, bool MorallyVirtual,
int64_t Offset) {
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
e = RD->bases_end(); i != e; ++i) {
if (i->isVirtual())
continue;
const CXXRecordDecl *Base =
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
uint64_t o = Offset + Layout.getBaseClassOffset(Base);
StartNewTable();
Index_t AP;
AP = GenerateVtableForBase(Base, true, true, MorallyVirtual, o, false);
OverrideMethods(RD, AP, MorallyVirtual, o);
InstallThunks(AP);
}
}
}

int64_t GenerateVtableForBase(const CXXRecordDecl *RD, bool forPrimary,
bool Bottom, bool MorallyVirtual,
int64_t Offset, bool ForVirtualBase) {
@@ -1004,8 +1052,12 @@ public:
std::vector<llvm::Constant *> offsets;
// FIXME: Audit, is this right?
if (Bottom && (PrimaryBase == 0 || forPrimary || !PrimaryBaseWasVirtual
|| Bottom))
|| Bottom)) {
extra = 0;
GenerateVBaseOffsets(offsets, RD, Offset);
if (ForVirtualBase)
extra = offsets.size();
}

bool Top = true;
@@ -1026,6 +1078,7 @@ public:
return AddressPoint;

StartNewTable();
extra = 0;
// FIXME: Cleanup.
if (!ForVirtualBase) {
// then virtual base offsets...
@@ -1054,22 +1107,11 @@ public:

methods.insert(methods.end(), submethods.begin(), submethods.end());
submethods.clear();
InstallThunks(AddressPoint);

// and then the non-virtual bases.
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
e = RD->bases_end(); i != e; ++i) {
if (i->isVirtual())
continue;
const CXXRecordDecl *Base =
cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
uint64_t o = Offset + Layout.getBaseClassOffset(Base);
StartNewTable();
Index_t AP;
AP = GenerateVtableForBase(Base, true, true, MorallyVirtual, o, false);
OverrideMethods(RD, AP, MorallyVirtual, o);
}
}
NonVirtualBases(RD, Layout, PrimaryBase, PrimaryBaseWasVirtual, MorallyVirtual,
Offset);
return AddressPoint;
}
@@ -1087,6 +1129,7 @@ public:
Index_t AP;
AP = GenerateVtableForBase(Base, false, true, true, BaseOffset, true);
OverrideMethods(RD, AP, true, BaseOffset);
InstallThunks(AP);
}
if (Base->getNumVBases())
GenerateVtableForVBases(Base, Class);
@@ -1168,8 +1211,8 @@ static VtableInfo *vtableinfo;

llvm::Constant *CodeGenFunction::GenerateThunk(llvm::Function *Fn,
const CXXMethodDecl *MD,
bool Extern, bool Virtual,
int64_t nv, int64_t v) {
bool Extern, int64_t nv,
int64_t v) {
QualType R = MD->getType()->getAsFunctionType()->getResultType();

FunctionArgList Args;
@@ -1198,12 +1241,11 @@ llvm::Constant *CodeGenFunction::GenerateThunk(llvm::Function *Fn,
return Fn;
}

llvm::Constant *CodeGenModule::BuildThunk(const CXXMethodDecl *MD,
bool Extern, bool Virtual, int64_t nv,
int64_t v) {
llvm::Constant *CodeGenModule::BuildThunk(const CXXMethodDecl *MD, bool Extern,
int64_t nv, int64_t v) {
llvm::SmallString<256> OutName;
llvm::raw_svector_ostream Out(OutName);
mangleThunk(MD, Virtual, nv, v, getContext(), Out);
mangleThunk(MD, nv, v, getContext(), Out);
llvm::GlobalVariable::LinkageTypes linktype;
linktype = llvm::GlobalValue::WeakAnyLinkage;
if (!Extern)
@@ -1216,7 +1258,7 @@ llvm::Constant *CodeGenModule::BuildThunk(const CXXMethodDecl *MD,

llvm::Function *Fn = llvm::Function::Create(FTy, linktype, Out.str(),
&getModule());
CodeGenFunction(*this).GenerateThunk(Fn, MD, Extern, Virtual, nv, v);
CodeGenFunction(*this).GenerateThunk(Fn, MD, Extern, nv, v);
// Fn = Builder.CreateBitCast(Fn, Ptr8Ty);
llvm::Constant *m = llvm::ConstantExpr::getBitCast(Fn, Ptr8Ty);
return m;
@@ -366,8 +366,7 @@ public:

/// GenerateThunk - Generate a thunk for the given method
llvm::Constant *GenerateThunk(llvm::Function *Fn, const CXXMethodDecl *MD,
bool Extern, bool Virtual, int64_t nv,
int64_t v);
bool Extern, int64_t nv, int64_t v);

void EmitCtorPrologue(const CXXConstructorDecl *CD);
@@ -236,8 +236,8 @@ public:
llvm::Constant *GenerateRtti(const CXXRecordDecl *RD);

/// BuildThunk - Build a thunk for the given method
llvm::Constant *BuildThunk(const CXXMethodDecl *MD, bool Extern, bool Virtual,
int64_t nv, int64_t v);
llvm::Constant *BuildThunk(const CXXMethodDecl *MD, bool Extern, int64_t nv,
int64_t v);

/// GetStringForStringLiteral - Return the appropriate bytes for a string
/// literal, properly padded to match the literal type. If only the address of
@@ -39,10 +39,10 @@ namespace {
: Context(C), Out(os), Structor(0), StructorType(0) { }

bool mangle(const NamedDecl *D);
void mangleCalloffset(bool Virtual, int64_t nv, int64_t v);
void mangleThunk(const NamedDecl *ND, bool Virtual, int64_t nv, int64_t v);
void mangleCovariantThunk(const NamedDecl *ND, bool VirtualThis,
int64_t nv_t, int64_t v_t, bool VirtualResult,
void mangleCalloffset(int64_t nv, int64_t v);
void mangleThunk(const NamedDecl *ND, int64_t nv, int64_t v);
void mangleCovariantThunk(const NamedDecl *ND,
int64_t nv_t, int64_t v_t,
int64_t nv_r, int64_t v_r);
void mangleGuardVariable(const VarDecl *D);
@@ -241,14 +241,13 @@ void CXXNameMangler::mangleName(const NamedDecl *ND) {
mangleNestedName(ND);
}

void CXXNameMangler::mangleCalloffset(bool Virtual, int64_t nv,
int64_t v) {
void CXXNameMangler::mangleCalloffset(int64_t nv, int64_t v) {
// <call-offset> ::= h <nv-offset> _
// ::= v <v-offset> _
// <nv-offset> ::= <offset number> # non-virtual base override
// <v-offset> ::= <offset number> _ <virtual offset number>
// # virtual base override, with vcall offset
if (!Virtual) {
if (v == 0) {
Out << "h";
if (nv < 0) {
Out << "n";
@@ -272,26 +271,24 @@ void CXXNameMangler::mangleCalloffset(bool Virtual, int64_t nv,
Out << "_";
}

void CXXNameMangler::mangleThunk(const NamedDecl *D, bool Virtual, int64_t nv,
int64_t v) {
void CXXNameMangler::mangleThunk(const NamedDecl *D, int64_t nv, int64_t v) {
// <special-name> ::= T <call-offset> <base encoding>
// # base is the nominal target function of thunk
Out << "_T";
mangleCalloffset(Virtual, nv, v);
mangleCalloffset(nv, v);
mangleName(D);
}

void CXXNameMangler::mangleCovariantThunk(const NamedDecl *D,
bool VirtualThis, int64_t nv_t,
int64_t v_t, bool VirtualResult,
int64_t nv_t, int64_t v_t,
int64_t nv_r, int64_t v_r) {
// <special-name> ::= Tc <call-offset> <call-offset> <base encoding>
// # base is the nominal target function of thunk
// # first call-offset is 'this' adjustment
// # second call-offset is result adjustment
Out << "_Tc";
mangleCalloffset(VirtualThis, nv_t, v_t);
mangleCalloffset(VirtualResult, nv_r, v_r);
mangleCalloffset(nv_t, v_t);
mangleCalloffset(nv_r, v_r);
mangleName(D);
}
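The <call-offset> grammar quoted in the comments above drives these thunk names: a purely non-virtual this-adjustment is encoded as h<offset>_, a virtual one as v<offset>_<vcall offset>_, and negative numbers are written with an n prefix. The helper below is only a sketch of that encoding for illustration; encodeNumber and callOffset are made-up names, and selecting the virtual form by v != 0 is an assumption about the refactored code (which no longer takes a Virtual flag), not clang's mangler.

#include <cstdint>
#include <iostream>
#include <string>

// Sketch of the <call-offset> encoding from the grammar quoted in the patch:
//   h <nv-offset> _            non-virtual this adjustment
//   v <nv> _ <vcall offset> _  virtual adjustment through a vcall slot
// Negative numbers get an 'n' prefix, as in the patch's "Out << \"n\"" path.
static std::string encodeNumber(int64_t N) {
  return N < 0 ? "n" + std::to_string(-N) : std::to_string(N);
}

// Assumption: the non-virtual form is chosen when the vcall offset is zero.
static std::string callOffset(int64_t nv, int64_t v) {
  if (v == 0)
    return "h" + encodeNumber(nv) + "_";
  return "v" + encodeNumber(nv) + "_" + encodeNumber(v) + "_";
}

int main() {
  std::cout << callOffset(-8, 0) << "\n";  // hn8_
  std::cout << callOffset(0, -24) << "\n"; // v0_n24_
  return 0;
}

So a thunk whose this pointer is shifted by -8 bytes would carry the call-offset hn8_ between the "_T" prefix and the mangled target name.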
@@ -859,7 +856,7 @@ namespace clang {

/// \brief Mangles a thunk with the offset n for the declaration D and
/// emits that name to the given output stream.
void mangleThunk(const NamedDecl *D, bool Virtual, int64_t nv, int64_t v,
void mangleThunk(const NamedDecl *D, int64_t nv, int64_t v,
ASTContext &Context, llvm::raw_ostream &os) {
// FIXME: Hum, we might have to thunk these, fix.
assert(!isa<CXXConstructorDecl>(D) &&
@@ -868,15 +865,14 @@ namespace clang {
"Use mangleCXXDtor for destructor decls!");

CXXNameMangler Mangler(Context, os);
Mangler.mangleThunk(D, Virtual, nv, v);
Mangler.mangleThunk(D, nv, v);
os.flush();
}

/// \brief Mangles a covariant thunk for the declaration D and emits that
/// name to the given output stream.
void mangleCovariantThunk(const NamedDecl *D, bool VirtualThis, int64_t nv_t,
int64_t v_t, bool VirtualResult, int64_t nv_r,
int64_t v_r, ASTContext &Context,
void mangleCovariantThunk(const NamedDecl *D, int64_t nv_t, int64_t v_t,
int64_t nv_r, int64_t v_r, ASTContext &Context,
llvm::raw_ostream &os) {
// FIXME: Hum, we might have to thunk these, fix.
assert(!isa<CXXConstructorDecl>(D) &&
@@ -885,8 +881,7 @@ namespace clang {
"Use mangleCXXDtor for destructor decls!");

CXXNameMangler Mangler(Context, os);
Mangler.mangleCovariantThunk(D, VirtualThis, nv_t, v_t, VirtualResult,
nv_r, v_r);
Mangler.mangleCovariantThunk(D, nv_t, v_t, nv_r, v_r);
os.flush();
}
@@ -34,7 +34,7 @@ namespace clang {

bool mangleName(const NamedDecl *D, ASTContext &Context,
llvm::raw_ostream &os);
void mangleThunk(const NamedDecl *D, bool Virtual, int64_t n, int64_t vn,
void mangleThunk(const NamedDecl *D, int64_t n, int64_t vn,
ASTContext &Context, llvm::raw_ostream &os);
void mangleCovariantThunk(const NamedDecl *D, bool VirtualThis, int64_t nv_t,
int64_t v_t, bool VirtualResult, int64_t nv_r,