Revert r80064 since it broke the build.

llvm-svn: 80066
This commit is contained in:
Anders Carlsson 2009-08-26 03:30:14 +00:00
parent aef5317edd
commit 458055a890
3 changed files with 28 additions and 160 deletions

View File

@@ -200,9 +200,15 @@ RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE) {
const FunctionProtoType *FPT = MD->getType()->getAsFunctionProtoType();
if (MD->isVirtual()) {
ErrorUnsupported(CE, "virtual dispatch");
}
const llvm::Type *Ty =
CGM.getTypes().GetFunctionType(CGM.getTypes().getFunctionInfo(MD),
FPT->isVariadic());
llvm::Constant *Callee = CGM.GetAddrOfFunction(GlobalDecl(MD), Ty);
llvm::Value *This;
if (ME->isArrow())
@@ -211,12 +217,6 @@ RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE) {
LValue BaseLV = EmitLValue(ME->getBase());
This = BaseLV.getAddress();
}
llvm::Value *Callee;
if (MD->isVirtual())
Callee = BuildVirtualCall(MD, This, Ty);
else
Callee = CGM.GetAddrOfFunction(GlobalDecl(MD), Ty);
return EmitCXXMemberCall(MD, Callee, This,
CE->arg_begin(), CE->arg_end());
@@ -826,10 +826,6 @@ llvm::Constant *CodeGenModule::GenerateRtti(const CXXRecordDecl *RD) {
}
class VtableBuilder {
public:
/// Index_t - Vtable index type.
typedef uint64_t Index_t;
private:
std::vector<llvm::Constant *> &methods;
llvm::Type *Ptr8Ty;
/// Class - The most derived class that this vtable is being built for.
@@ -844,7 +840,7 @@ private:
CodeGenModule &CGM; // Per-module state.
/// Index - Maps a method decl into a vtable index. Useful for virtual
/// dispatch codegen.
llvm::DenseMap<const CXXMethodDecl *, Index_t> Index;
llvm::DenseMap<const CXXMethodDecl *, int32_t> Index;
typedef CXXRecordDecl::method_iterator method_iter;
public:
VtableBuilder(std::vector<llvm::Constant *> &meth,
@@ -856,7 +852,6 @@ public:
Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
}
llvm::DenseMap<const CXXMethodDecl *, Index_t> &getIndex() { return Index; }
llvm::Constant *GenerateVcall(const CXXMethodDecl *MD,
const CXXRecordDecl *RD,
bool VBoundary,
@@ -937,17 +932,17 @@ public:
SeenVBase.clear();
}
inline Index_t nottoobig(uint64_t t) {
assert(t < (Index_t)-1ULL || "vtable too big");
inline uint32_t nottoobig(uint64_t t) {
assert(t < (uint32_t)-1ULL || "vtable too big");
return t;
}
#if 0
inline Index_t nottoobig(Index_t t) {
inline uint32_t nottoobig(uint32_t t) {
return t;
}
#endif
void AddMethod(const CXXMethodDecl *MD, Index_t AddressPoint) {
void AddMethod(const CXXMethodDecl *MD, int32_t FirstIndex) {
typedef CXXMethodDecl::method_iterator meth_iter;
llvm::Constant *m;
@@ -968,34 +963,34 @@ public:
om = CGM.GetAddrOfFunction(GlobalDecl(OMD), Ptr8Ty);
om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);
for (Index_t i = AddressPoint, e = methods.size();
i != e; ++i) {
for (int32_t i = FirstIndex, e = nottoobig(methods.size()); i != e; ++i) {
// FIXME: begin_overridden_methods might be too lax, covariance */
if (methods[i] == om) {
methods[i] = m;
Index[MD] = i - AddressPoint;
Index[MD] = i;
return;
}
}
}
// else allocate a new slot.
Index[MD] = methods.size() - AddressPoint;
Index[MD] = methods.size();
methods.push_back(m);
}
void GenerateMethods(const CXXRecordDecl *RD, Index_t AddressPoint) {
void GenerateMethods(const CXXRecordDecl *RD, int32_t FirstIndex) {
for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
++mi)
if (mi->isVirtual())
AddMethod(*mi, AddressPoint);
AddMethod(*mi, FirstIndex);
}
int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
bool forPrimary,
bool VBoundary,
int64_t Offset,
bool ForVirtualBase) {
bool ForVirtualBase,
int32_t FirstIndex) {
llvm::Constant *m = llvm::Constant::getNullValue(Ptr8Ty);
int64_t AddressPoint=0;
@@ -1028,9 +1023,8 @@ public:
if (PrimaryBaseWasVirtual)
IndirectPrimary.insert(PrimaryBase);
Top = false;
AddressPoint = GenerateVtableForBase(PrimaryBase, true,
PrimaryBaseWasVirtual|VBoundary,
Offset, PrimaryBaseWasVirtual);
AddressPoint = GenerateVtableForBase(PrimaryBase, true, PrimaryBaseWasVirtual|VBoundary,
Offset, PrimaryBaseWasVirtual, FirstIndex);
}
if (Top) {
@@ -1047,7 +1041,7 @@ public:
}
// And add the virtuals for the class to the primary vtable.
GenerateMethods(RD, AddressPoint);
GenerateMethods(RD, FirstIndex);
// and then the non-virtual bases.
for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
@@ -1059,7 +1053,8 @@ public:
if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
uint64_t o = Offset + Layout.getBaseClassOffset(Base);
StartNewTable();
GenerateVtableForBase(Base, true, false, o, false);
FirstIndex = methods.size();
GenerateVtableForBase(Base, true, false, o, false, FirstIndex);
}
}
return AddressPoint;
@@ -1076,7 +1071,8 @@ public:
IndirectPrimary.insert(Base);
StartNewTable();
int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
GenerateVtableForBase(Base, false, true, BaseOffset, true);
int32_t FirstIndex = methods.size();
GenerateVtableForBase(Base, false, true, BaseOffset, true, FirstIndex);
}
if (Base->getNumVBases())
GenerateVtableForVBases(Base, Class);
@@ -1084,43 +1080,6 @@ public:
}
};
class VtableInfo {
public:
typedef VtableBuilder::Index_t Index_t;
private:
CodeGenModule &CGM; // Per-module state.
/// Index_t - Vtable index type.
typedef llvm::DenseMap<const CXXMethodDecl *, Index_t> ElTy;
typedef llvm::DenseMap<const CXXRecordDecl *, ElTy *> MapTy;
// FIXME: Move to Context.
static MapTy IndexFor;
public:
VtableInfo(CodeGenModule &cgm) : CGM(cgm) { }
void register_index(const CXXRecordDecl *RD, const ElTy &e) {
assert(IndexFor.find(RD) == IndexFor.end() || "Don't compute vtbl twice");
// We own a copy of this, it will go away shortly.
new ElTy (e);
IndexFor[RD] = new ElTy (e);
}
Index_t lookup(const CXXMethodDecl *MD) {
const CXXRecordDecl *RD = MD->getParent();
MapTy::iterator I = IndexFor.find(RD);
if (I == IndexFor.end()) {
std::vector<llvm::Constant *> methods;
VtableBuilder b(methods, RD, CGM);
b.GenerateVtableForBase(RD, true, false, 0, false);
b.GenerateVtableForVBases(RD, RD);
register_index(RD, b.getIndex());
I = IndexFor.find(RD);
}
assert(I->second->find(MD)!=I->second->end() || "Can't find vtable index");
return (*I->second)[MD];
}
};
// FIXME: Move to Context.
VtableInfo::MapTy VtableInfo::IndexFor;
llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
llvm::SmallString<256> OutName;
llvm::raw_svector_ostream Out(OutName);
@@ -1136,7 +1095,7 @@ llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
VtableBuilder b(methods, RD, CGM);
// First comes the vtables for all the non-virtual bases...
Offset = b.GenerateVtableForBase(RD, true, false, 0, false);
Offset = b.GenerateVtableForBase(RD, true, false, 0, false, 0);
// then the vtables for all the virtual bases.
b.GenerateVtableForVBases(RD, RD);
@@ -1153,31 +1112,6 @@ llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
return vtable;
}
// FIXME: move to Context
static VtableInfo *vtableinfo;
llvm::Value *
CodeGenFunction::BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This,
const llvm::Type *Ty) {
// FIXME: If we know the dynamic type, we don't have to do a virtual dispatch.
// FIXME: move to Context
if (vtableinfo == 0)
vtableinfo = new VtableInfo(CGM);
VtableInfo::Index_t Idx = vtableinfo->lookup(MD);
Ty = llvm::PointerType::get(Ty, 0);
Ty = llvm::PointerType::get(Ty, 0);
Ty = llvm::PointerType::get(Ty, 0);
llvm::Value *vtbl = Builder.CreateBitCast(This, Ty);
vtbl = Builder.CreateLoad(vtbl);
llvm::Value *vfn = Builder.CreateConstInBoundsGEP1_64(vtbl,
Idx, "vfn");
vfn = Builder.CreateLoad(vfn);
return vfn;
}
/// EmitClassAggrMemberwiseCopy - This routine generates code to copy a class
/// array of objects from SrcValue to DestValue. Copying can be either a bitwise
/// copy or via a copy constructor call.

View File

@@ -826,8 +826,6 @@ public:
const Decl *TargetDecl = 0);
RValue EmitCallExpr(const CallExpr *E);
llvm::Value *BuildVirtualCall(const CXXMethodDecl *MD, llvm::Value *&This,
const llvm::Type *Ty);
RValue EmitCXXMemberCall(const CXXMethodDecl *MD,
llvm::Value *Callee,
llvm::Value *This,

View File

@@ -91,71 +91,6 @@ int main() {
// CHECK-LP64: movl $1, 12(%rax)
// CHECK-LP64: movl $2, 8(%rax)
struct test12_A {
virtual void foo0() { }
virtual void foo() { }
} *test12_pa;
struct test12_B : public test12_A {
virtual void foo() { }
} *test12_pb;
struct test12_D : public test12_B {
} *test12_pd;
void test12_foo() {
test12_pa->foo0();
test12_pb->foo0();
test12_pd->foo0();
test12_pa->foo();
test12_pb->foo();
test12_pd->foo();
}
// CHECK-LPOPT32:__Z10test12_foov:
// CHECK-LPOPT32: movl _test12_pa, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *(%ecx)
// CHECK-LPOPT32-NEXT: movl _test12_pb, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *(%ecx)
// CHECK-LPOPT32-NEXT: movl _test12_pd, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *(%ecx)
// CHECK-LPOPT32-NEXT: movl _test12_pa, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *4(%ecx)
// CHECK-LPOPT32-NEXT: movl _test12_pb, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *4(%ecx)
// CHECK-LPOPT32-NEXT: movl _test12_pd, %eax
// CHECK-LPOPT32-NEXT: movl (%eax), %ecx
// CHECK-LPOPT32-NEXT: movl %eax, (%esp)
// CHECK-LPOPT32-NEXT: call *4(%ecx)
// CHECK-LPOPT64:__Z10test12_foov:
// CHECK-LPOPT64: movq _test12_pa(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *(%rax)
// CHECK-LPOPT64-NEXT: movq _test12_pb(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *(%rax)
// CHECK-LPOPT64-NEXT: movq _test12_pd(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *(%rax)
// CHECK-LPOPT64-NEXT: movq _test12_pa(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *8(%rax)
// CHECK-LPOPT64-NEXT: movq _test12_pb(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *8(%rax)
// CHECK-LPOPT64-NEXT: movq _test12_pd(%rip), %rdi
// CHECK-LPOPT64-NEXT: movq (%rdi), %rax
// CHECK-LPOPT64-NEXT: call *8(%rax)
struct test6_B2 { virtual void funcB2(); char b[1000]; };
struct test6_B1 : virtual test6_B2 { virtual void funcB1(); };
@@ -180,7 +115,7 @@ struct test3_B3 { virtual void funcB3(); };
struct test3_B2 : virtual test3_B3 { virtual void funcB2(); };
struct test3_B1 : virtual test3_B2 { virtual void funcB1(); };
struct test3_D : virtual test3_B1 {
struct test3_D : virtual test3_B1 {
virtual void funcD() { }
};
@@ -717,6 +652,7 @@ struct test11_D : test11_B {
// CHECK-LP64-NEXT: .quad __ZN8test11_D2D2Ev
// CHECK-LP64: __ZTV1B:
// CHECK-LP64-NEXT: .space 8
// CHECK-LP64-NEXT: .quad __ZTI1B