From a002945a20db65259de7cdcd8eaa8e929da75961 Mon Sep 17 00:00:00 2001
From: Mike Stump
Date: Fri, 21 Aug 2009 01:45:00 +0000
Subject: [PATCH] We now support overriding base functions in vtables. WIP.

llvm-svn: 79587
---
 clang/lib/AST/DeclCXX.cpp      |  2 +
 clang/lib/CodeGen/CGCXX.cpp    | 86 +++++++++++++++++++++++++++-------
 clang/test/CodeGenCXX/virt.cpp | 31 ++++++++++++
 3 files changed, 102 insertions(+), 17 deletions(-)

diff --git a/clang/lib/AST/DeclCXX.cpp b/clang/lib/AST/DeclCXX.cpp
index c00d361c6d0a..b126a09fc448 100644
--- a/clang/lib/AST/DeclCXX.cpp
+++ b/clang/lib/AST/DeclCXX.cpp
@@ -332,6 +332,8 @@ typedef llvm::DenseMap<const CXXMethodDecl *,
                        std::vector<const CXXMethodDecl *> *> OverriddenMethodsMapTy;
 
+// FIXME: We hate static data. This doesn't survive PCH saving/loading, and
+// the vtable building code uses it at CG time.
 static OverriddenMethodsMapTy *OverriddenMethods = 0;
 
 void CXXMethodDecl::addOverriddenMethod(const CXXMethodDecl *MD) {
diff --git a/clang/lib/CodeGen/CGCXX.cpp b/clang/lib/CodeGen/CGCXX.cpp
index 8b1f2fc1fe64..1298feb2fd0d 100644
--- a/clang/lib/CodeGen/CGCXX.cpp
+++ b/clang/lib/CodeGen/CGCXX.cpp
@@ -851,14 +851,19 @@ llvm::Constant *CodeGenModule::GenerateRtti(const CXXRecordDecl *RD) {
 class VtableBuilder {
   std::vector<llvm::Constant *> &methods;
   llvm::Type *Ptr8Ty;
+  /// Class - The most derived class that this vtable is being built for.
   const CXXRecordDecl *Class;
+  /// BLayout - Layout for the most derived class that this vtable is being
+  /// built for.
   const ASTRecordLayout &BLayout;
   llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
   llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
   llvm::Constant *rtti;
   llvm::LLVMContext &VMContext;
   CodeGenModule &CGM;  // Per-module state.
-
+  /// Index - Maps a method decl into a vtable index. Useful for virtual
+  /// dispatch codegen.
+  llvm::DenseMap<const CXXMethodDecl *, int32_t> Index;
   typedef CXXRecordDecl::method_iterator method_iter;
 public:
   VtableBuilder(std::vector<llvm::Constant *> &meth,
@@ -914,24 +919,69 @@ public:
     }
   }
 
-  void GenerateMethods(const CXXRecordDecl *RD) {
-    llvm::Constant *m;
+  void StartNewTable() {
+    SeenVBase.clear();
+  }
 
-    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
-         ++mi) {
-      if (mi->isVirtual()) {
-        m = CGM.GetAddrOfFunction(GlobalDecl(*mi));
-        m = llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
-        methods.push_back(m);
+  inline uint32_t nottoobig(uint64_t t) {
+    assert(t < (uint32_t)-1ULL && "vtable too big");
+    return t;
+  }
+#if 0
+  inline uint32_t nottoobig(uint32_t t) {
+    return t;
+  }
+#endif
+
+  void AddMethod(const CXXMethodDecl *MD, int32_t FirstIndex) {
+    typedef CXXMethodDecl::method_iterator meth_iter;
+
+    llvm::Constant *m;
+    m = CGM.GetAddrOfFunction(GlobalDecl(MD), Ptr8Ty);
+    m = llvm::ConstantExpr::getBitCast(m, Ptr8Ty);
+
+    // FIXME: Don't like the nested loops. For very large inheritance
+    // hierarchies we could have a table on the side with the final overrider
+    // and just replace each instance of an overridden method once. Would be
+    // nice to measure the cost/benefit on real code.
+
+    // If we can find a previously allocated slot for this, reuse it.
+    for (meth_iter mi = MD->begin_overridden_methods(),
+           e = MD->end_overridden_methods();
+         mi != e; ++mi) {
+      const CXXMethodDecl *OMD = *mi;
+      llvm::Constant *om;
+      om = CGM.GetAddrOfFunction(GlobalDecl(OMD), Ptr8Ty);
+      om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);
+
+      for (int32_t i = FirstIndex, e = nottoobig(methods.size()); i != e; ++i) {
+        // FIXME: begin_overridden_methods might be too lax, covariance
+        if (methods[i] == om) {
+          methods[i] = m;
+          Index[MD] = i;
+          return;
+        }
       }
     }
+
+    // else allocate a new slot.
+    Index[MD] = methods.size();
+    methods.push_back(m);
+  }
+
+  void GenerateMethods(const CXXRecordDecl *RD, int32_t FirstIndex) {
+    for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
+         ++mi)
+      if (mi->isVirtual())
+        AddMethod(*mi, FirstIndex);
   }
 
   void GenerateVtableForBase(const CXXRecordDecl *RD,
                              bool forPrimary,
                              bool VBoundary,
                              int64_t Offset,
-                             bool ForVirtualBase) {
+                             bool ForVirtualBase,
+                             int32_t FirstIndex) {
     llvm::Constant *m = llvm::Constant::getNullValue(Ptr8Ty);
 
     if (RD && !RD->isDynamicClass())
@@ -964,7 +1014,7 @@ public:
       IndirectPrimary.insert(PrimaryBase);
       Top = false;
       GenerateVtableForBase(PrimaryBase, true, PrimaryBaseWasVirtual|VBoundary,
-                            Offset, PrimaryBaseWasVirtual);
+                            Offset, PrimaryBaseWasVirtual, FirstIndex);
     }
 
     if (Top) {
@@ -980,7 +1030,7 @@
     }
 
     // And add the virtuals for the class to the primary vtable.
-    GenerateMethods(RD);
+    GenerateMethods(RD, FirstIndex);
 
     // and then the non-virtual bases.
     for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
@@ -991,8 +1041,9 @@
        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
      if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
        uint64_t o = Offset + Layout.getBaseClassOffset(Base);
-        SeenVBase.clear();
-        GenerateVtableForBase(Base, true, false, o, false);
+        StartNewTable();
+        FirstIndex = methods.size();
+        GenerateVtableForBase(Base, true, false, o, false, FirstIndex);
      }
    }
  }
@@ -1006,9 +1057,10 @@
      if (i->isVirtual() && !IndirectPrimary.count(Base)) {
        // Mark it so we don't output it twice.
        IndirectPrimary.insert(Base);
-        SeenVBase.clear();
+        StartNewTable();
        int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
-        GenerateVtableForBase(Base, false, true, BaseOffset, true);
+        int32_t FirstIndex = methods.size();
+        GenerateVtableForBase(Base, false, true, BaseOffset, true, FirstIndex);
      }
      if (Base->getNumVBases())
        GenerateVtableForVBases(Base, Class);
@@ -1034,7 +1086,7 @@ llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
   VtableBuilder b(methods, RD, CGM);
 
   // First comes the vtables for all the non-virtual bases...
-  b.GenerateVtableForBase(RD, true, false, 0, false);
+  b.GenerateVtableForBase(RD, true, false, 0, false, 0);
 
   // then the vtables for all the virtual bases.
   b.GenerateVtableForVBases(RD, RD);
 
diff --git a/clang/test/CodeGenCXX/virt.cpp b/clang/test/CodeGenCXX/virt.cpp
index 59aa2efb1eee..939ccdf8aafe 100644
--- a/clang/test/CodeGenCXX/virt.cpp
+++ b/clang/test/CodeGenCXX/virt.cpp
@@ -620,6 +620,36 @@ class test10_D : test10_B1, test10_B2 {
 // CHECK-LP32-NEXT: .long 4294967276
 // CHECK-LP32-NEXT: .long __ZTI8test10_D
 
+struct test11_B {
+  virtual void B1() { }
+  virtual void D() { }
+  virtual void B2() { }
+};
+
+struct test11_D : test11_B {
+  virtual void D1() { }
+  virtual void D() { }
+  virtual void D2() { }
+};
+
+// CHECK-LP32:__ZTV8test11_D:
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long __ZTI8test11_D
+// CHECK-LP32-NEXT: .long __ZN8test11_B2B1Ev
+// CHECK-LP32-NEXT: .long __ZN8test11_D1DEv
+// CHECK-LP32-NEXT: .long __ZN8test11_B2B2Ev
+// CHECK-LP32-NEXT: .long __ZN8test11_D2D1Ev
+// CHECK-LP32-NEXT: .long __ZN8test11_D2D2Ev
+
+
+// CHECK-LP64:__ZTV8test11_D:
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad __ZTI8test11_D
+// CHECK-LP64-NEXT: .quad __ZN8test11_B2B1Ev
+// CHECK-LP64-NEXT: .quad __ZN8test11_D1DEv
+// CHECK-LP64-NEXT: .quad __ZN8test11_B2B2Ev
+// CHECK-LP64-NEXT: .quad __ZN8test11_D2D1Ev
+// CHECK-LP64-NEXT: .quad __ZN8test11_D2D2Ev
 
 
 
@@ -702,6 +732,7 @@ class test10_D : test10_B1, test10_B2 {
 // CHECK-LP64-NEXT: .quad __ZN2D14bar5Ev
 
 
+test11_D d11;
 test10_D d10;
 test9_D d9;
 test8_D d8;
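
Note (not part of the patch): AddMethod above reuses an existing vtable slot when the method being added overrides one that already has a slot, scanning methods from FirstIndex and comparing against each overridden method's address; only genuinely new virtuals get appended. The standalone C++ sketch below models that slot-reuse idea on the test11 hierarchy from the test case. It is not clang code: Method and ToyVtableBuilder are hypothetical names, a single Overridden pointer stands in for begin_overridden_methods(), and a side map replaces the patch's linear scan.

#include <cassert>
#include <initializer_list>
#include <map>
#include <string>
#include <vector>

// A toy "method": a name plus the base-class method it overrides, if any.
struct Method {
  std::string Name;
  const Method *Overridden;
};

class ToyVtableBuilder {
  std::vector<const Method *> Methods;     // vtable slots in layout order
  std::map<const Method *, size_t> Index;  // method -> slot index

public:
  // Models the patch's AddMethod: reuse the slot of an overridden method
  // when one exists, otherwise append a new slot at the end.
  void AddMethod(const Method *MD) {
    if (MD->Overridden) {
      auto It = Index.find(MD->Overridden);
      if (It != Index.end()) {
        Methods[It->second] = MD;   // overwrite the base method's slot
        Index[MD] = It->second;
        return;
      }
    }
    Index[MD] = Methods.size();     // else allocate a new slot
    Methods.push_back(MD);
  }

  const std::vector<const Method *> &slots() const { return Methods; }
};

int main() {
  // The test11 hierarchy: test11_D overrides only test11_B::D.
  Method B1{"test11_B::B1", nullptr};
  Method BD{"test11_B::D", nullptr};
  Method B2{"test11_B::B2", nullptr};
  Method D1{"test11_D::D1", nullptr};
  Method DD{"test11_D::D", &BD};
  Method D2{"test11_D::D2", nullptr};

  ToyVtableBuilder Builder;
  for (const Method *M : {&B1, &BD, &B2})  // base subobject's virtuals first
    Builder.AddMethod(M);
  for (const Method *M : {&D1, &DD, &D2})  // then the derived class's virtuals
    Builder.AddMethod(M);

  // Expected order: B1, D (derived version), B2, D1, D2 -- the same layout the
  // __ZTV8test11_D CHECK lines above assert.
  assert(Builder.slots().size() == 5);
  assert(Builder.slots()[1] == &DD);
  return 0;
}

The Index map here plays the role of the patch's Index member, whose comment notes it is meant to feed virtual-dispatch codegen with a method's vtable slot later on.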