From e73d5ca9a8fff1af6432e8dafdf457e4e29696b0 Mon Sep 17 00:00:00 2001
From: Mike Stump <mrs@apple.com>
Date: Mon, 7 Sep 2009 04:27:52 +0000
Subject: [PATCH] Refine vcall offsets. Cleanups. WIP.

llvm-svn: 81143
---
 clang/lib/CodeGen/CGCXX.cpp    |  59 +++++++++-------
 clang/lib/CodeGen/Mangle.cpp   |  33 ++++-----
 clang/lib/CodeGen/Mangle.h     |   3 +-
 clang/test/CodeGenCXX/virt.cpp | 119 +++++++++++++++++++++++++++++++++
 4 files changed, 173 insertions(+), 41 deletions(-)

diff --git a/clang/lib/CodeGen/CGCXX.cpp b/clang/lib/CodeGen/CGCXX.cpp
index 22ff22e22e4e..f610de827650 100644
--- a/clang/lib/CodeGen/CGCXX.cpp
+++ b/clang/lib/CodeGen/CGCXX.cpp
@@ -909,9 +909,7 @@ public:
   }
 
   bool OverrideMethod(const CXXMethodDecl *MD, llvm::Constant *m,
-                      bool MorallyVirtual, Index_t Offset,
-                      std::vector<llvm::Constant *> &submethods,
-                      Index_t AddressPoint) {
+                      bool MorallyVirtual, Index_t Offset) {
    typedef CXXMethodDecl::method_iterator meth_iter;
 
    // FIXME: Don't like the nested loops.  For very large inheritance
@@ -927,23 +925,25 @@ public:
       om = CGM.GetAddrOfFunction(GlobalDecl(OMD), Ptr8Ty);
       om = llvm::ConstantExpr::getBitCast(om, Ptr8Ty);
 
-      for (Index_t i = AddressPoint, e = submethods.size();
+      for (Index_t i = 0, e = submethods.size();
            i != e; ++i) {
         // FIXME: begin_overridden_methods might be too lax, covariance */
         if (submethods[i] != om)
           continue;
+        Index[MD] = i;
         submethods[i] = m;
-        Index[MD] = i - AddressPoint;
 
         Thunks.erase(OMD);
         if (MorallyVirtual) {
-          VCallOffset[MD] = Offset/8;
           Index_t &idx = VCall[OMD];
           if (idx == 0) {
+            VCallOffset[MD] = Offset/8;
             idx = VCalls.size()+1;
             VCalls.push_back(0);
+          } else {
+            VCallOffset[MD] = VCallOffset[OMD];
+            VCalls[idx-1] = -VCallOffset[OMD] + Offset/8;
           }
-          VCalls[idx] = Offset/8 - VCallOffset[OMD];
           VCall[MD] = idx;
           // FIXME: 0?
           Thunks[MD] = std::make_pair(0, -((idx+extra+2)*LLVMPointerWidth/8));
@@ -975,18 +975,20 @@ public:
     Thunks.clear();
   }
 
-  void OverrideMethods(std::vector<const CXXRecordDecl *> *Path,
-                       bool MorallyVirtual, Index_t Offset) {
-    for (std::vector<const CXXRecordDecl *>::reverse_iterator i =Path->rbegin(),
+  void OverrideMethods(std::vector<std::pair<const CXXRecordDecl*,
+                       int64_t> > *Path, bool MorallyVirtual) {
+    for (std::vector<std::pair<const CXXRecordDecl*,
+           int64_t> >::reverse_iterator i =Path->rbegin(),
            e = Path->rend(); i != e; ++i) {
-      const CXXRecordDecl *RD = *i;
+      const CXXRecordDecl *RD = i->first;
+      int64_t Offset = i->second;
       for (method_iter mi = RD->method_begin(), me = RD->method_end(); mi != me;
            ++mi)
         if (mi->isVirtual()) {
           const CXXMethodDecl *MD = *mi;
           llvm::Constant *m = wrap(CGM.GetAddrOfFunction(GlobalDecl(MD), Ptr8Ty));
-          OverrideMethod(MD, m, MorallyVirtual, Offset, submethods, 0);
+          OverrideMethod(MD, m, MorallyVirtual, Offset);
         }
     }
   }
@@ -994,11 +996,12 @@ public:
   void AddMethod(const CXXMethodDecl *MD, bool MorallyVirtual, Index_t Offset) {
     llvm::Constant *m = wrap(CGM.GetAddrOfFunction(GlobalDecl(MD), Ptr8Ty));
     // If we can find a previously allocated slot for this, reuse it.
-    if (OverrideMethod(MD, m, MorallyVirtual, Offset, submethods, 0))
+    if (OverrideMethod(MD, m, MorallyVirtual, Offset))
       return;
 
     // else allocate a new slot.
     Index[MD] = submethods.size();
+    submethods.push_back(m);
     if (MorallyVirtual) {
       VCallOffset[MD] = Offset/8;
       Index_t &idx = VCall[MD];
@@ -1008,7 +1011,6 @@ public:
         VCalls.push_back(0);
       }
     }
-    submethods.push_back(m);
   }
 
   void AddMethods(const CXXRecordDecl *RD, bool MorallyVirtual,
@@ -1032,8 +1034,9 @@ public:
       if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
         uint64_t o = Offset + Layout.getBaseClassOffset(Base);
         StartNewTable();
-        std::vector<const CXXRecordDecl *> S;
-        S.push_back(RD);
+        std::vector<std::pair<const CXXRecordDecl *,
+          int64_t> > S;
+        S.push_back(std::make_pair(RD, Offset));
         GenerateVtableForBase(Base, MorallyVirtual, o, false, &S);
       }
     }
@@ -1055,8 +1058,9 @@ public:
     }
 
     // The vcalls come first...
-    for (std::vector<Index_t>::iterator i=VCalls.begin(), e=VCalls.end();
-         i < e; ++i)
+    for (std::vector<Index_t>::reverse_iterator i=VCalls.rbegin(),
+           e=VCalls.rend();
+         i != e; ++i)
       methods.push_back(wrap((0?600:0) + *i));
     VCalls.clear();
 
@@ -1103,7 +1107,8 @@ public:
   int64_t GenerateVtableForBase(const CXXRecordDecl *RD,
                                 bool MorallyVirtual = false, int64_t Offset = 0,
                                 bool ForVirtualBase = false,
-                                std::vector<const CXXRecordDecl *> *Path = 0) {
+                                std::vector<std::pair<const CXXRecordDecl *,
+                                int64_t> > *Path = 0) {
     if (!RD->isDynamicClass())
       return 0;
 
@@ -1128,22 +1133,25 @@ public:
     AddMethods(RD, MorallyVirtual, Offset);
 
     if (Path)
-      OverrideMethods(Path, MorallyVirtual, Offset);
+      OverrideMethods(Path, MorallyVirtual);
 
     return end(RD, offsets, Layout, PrimaryBase, PrimaryBaseWasVirtual,
                MorallyVirtual, Offset, ForVirtualBase);
   }
 
   void GenerateVtableForVBases(const CXXRecordDecl *RD,
-                               std::vector<const CXXRecordDecl *> *Path = 0) {
+                               int64_t Offset = 0,
+                               std::vector<std::pair<const CXXRecordDecl *,
+                               int64_t> > *Path = 0) {
     bool alloc = false;
     if (Path == 0) {
       alloc = true;
-      Path = new std::vector<const CXXRecordDecl *>;
+      Path = new std::vector<std::pair<const CXXRecordDecl *,
+        int64_t> >;
     }
     // FIXME: We also need to override using all paths to a virtual base,
     // right now, we just process the first path
-    Path->push_back(RD);
+    Path->push_back(std::make_pair(RD, Offset));
 
     for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
            e = RD->bases_end(); i != e; ++i) {
       const CXXRecordDecl *Base =
@@ -1155,8 +1163,11 @@ public:
         int64_t BaseOffset = BLayout.getVBaseClassOffset(Base);
         GenerateVtableForBase(Base, true, BaseOffset, true, Path);
       }
+      int64_t BaseOffset = Offset;
+      if (i->isVirtual())
+        BaseOffset = BLayout.getVBaseClassOffset(Base);
       if (Base->getNumVBases())
-        GenerateVtableForVBases(Base, Path);
+        GenerateVtableForVBases(Base, BaseOffset, Path);
     }
     Path->pop_back();
     if (alloc)
diff --git a/clang/lib/CodeGen/Mangle.cpp b/clang/lib/CodeGen/Mangle.cpp
index 53a57d7de92c..04bd52b8d4b8 100644
--- a/clang/lib/CodeGen/Mangle.cpp
+++ b/clang/lib/CodeGen/Mangle.cpp
@@ -41,8 +41,8 @@ namespace {
     bool mangle(const NamedDecl *D);
     void mangleCalloffset(int64_t nv, int64_t v);
-    void mangleThunk(const NamedDecl *ND, int64_t nv, int64_t v);
-    void mangleCovariantThunk(const NamedDecl *ND,
+    void mangleThunk(const FunctionDecl *FD, int64_t nv, int64_t v);
+    void mangleCovariantThunk(const FunctionDecl *FD,
                               int64_t nv_t, int64_t v_t,
                               int64_t nv_r, int64_t v_r);
     void mangleGuardVariable(const VarDecl *D);
@@ -274,25 +274,26 @@ void CXXNameMangler::mangleCalloffset(int64_t nv, int64_t v) {
   Out << "_";
 }
 
-void CXXNameMangler::mangleThunk(const NamedDecl *D, int64_t nv, int64_t v) {
+void CXXNameMangler::mangleThunk(const FunctionDecl *FD, int64_t nv,
+                                 int64_t v) {
   //  <special-name> ::= T <call-offset> <base encoding>
   //                      # base is the nominal target function of thunk
-  Out << "_T";
+  Out << "_ZT";
   mangleCalloffset(nv, v);
-  mangleName(D);
+  mangleFunctionEncoding(FD);
 }
 
-  void CXXNameMangler::mangleCovariantThunk(const NamedDecl *D,
+  void CXXNameMangler::mangleCovariantThunk(const FunctionDecl *FD,
                                            int64_t nv_t, int64_t v_t,
                                            int64_t nv_r, int64_t v_r) {
    //  <special-name> ::= Tc <call-offset> <call-offset> <base encoding>
    //                      # base is the nominal target function of thunk
    //                      # first call-offset is 'this' adjustment
    //                      # second call-offset is result adjustment
-   Out << "_Tc";
+   Out << "_ZTc";
    mangleCalloffset(nv_t, v_t);
    mangleCalloffset(nv_r, v_r);
-   mangleName(D);
+   mangleFunctionEncoding(FD);
 }
 
 void CXXNameMangler::mangleUnqualifiedName(const NamedDecl *ND) {
@@ -894,32 +895,32 @@ namespace clang {
 
   /// \brief Mangles the a thunk with the offset n for the declaration D and
   /// emits that name to the given output stream.
-  void mangleThunk(const NamedDecl *D, int64_t nv, int64_t v,
+  void mangleThunk(const FunctionDecl *FD, int64_t nv, int64_t v,
                    ASTContext &Context, llvm::raw_ostream &os) {
     // FIXME: Hum, we might have to thunk these, fix.
-    assert(!isa<CXXConstructorDecl>(D) &&
+    assert(!isa<CXXConstructorDecl>(FD) &&
            "Use mangleCXXCtor for constructor decls!");
-    assert(!isa<CXXDestructorDecl>(D) &&
+    assert(!isa<CXXDestructorDecl>(FD) &&
            "Use mangleCXXDtor for destructor decls!");
 
     CXXNameMangler Mangler(Context, os);
-    Mangler.mangleThunk(D, nv, v);
+    Mangler.mangleThunk(FD, nv, v);
     os.flush();
   }
 
   /// \brief Mangles the a covariant thunk for the declaration D and emits that
   /// name to the given output stream.
-  void mangleCovariantThunk(const NamedDecl *D, int64_t nv_t, int64_t v_t,
+  void mangleCovariantThunk(const FunctionDecl *FD, int64_t nv_t, int64_t v_t,
                             int64_t nv_r, int64_t v_r, ASTContext &Context,
                             llvm::raw_ostream &os) {
     // FIXME: Hum, we might have to thunk these, fix.
-    assert(!isa<CXXConstructorDecl>(D) &&
+    assert(!isa<CXXConstructorDecl>(FD) &&
           "Use mangleCXXCtor for constructor decls!");
-    assert(!isa<CXXDestructorDecl>(D) &&
+    assert(!isa<CXXDestructorDecl>(FD) &&
           "Use mangleCXXDtor for destructor decls!");
 
     CXXNameMangler Mangler(Context, os);
-    Mangler.mangleCovariantThunk(D, nv_t, v_t, nv_r, v_r);
+    Mangler.mangleCovariantThunk(FD, nv_t, v_t, nv_r, v_r);
     os.flush();
   }
diff --git a/clang/lib/CodeGen/Mangle.h b/clang/lib/CodeGen/Mangle.h
index 5490a7f6018f..855839170ed2 100644
--- a/clang/lib/CodeGen/Mangle.h
+++ b/clang/lib/CodeGen/Mangle.h
@@ -29,12 +29,13 @@ namespace clang {
   class ASTContext;
   class CXXConstructorDecl;
   class CXXDestructorDecl;
+  class FunctionDecl;
   class NamedDecl;
   class VarDecl;
 
   bool mangleName(const NamedDecl *D, ASTContext &Context,
                   llvm::raw_ostream &os);
-  void mangleThunk(const NamedDecl *D, int64_t n, int64_t vn,
+  void mangleThunk(const FunctionDecl *FD, int64_t n, int64_t vn,
                    ASTContext &Context, llvm::raw_ostream &os);
   void mangleCovariantThunk(const NamedDecl *D, bool VirtualThis, int64_t nv_t,
                             int64_t v_t, bool VirtualResult, int64_t nv_r,
diff --git a/clang/test/CodeGenCXX/virt.cpp b/clang/test/CodeGenCXX/virt.cpp
index 563ecf6a5b34..acc7853cd684 100644
--- a/clang/test/CodeGenCXX/virt.cpp
+++ b/clang/test/CodeGenCXX/virt.cpp
@@ -722,6 +722,125 @@ struct test11_D : test11_B {
 // CHECK-LP64-NEXT: .quad __ZN8test11_D2D1Ev
 // CHECK-LP64-NEXT: .quad __ZN8test11_D2D2Ev
 
+struct test13_B {
+  virtual void B1() { }
+  virtual void D() { }
+  virtual void Da();
+  virtual void Db() { }
+  virtual void Dc() { }
+  virtual void B2() { }
+  int i;
+};
+
+
+struct test13_NV1 {
+  virtual void fooNV1() { }
+  virtual void D() { }
+};
+
+
+struct test13_B2 : /* test13_NV1, */ virtual test13_B {
+  virtual void B2a() { }
+  virtual void B2() { }
+  virtual void D() { }
+  virtual void Da();
+  virtual void Dd() { }
+  virtual void B2b() { }
+  int i;
+};
+
+
+struct test13_D : test13_NV1, virtual test13_B2 {
+  virtual void D1() { }
+  virtual void D() { }
+  virtual void Db() { }
+  virtual void Dd() { }
+  virtual void D2() { }
+  virtual void fooNV1() { }
+} test13_d;
+
+// CHECK-LP64:__ZTV8test13_D:
+// CHECK-LP64-NEXT: .quad 24
+// CHECK-LP64-NEXT: .quad 8
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad __ZTI8test13_D
+// CHECK-LP64-NEXT: .quad __ZN8test13_D6fooNV1Ev
+// CHECK-LP64-NEXT: .quad __ZN8test13_D1DEv
+// CHECK-LP64-NEXT: .quad __ZN8test13_D2D1Ev
+// CHECK-LP64-NEXT: .quad __ZN8test13_D2DbEv
+// CHECK-LP64-NEXT: .quad __ZN8test13_D2DdEv
+// CHECK-LP64-NEXT: .quad __ZN8test13_D2D2Ev
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad 18446744073709551608
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad 18446744073709551608
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad 16
+// CHECK-LP64-NEXT: .quad 18446744073709551608
+// CHECK-LP64-NEXT: .quad __ZTI8test13_D
+// CHECK-LP64-NEXT: .quad __ZN9test13_B23B2aEv
+// CHECK-LP64-NEXT: .quad __ZN9test13_B22B2Ev
+// CHECK-LP64-NEXT: .quad __ZTv0_n48_N8test13_D1DEv
+// CHECK-LP64-NEXT: .quad __ZN9test13_B22DaEv
+// CHECK-LP64-NEXT: .quad __ZTv0_n64_N8test13_D2DdEv
+// CHECK-LP64-NEXT: .quad __ZN9test13_B23B2bEv
+// CHECK-LP64-NEXT: .quad 18446744073709551600
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad 18446744073709551592
+// CHECK-LP64-NEXT: .quad 18446744073709551600
+// CHECK-LP64-NEXT: .quad 18446744073709551592
+// CHECK-LP64-NEXT: .space 8
+// CHECK-LP64-NEXT: .quad 18446744073709551592
+// CHECK-LP64-NEXT: .quad __ZTI8test13_D
+// CHECK-LP64-NEXT: .quad __ZN8test13_B2B1Ev
+// CHECK-LP64-NEXT: .quad __ZTv0_n32_N8test13_D1DEv
+// CHECK-LP64-NEXT: .quad __ZTv0_n40_N9test13_B22DaEv
+// CHECK-LP64-NEXT: .quad __ZTv0_n48_N8test13_D2DbEv
+// CHECK-LP64-NEXT: .quad __ZN8test13_B2DcEv
+// CHECK-LP64-NEXT: .quad __ZTv0_n64_N9test13_B22B2Ev
+
+// CHECK-LP32:__ZTV8test13_D:
+// CHECK-LP32-NEXT: .long 12
+// CHECK-LP32-NEXT: .long 4
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long __ZTI8test13_D
+// CHECK-LP32-NEXT: .long __ZN8test13_D6fooNV1Ev
+// CHECK-LP32-NEXT: .long __ZN8test13_D1DEv
+// CHECK-LP32-NEXT: .long __ZN8test13_D2D1Ev
+// CHECK-LP32-NEXT: .long __ZN8test13_D2DbEv
+// CHECK-LP32-NEXT: .long __ZN8test13_D2DdEv
+// CHECK-LP32-NEXT: .long __ZN8test13_D2D2Ev
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long 4294967292
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long 4294967292
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long 8
+// CHECK-LP32-NEXT: .long 4294967292
+// CHECK-LP32-NEXT: .long __ZTI8test13_D
+// CHECK-LP32-NEXT: .long __ZN9test13_B23B2aEv
+// CHECK-LP32-NEXT: .long __ZN9test13_B22B2Ev
+// CHECK-LP32-NEXT: .long __ZTv0_n24_N8test13_D1DEv
+// CHECK-LP32-NEXT: .long __ZN9test13_B22DaEv
+// CHECK-LP32-NEXT: .long __ZTv0_n32_N8test13_D2DdEv
+// CHECK-LP32-NEXT: .long __ZN9test13_B23B2bEv
+// CHECK-LP32-NEXT: .long 4294967288
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long 4294967284
+// CHECK-LP32-NEXT: .long 4294967288
+// CHECK-LP32-NEXT: .long 4294967284
+// CHECK-LP32-NEXT: .space 4
+// CHECK-LP32-NEXT: .long 4294967284
+// CHECK-LP32-NEXT: .long __ZTI8test13_D
+// CHECK-LP32-NEXT: .long __ZN8test13_B2B1Ev
+// CHECK-LP32-NEXT: .long __ZTv0_n16_N8test13_D1DEv
+// CHECK-LP32-NEXT: .long __ZTv0_n20_N9test13_B22DaEv
+// CHECK-LP32-NEXT: .long __ZTv0_n24_N8test13_D2DbEv
+// CHECK-LP32-NEXT: .long __ZN8test13_B2DcEv
+// CHECK-LP32-NEXT: .long __ZTv0_n32_N9test13_B22B2Ev
+
 // CHECK-LP64: __ZTV1B:
 // CHECK-LP64-NEXT: .space 8
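
A note on reading the expected vtables above. The large unsigned constants are
two's-complement negatives (on LP64, 18446744073709551608 is -8,
18446744073709551600 is -16, and 18446744073709551592 is -24; on LP32,
4294967292 is -4, and so on): these are the vbase and vcall offsets that the
patch now emits in reverse order via VCalls.rbegin(). The __ZTv... symbols are
virtual this-adjusting thunks named with the Itanium ABI <call-offset> grammar
quoted in Mangle.cpp: in __ZTv0_n48_N8test13_D1DEv, "0" is the fixed
(non-virtual) this adjustment and "n48" (-48) is the position of the vcall
offset relative to the vtable address point that the object's vptr refers to.
The standalone sketch below shows the adjustment such a thunk performs before
tail-calling the target; adjust_this is a hypothetical helper, and applying
the non-virtual part before the virtual part is an assumption about the
adjustment order (with a fixed offset of 0, as in every Tv0_... symbol above,
the order does not matter):

    #include <cstddef>

    // Sketch of the "v <fixed> _ <vcall-pos>" this adjustment performed by a
    // virtual thunk such as __ZTv0_n48_N8test13_D1DEv (illustrative only,
    // not clang's emitted code).
    static void *adjust_this(void *thisPtr, std::ptrdiff_t fixed,
                             std::ptrdiff_t vcallPos) {
      // Non-virtual part of the adjustment ("0" in __ZTv0_n48_...).
      char *p = static_cast<char *>(thisPtr) + fixed;
      // The vptr points at the vtable's address point; the vcall offsets
      // emitted above live at negative offsets from that point.
      char *vtable = *reinterpret_cast<char **>(p);
      // Fetch the vcall offset ("n48" => the slot 48 bytes below the address
      // point) and finish the adjustment.
      std::ptrdiff_t delta =
          *reinterpret_cast<std::ptrdiff_t *>(vtable + vcallPos);
      return p + delta;
    }

For example, calling D() through a test13_B2 subobject of test13_d enters
__ZTv0_n48_N8test13_D1DEv, which conceptually computes
adjust_this(this, 0, -48); the -48 slot in the secondary vtable above holds
-8, moving this from the virtual test13_B2 base back to the full test13_D
object before test13_D::D() runs.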