AST: Cleanup RecordLayoutBuilder
No functionality changed, just some cleanups:
- Switch some loops to range-based for.
- Name some iterators with a more creative name than "I".
- Reduce dependence on auto. Does RD->bases() give you a list of CXXBaseSpecifiers or CXXRecordDecls? It's more clear to just say which upfront.

llvm-svn: 213121
commit c964b4b4d0
parent 458ea76041
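Before the diff, a minimal standalone sketch of the style point made in the commit message (illustrative code, not part of the patch; BaseSpecifier and Record are hypothetical stand-ins for clang's CXXBaseSpecifier and CXXRecordDecl): spelling out the element type in a range-based for documents what the range yields, where auto would hide it.

#include <string>
#include <vector>

// Hypothetical stand-in for clang::CXXBaseSpecifier.
struct BaseSpecifier {
  std::string TypeName;
  bool Virtual;
};

// Hypothetical stand-in for clang::CXXRecordDecl.
struct Record {
  std::vector<BaseSpecifier> BaseSpecs;
  const std::vector<BaseSpecifier> &bases() const { return BaseSpecs; }
};

unsigned countVirtualBases(const Record &RD) {
  unsigned N = 0;
  // Before the cleanup this would read "for (const auto &I : RD.bases())",
  // which does not tell the reader what bases() yields; naming the element
  // type (and the variable) makes the loop self-documenting.
  for (const BaseSpecifier &Base : RD.bases())
    if (Base.Virtual)
      ++N;
  return N;
}

int main() {
  Record RD{{{"A", false}, {"B", true}}};
  return countVirtualBases(RD) == 1 ? 0 : 1;
}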
@@ -140,8 +140,8 @@ public:
 
 void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
   // Check the bases.
-  for (const auto &I : Class->bases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : Class->bases()) {
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
     CharUnits EmptySize;
     const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
@@ -158,9 +158,9 @@ void EmptySubobjectMap::ComputeEmptySubobjectSizes()
   }
 
   // Check the fields.
-  for (const auto *I : Class->fields()) {
+  for (const FieldDecl *FD : Class->fields()) {
     const RecordType *RT =
-        Context.getBaseElementType(I->getType())->getAs<RecordType>();
+        Context.getBaseElementType(FD->getType())->getAs<RecordType>();
 
     // We only care about record types.
     if (!RT)
@@ -192,8 +192,8 @@ EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
   EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
   if (I == EmptyClassOffsets.end())
     return true;
 
-  const ClassVectorTy& Classes = I->second;
+  const ClassVectorTy &Classes = I->second;
   if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
     return true;
 
@@ -209,7 +209,7 @@ void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
 
   // If we have empty structures inside a union, we can assign both
   // the same offset. Just avoid pushing them twice in the list.
-  ClassVectorTy& Classes = EmptyClassOffsets[Offset];
+  ClassVectorTy &Classes = EmptyClassOffsets[Offset];
   if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
     return;
 
@@ -233,8 +233,7 @@ EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
 
   // Traverse all non-virtual bases.
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
-  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
-    BaseSubobjectInfo* Base = Info->Bases[I];
+  for (const BaseSubobjectInfo *Base : Info->Bases) {
     if (Base->IsVirtual)
       continue;
 
@@ -259,12 +258,12 @@ EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
        E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
     if (I->isBitField())
       continue;
 
     CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
     if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
       return false;
   }
 
   return true;
 }
 
@@ -284,8 +283,7 @@ void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
 
   // Traverse all non-virtual bases.
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
-  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
-    BaseSubobjectInfo* Base = Info->Bases[I];
+  for (const BaseSubobjectInfo *Base : Info->Bases) {
     if (Base->IsVirtual)
       continue;
 
@@ -344,11 +342,11 @@ EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
 
   // Traverse all non-virtual bases.
-  for (const auto &I : RD->bases()) {
-    if (I.isVirtual())
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    if (Base.isVirtual())
       continue;
 
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
     if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
@@ -357,8 +355,8 @@ EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
 
   if (RD == Class) {
     // This is the most derived class, traverse virtual bases as well.
-    for (const auto &I : RD->vbases()) {
-      const CXXRecordDecl *VBaseDecl = I.getType()->getAsCXXRecordDecl();
+    for (const CXXBaseSpecifier &Base : RD->vbases()) {
+      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
 
       CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
       if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
@@ -450,11 +448,11 @@ void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
 
   // Traverse all non-virtual bases.
-  for (const auto &I : RD->bases()) {
-    if (I.isVirtual())
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    if (Base.isVirtual())
      continue;
 
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
     UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
@@ -462,8 +460,8 @@ void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD,
 
   if (RD == Class) {
     // This is the most derived class, traverse virtual bases as well.
-    for (const auto &I : RD->vbases()) {
-      const CXXRecordDecl *VBaseDecl = I.getType()->getAsCXXRecordDecl();
+    for (const CXXBaseSpecifier &Base : RD->vbases()) {
+      const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
 
       CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
       UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
@@ -1069,8 +1067,7 @@ RecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
 
   // Now go through all direct non-virtual bases.
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
-  for (unsigned I = 0, E = Info->Bases.size(); I != E; ++I) {
-    const BaseSubobjectInfo *Base = Info->Bases[I];
+  for (const BaseSubobjectInfo *Base : Info->Bases) {
     if (Base->IsVirtual)
       continue;
 
@@ -1094,13 +1091,13 @@ RecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
     PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
   }
 
-  for (const auto &I : RD->bases()) {
-    assert(!I.getType()->isDependentType() &&
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    assert(!Base.getType()->isDependentType() &&
            "Cannot layout class with dependent bases.");
 
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
-    if (I.isVirtual()) {
+    if (Base.isVirtual()) {
       if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
         bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
 
@@ -1300,18 +1297,18 @@ void RecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
 
 #ifndef NDEBUG
   // Check that we have base offsets for all bases.
-  for (const auto &I : RD->bases()) {
-    if (I.isVirtual())
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    if (Base.isVirtual())
       continue;
 
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
     assert(Bases.count(BaseDecl) && "Did not find base offset!");
   }
 
   // And all virtual bases.
-  for (const auto &I : RD->vbases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : RD->vbases()) {
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
 
     assert(VBases.count(BaseDecl) && "Did not find base offset!");
   }
@@ -1365,14 +1362,13 @@ void RecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
   };
 
   QualType Type;
-  for (unsigned I = 0, E = llvm::array_lengthof(IntegralPODTypes);
-       I != E; ++I) {
-    uint64_t Size = Context.getTypeSize(IntegralPODTypes[I]);
+  for (const QualType &QT : IntegralPODTypes) {
+    uint64_t Size = Context.getTypeSize(QT);
 
     if (Size > FieldSize)
       break;
 
-    Type = IntegralPODTypes[I];
+    Type = QT;
   }
   assert(!Type.isNull() && "Did not find a type!");
 
@@ -1955,7 +1951,7 @@ static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
   bool allowInlineFunctions =
     Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
 
-  for (const auto *MD : RD->methods()) {
+  for (const CXXMethodDecl *MD : RD->methods()) {
     if (!MD->isVirtual())
       continue;
 
@@ -2377,11 +2373,11 @@ MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
   // zero sized members.
   const ASTRecordLayout *PreviousBaseLayout = nullptr;
   // Iterate through the bases and lay out the non-virtual ones.
-  for (const auto &I : RD->bases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
     // Mark and skip virtual bases.
-    if (I.isVirtual()) {
+    if (Base.isVirtual()) {
       HasVBPtr = true;
       continue;
     }
@@ -2411,10 +2407,10 @@ MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
   // itself lead with a zero-sized object, something we track.
   bool CheckLeadingLayout = !PrimaryBase;
   // Iterate through the bases and lay out the non-virtual ones.
-  for (const auto &I : RD->bases()) {
-    if (I.isVirtual())
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    if (Base.isVirtual())
      continue;
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
     // Only lay out bases without extendable VFPtrs on the second pass.
     if (BaseLayout.hasExtendableVFPtr()) {
@@ -2459,7 +2455,7 @@ void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
 
 void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
   LastFieldIsNonZeroWidthBitfield = false;
-  for (const auto *Field : RD->fields())
+  for (const FieldDecl *Field : RD->fields())
     layoutField(Field);
 }
 
@@ -2558,14 +2554,11 @@ void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
   // Increase the size of the object and push back all fields by the offset
   // amount.
   Size += Offset;
-  for (SmallVector<uint64_t, 16>::iterator i = FieldOffsets.begin(),
-                                           e = FieldOffsets.end();
-       i != e; ++i)
-    *i += Context.toBits(Offset);
-  for (BaseOffsetsMapTy::iterator i = Bases.begin(), e = Bases.end();
-       i != e; ++i)
-    if (i->second >= InjectionSite)
-      i->second += Offset;
+  for (uint64_t &FieldOffset : FieldOffsets)
+    FieldOffset += Context.toBits(Offset);
+  for (BaseOffsetsMapTy::value_type &Base : Bases)
+    if (Base.second >= InjectionSite)
+      Base.second += Offset;
 }
 
 void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
@@ -2578,15 +2571,12 @@ void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
   // Increase the size of the object and push back all fields, the vbptr and all
   // bases by the offset amount.
   Size += Offset;
-  for (SmallVectorImpl<uint64_t>::iterator i = FieldOffsets.begin(),
-                                           e = FieldOffsets.end();
-       i != e; ++i)
-    *i += Context.toBits(Offset);
+  for (uint64_t &FieldOffset : FieldOffsets)
+    FieldOffset += Context.toBits(Offset);
   if (HasVBPtr)
     VBPtrOffset += Offset;
-  for (BaseOffsetsMapTy::iterator i = Bases.begin(), e = Bases.end();
-       i != e; ++i)
-    i->second += Offset;
+  for (BaseOffsetsMapTy::value_type &Base : Bases)
+    Base.second += Offset;
 }
 
 void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
@@ -2601,8 +2591,8 @@ void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
   // The alignment of the vtordisp is at least the required alignment of the
   // entire record. This requirement may be present to support vtordisp
   // injection.
-  for (const auto &I : RD->vbases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
+    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
     RequiredAlignment =
         std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
@@ -2613,8 +2603,8 @@ void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
       computeVtorDispSet(RD);
   // Iterate through the virtual bases and lay them out.
   const ASTRecordLayout *PreviousBaseLayout = nullptr;
-  for (const auto &I : RD->vbases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &VBase : RD->vbases()) {
+    const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
     bool HasVtordisp = HasVtordispSet.count(BaseDecl);
     // Insert padding between two bases if the left first one is zero sized or
@@ -2665,10 +2655,10 @@ static bool RequiresVtordisp(
     return true;
   // If any of a virtual bases non-virtual bases (recursively) requires a
   // vtordisp than so does this virtual base.
-  for (const auto &I : RD->bases())
-    if (!I.isVirtual() &&
+  for (const CXXBaseSpecifier &Base : RD->bases())
+    if (!Base.isVirtual() &&
        RequiresVtordisp(BasesWithOverriddenMethods,
-                         I.getType()->getAsCXXRecordDecl()))
+                         Base.getType()->getAsCXXRecordDecl()))
       return true;
   return false;
 }
@@ -2680,8 +2670,8 @@ MicrosoftRecordLayoutBuilder::computeVtorDispSet(const CXXRecordDecl *RD) {
   // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
   // vftables.
   if (RD->getMSVtorDispMode() == MSVtorDispAttr::ForVFTable) {
-    for (const auto &I : RD->vbases()) {
-      const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+    for (const CXXBaseSpecifier &Base : RD->vbases()) {
+      const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
       const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
       if (Layout.hasExtendableVFPtr())
         HasVtordispSet.insert(BaseDecl);
@@ -2691,8 +2681,8 @@ MicrosoftRecordLayoutBuilder::computeVtorDispSet(const CXXRecordDecl *RD) {
 
   // If any of our bases need a vtordisp for this type, so do we. Check our
   // direct bases for vtordisp requirements.
-  for (const auto &I : RD->bases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
     const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
     for (const auto &bi : Layout.getVBaseOffsetsMap())
       if (bi.second.hasVtorDisp())
@@ -2715,9 +2705,9 @@ MicrosoftRecordLayoutBuilder::computeVtorDispSet(const CXXRecordDecl *RD) {
   llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
   llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
   // Seed the working set with our non-destructor virtual methods.
-  for (const auto *I : RD->methods())
-    if (I->isVirtual() && !isa<CXXDestructorDecl>(I))
-      Work.insert(I);
+  for (const CXXMethodDecl *MD : RD->methods())
+    if (MD->isVirtual() && !isa<CXXDestructorDecl>(MD))
+      Work.insert(MD);
   while (!Work.empty()) {
     const CXXMethodDecl *MD = *Work.begin();
     CXXMethodDecl::method_iterator i = MD->begin_overridden_methods(),
@@ -2732,8 +2722,8 @@ MicrosoftRecordLayoutBuilder::computeVtorDispSet(const CXXRecordDecl *RD) {
   }
   // For each of our virtual bases, check if it is in the set of overridden
   // bases or if it transitively contains a non-virtual base that is.
-  for (const auto &I : RD->vbases()) {
-    const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : RD->vbases()) {
+    const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
     if (!HasVtordispSet.count(BaseDecl) &&
        RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl))
       HasVtordispSet.insert(BaseDecl);
@@ -2912,8 +2902,8 @@ uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
     const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
 
     OffsetInBits = 0;
-    for (const auto *CI : IFD->chain())
-      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(CI));
+    for (const NamedDecl *ND : IFD->chain())
+      OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(ND));
   }
 
   return OffsetInBits;
@@ -3012,11 +3002,11 @@ static void DumpCXXRecordLayout(raw_ostream &OS,
 
   // Collect nvbases.
   SmallVector<const CXXRecordDecl *, 4> Bases;
-  for (const auto &I : RD->bases()) {
-    assert(!I.getType()->isDependentType() &&
+  for (const CXXBaseSpecifier &Base : RD->bases()) {
+    assert(!Base.getType()->isDependentType() &&
            "Cannot layout class with dependent bases.");
-    if (!I.isVirtual())
-      Bases.push_back(I.getType()->getAsCXXRecordDecl());
+    if (!Base.isVirtual())
+      Bases.push_back(Base.getType()->getAsCXXRecordDecl());
   }
 
   // Sort nvbases by offset.
@@ -3026,10 +3016,7 @@ static void DumpCXXRecordLayout(raw_ostream &OS,
   });
 
   // Dump (non-virtual) bases
-  for (SmallVectorImpl<const CXXRecordDecl *>::iterator I = Bases.begin(),
-         E = Bases.end();
-       I != E; ++I) {
-    const CXXRecordDecl *Base = *I;
+  for (const CXXRecordDecl *Base : Bases) {
     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
     DumpCXXRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
                         Base == PrimaryBase ? "(primary base)" : "(base)",
@@ -3067,9 +3054,9 @@ static void DumpCXXRecordLayout(raw_ostream &OS,
   // Dump virtual bases.
   const ASTRecordLayout::VBaseOffsetsMapTy &vtordisps =
     Layout.getVBaseOffsetsMap();
-  for (const auto &I : RD->vbases()) {
-    assert(I.isVirtual() && "Found non-virtual class!");
-    const CXXRecordDecl *VBase = I.getType()->getAsCXXRecordDecl();
+  for (const CXXBaseSpecifier &Base : RD->vbases()) {
+    assert(Base.isVirtual() && "Found non-virtual class!");
+    const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();
 
     CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
 