forked from OSchip/llvm-project

Temporarily revert "[LLD] Remove global state in lld/COFF" and "[lld] Add test to
check for timer output". Seems to be causing a number of asan test failures.

This reverts commit b4fa71eed3 and e03c7e367a.

This commit is contained in:
parent 5de8c7f138
commit a2fd05ada9
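
Context for the diff that follows: the two reverted patches moved lld/COFF's per-link state (the symbol table, timers, input-file instance lists, TpiSource bookkeeping, merge chunks) out of global and static variables and into a COFFLinkerContext object that link() creates and threads through the code; this revert restores the globals while the asan failures are investigated. A rough, hypothetical C++ sketch of the two patterns (the names below are illustrative stand-ins, not lld's real classes):

#include <string>
#include <vector>

struct SymbolTable { std::vector<std::string> symbols; };

// Pattern restored by this revert: module-level state shared by every
// link() call in the process, so it must be reset between links.
SymbolTable *symtab = nullptr;   // stand-in for lld's real global

// Pattern being reverted: per-link state owned by one context object
// created at the start of link() and destroyed when the link finishes.
struct LinkerContext {
  SymbolTable symtab;            // owned by the context, not static
};

void linkWithGlobals() {
  static SymbolTable table;      // persists across calls
  symtab = &table;
}

void linkWithContext() {
  LinkerContext ctx;             // scoped to this link invocation
  (void)ctx;
}

int main() {
  linkWithGlobals();
  linkWithContext();
}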

@@ -5,7 +5,6 @@ add_public_tablegen_target(COFFOptionsTableGen)
add_lld_library(lldCOFF
CallGraphSort.cpp
Chunks.cpp
COFFLinkerContext.cpp
DebugTypes.cpp
DLL.cpp
Driver.cpp

@@ -1,53 +0,0 @@
//===- COFFContext.cpp ----------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Description
//
//===----------------------------------------------------------------------===//

#include "COFFLinkerContext.h"
#include "lld/Common/Memory.h"
#include "llvm/DebugInfo/CodeView/TypeHashing.h"

namespace lld {
namespace coff {

COFFLinkerContext::COFFLinkerContext()
    : symtab(*this), rootTimer("Total Linking Time"),
      inputFileTimer("Input File Reading", rootTimer),
      ltoTimer("LTO", rootTimer), gcTimer("GC", rootTimer),
      icfTimer("ICF", rootTimer), codeLayoutTimer("Code Layout", rootTimer),
      outputCommitTimer("Commit Output File", rootTimer),
      totalMapTimer("MAP Emission (Cumulative)", rootTimer),
      symbolGatherTimer("Gather Symbols", totalMapTimer),
      symbolStringsTimer("Build Symbol Strings", totalMapTimer),
      writeTimer("Write to File", totalMapTimer),
      totalPdbLinkTimer("PDB Emission (Cumulative)", rootTimer),
      addObjectsTimer("Add Objects", totalPdbLinkTimer),
      typeMergingTimer("Type Merging", addObjectsTimer),
      loadGHashTimer("Global Type Hashing", addObjectsTimer),
      mergeGHashTimer("GHash Type Merging", addObjectsTimer),
      symbolMergingTimer("Symbol Merging", addObjectsTimer),
      publicsLayoutTimer("Publics Stream Layout", totalPdbLinkTimer),
      tpiStreamLayoutTimer("TPI Stream Layout", totalPdbLinkTimer),
      diskCommitTimer("Commit to Disk", totalPdbLinkTimer) {}

COFFLinkerContext::~COFFLinkerContext() { clearGHashes(); }

void COFFLinkerContext::clearGHashes() {
  for (TpiSource *src : tpiSourceList) {
    if (src->ownedGHashes)
      delete[] src->ghashes.data();
    src->ghashes = {};
    src->isItemIndex.clear();
    src->uniqueTypes.clear();
  }
}

} // namespace coff
} // namespace lld

@@ -1,88 +0,0 @@
//===- COFFLinkerContext.h --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLD_COFF_COFFLinkerContext_H
#define LLD_COFF_COFFLinkerContext_H

#include "Chunks.h"
#include "Config.h"
#include "DebugTypes.h"
#include "InputFiles.h"
#include "SymbolTable.h"
#include "Writer.h"
#include "lld/Common/Timer.h"

namespace lld {
namespace coff {

class COFFLinkerContext {
public:
  COFFLinkerContext();
  COFFLinkerContext(const COFFLinkerContext &) = delete;
  COFFLinkerContext &operator=(const COFFLinkerContext &) = delete;
  ~COFFLinkerContext();

  void addTpiSource(TpiSource *tpi) { tpiSourceList.push_back(tpi); }

  /// Free heap allocated ghashes.
  void clearGHashes();

  SymbolTable symtab;

  std::vector<ObjFile *> objFileInstances;
  std::map<std::string, PDBInputFile *> pdbInputFileInstances;
  std::vector<ImportFile *> importFileInstances;
  std::vector<BitcodeFile *> bitcodeFileInstances;

  MergeChunk *mergeChunkInstances[Log2MaxSectionAlignment + 1] = {};

  /// All sources of type information in the program.
  std::vector<TpiSource *> tpiSourceList;

  std::map<llvm::codeview::GUID, TpiSource *> typeServerSourceMappings;
  std::map<uint32_t, TpiSource *> precompSourceMappings;

  /// List of all output sections. After output sections are finalized, this
  /// can be indexed by getOutputSection.
  std::vector<OutputSection *> outputSections;

  OutputSection *getOutputSection(const Chunk *c) const {
    return c->osidx == 0 ? nullptr : outputSections[c->osidx - 1];
  }

  // All timers used in the COFF linker.
  Timer rootTimer;
  Timer inputFileTimer;
  Timer ltoTimer;
  Timer gcTimer;
  Timer icfTimer;

  // Writer timers.
  Timer codeLayoutTimer;
  Timer outputCommitTimer;
  Timer totalMapTimer;
  Timer symbolGatherTimer;
  Timer symbolStringsTimer;
  Timer writeTimer;

  // PDB timers.
  Timer totalPdbLinkTimer;
  Timer addObjectsTimer;
  Timer typeMergingTimer;
  Timer loadGHashTimer;
  Timer mergeGHashTimer;
  Timer symbolMergingTimer;
  Timer publicsLayoutTimer;
  Timer tpiStreamLayoutTimer;
  Timer diskCommitTimer;
};

} // namespace coff
} // namespace lld

#endif

@@ -12,7 +12,6 @@
//===----------------------------------------------------------------------===//

#include "CallGraphSort.h"
#include "COFFLinkerContext.h"
#include "InputFiles.h"
#include "SymbolTable.h"
#include "Symbols.h"

@@ -49,7 +48,7 @@ struct Cluster {

class CallGraphSort {
public:
CallGraphSort(const COFFLinkerContext &ctx);
CallGraphSort();

DenseMap<const SectionChunk *, int> run();

@@ -71,7 +70,7 @@ using SectionPair = std::pair<const SectionChunk *, const SectionChunk *>;
// Take the edge list in Config->CallGraphProfile, resolve symbol names to
// Symbols, and generate a graph between InputSections with the provided
// weights.
CallGraphSort::CallGraphSort(const COFFLinkerContext &ctx) {
CallGraphSort::CallGraphSort() {
MapVector<SectionPair, uint64_t> &profile = config->callGraphProfile;
DenseMap<const SectionChunk *, int> secToCluster;

@@ -96,7 +95,7 @@ CallGraphSort::CallGraphSort(const COFFLinkerContext &ctx) {
// output. This messes with the cluster size and density calculations. We
// would also end up moving input sections in other output sections without
// moving them closer to what calls them.
if (ctx.getOutputSection(fromSec) != ctx.getOutputSection(toSec))
if (fromSec->getOutputSection() != toSec->getOutputSection())
continue;

int from = getOrCreateNode(fromSec);

@@ -241,7 +240,6 @@ DenseMap<const SectionChunk *, int> CallGraphSort::run() {
// This first builds a call graph based on the profile data then merges sections
// according to the C³ heuristic. All clusters are then sorted by a density
// metric to further improve locality.
DenseMap<const SectionChunk *, int>
coff::computeCallGraphProfileOrder(const COFFLinkerContext &ctx) {
return CallGraphSort(ctx).run();
DenseMap<const SectionChunk *, int> coff::computeCallGraphProfileOrder() {
return CallGraphSort().run();
}

@@ -14,10 +14,8 @@
namespace lld {
namespace coff {
class SectionChunk;
class COFFLinkerContext;

llvm::DenseMap<const SectionChunk *, int>
computeCallGraphProfileOrder(const COFFLinkerContext &ctx);
llvm::DenseMap<const SectionChunk *, int> computeCallGraphProfileOrder();
} // namespace coff
} // namespace lld

@@ -7,11 +7,10 @@
//===----------------------------------------------------------------------===//

#include "Chunks.h"
#include "COFFLinkerContext.h"
#include "InputFiles.h"
#include "SymbolTable.h"
#include "Symbols.h"
#include "Writer.h"
#include "SymbolTable.h"
#include "lld/Common/ErrorHandler.h"
#include "llvm/ADT/Twine.h"
#include "llvm/BinaryFormat/COFF.h"

@@ -386,7 +385,7 @@ void SectionChunk::applyRelocation(uint8_t *off,
// section is needed to compute SECREL and SECTION relocations used in debug
// info.
Chunk *c = sym ? sym->getChunk() : nullptr;
OutputSection *os = c ? file->ctx.getOutputSection(c) : nullptr;
OutputSection *os = c ? c->getOutputSection() : nullptr;

// Skip the relocation if it refers to a discarded section, and diagnose it
// as an error if appropriate. If a symbol was discarded early, it may be

@ -939,16 +938,18 @@ uint8_t Baserel::getDefaultType() {
|
|||
}
|
||||
}
|
||||
|
||||
MergeChunk *MergeChunk::instances[Log2MaxSectionAlignment + 1] = {};
|
||||
|
||||
MergeChunk::MergeChunk(uint32_t alignment)
|
||||
: builder(StringTableBuilder::RAW, alignment) {
|
||||
setAlignment(alignment);
|
||||
}
|
||||
|
||||
void MergeChunk::addSection(COFFLinkerContext &ctx, SectionChunk *c) {
|
||||
void MergeChunk::addSection(SectionChunk *c) {
|
||||
assert(isPowerOf2_32(c->getAlignment()));
|
||||
uint8_t p2Align = llvm::Log2_32(c->getAlignment());
|
||||
assert(p2Align < array_lengthof(ctx.mergeChunkInstances));
|
||||
auto *&mc = ctx.mergeChunkInstances[p2Align];
|
||||
assert(p2Align < array_lengthof(instances));
|
||||
auto *&mc = instances[p2Align];
|
||||
if (!mc)
|
||||
mc = make<MergeChunk>(c->getAlignment());
|
||||
mc->sections.push_back(c);
|
||||
|
|
|
@@ -101,6 +101,7 @@ public:
// chunk has a back pointer to an output section.
void setOutputSectionIdx(uint16_t o) { osidx = o; }
uint16_t getOutputSectionIdx() const { return osidx; }
OutputSection *getOutputSection() const;

// Windows-specific.
// Collect all locations that contain absolute addresses for base relocations.

@ -414,7 +415,7 @@ inline StringRef Chunk::getDebugName() const {
|
|||
class MergeChunk : public NonSectionChunk {
|
||||
public:
|
||||
MergeChunk(uint32_t alignment);
|
||||
static void addSection(COFFLinkerContext &ctx, SectionChunk *c);
|
||||
static void addSection(SectionChunk *c);
|
||||
void finalizeContents();
|
||||
void assignSubsectionRVAs();
|
||||
|
||||
|
@ -423,6 +424,7 @@ public:
|
|||
size_t getSize() const override;
|
||||
void writeTo(uint8_t *buf) const override;
|
||||
|
||||
static MergeChunk *instances[Log2MaxSectionAlignment + 1];
|
||||
std::vector<SectionChunk *> sections;
|
||||
|
||||
private:
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "DLL.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "SymbolTable.h"
|
||||
#include "llvm/Object/COFF.h"
|
||||
|
@ -632,7 +631,7 @@ uint64_t DelayLoadContents::getDirSize() {
|
|||
return dirs.size() * sizeof(delay_import_directory_table_entry);
|
||||
}
|
||||
|
||||
void DelayLoadContents::create(COFFLinkerContext &ctx, Defined *h) {
|
||||
void DelayLoadContents::create(Defined *h) {
|
||||
helper = h;
|
||||
std::vector<std::vector<DefinedImportData *>> v = binImports(imports);
|
||||
|
||||
|
@ -661,13 +660,13 @@ void DelayLoadContents::create(COFFLinkerContext &ctx, Defined *h) {
|
|||
// call targets for Control Flow Guard.
|
||||
StringRef symName = saver.save("__imp_load_" + extName);
|
||||
s->loadThunkSym =
|
||||
cast<DefinedSynthetic>(ctx.symtab.addSynthetic(symName, t));
|
||||
cast<DefinedSynthetic>(symtab->addSynthetic(symName, t));
|
||||
}
|
||||
}
|
||||
thunks.push_back(tm);
|
||||
StringRef tmName =
|
||||
saver.save("__tailMerge_" + syms[0]->getDLLName().lower());
|
||||
ctx.symtab.addSynthetic(tmName, tm);
|
||||
symtab->addSynthetic(tmName, tm);
|
||||
// Terminate with null values.
|
||||
addresses.push_back(make<NullChunk>(8));
|
||||
names.push_back(make<NullChunk>(8));
|
||||
|
|
|
@@ -40,7 +40,7 @@ class DelayLoadContents {
public:
void add(DefinedImportData *sym) { imports.push_back(sym); }
bool empty() { return imports.empty(); }
void create(COFFLinkerContext &ctx, Defined *helper);
void create(Defined *helper);
std::vector<Chunk *> getChunks();
std::vector<Chunk *> getDataChunks();
ArrayRef<Chunk *> getCodeChunks() { return thunks; }

@ -7,7 +7,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "DebugTypes.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "Driver.h"
|
||||
#include "InputFiles.h"
|
||||
|
@ -15,6 +14,7 @@
|
|||
#include "TypeMerger.h"
|
||||
#include "lld/Common/ErrorHandler.h"
|
||||
#include "lld/Common/Memory.h"
|
||||
#include "lld/Common/Timer.h"
|
||||
#include "llvm/DebugInfo/CodeView/TypeIndexDiscovery.h"
|
||||
#include "llvm/DebugInfo/CodeView/TypeRecord.h"
|
||||
#include "llvm/DebugInfo/CodeView/TypeRecordHelpers.h"
|
||||
|
@ -46,8 +46,8 @@ class TypeServerIpiSource;
|
|||
// before any dependent OBJ.
|
||||
class TypeServerSource : public TpiSource {
|
||||
public:
|
||||
explicit TypeServerSource(COFFLinkerContext &ctx, PDBInputFile *f)
|
||||
: TpiSource(ctx, PDB, nullptr), pdbInputFile(f) {
|
||||
explicit TypeServerSource(PDBInputFile *f)
|
||||
: TpiSource(PDB, nullptr), pdbInputFile(f) {
|
||||
if (f->loadErr && *f->loadErr)
|
||||
return;
|
||||
pdb::PDBFile &file = f->session->getPDBFile();
|
||||
|
@ -55,7 +55,7 @@ public:
|
|||
if (!expectedInfo)
|
||||
return;
|
||||
Guid = expectedInfo->getGuid();
|
||||
auto it = ctx.typeServerSourceMappings.emplace(Guid, this);
|
||||
auto it = mappings.emplace(Guid, this);
|
||||
assert(it.second);
|
||||
(void)it;
|
||||
}
|
||||
|
@ -74,6 +74,8 @@ public:
|
|||
|
||||
// The PDB signature GUID.
|
||||
codeview::GUID Guid;
|
||||
|
||||
static std::map<codeview::GUID, TypeServerSource *> mappings;
|
||||
};
|
||||
|
||||
// Companion to TypeServerSource. Stores the index map for the IPI stream in the
|
||||
|
@ -81,8 +83,7 @@ public:
|
|||
// invariant of one type index space per source.
|
||||
class TypeServerIpiSource : public TpiSource {
|
||||
public:
|
||||
explicit TypeServerIpiSource(COFFLinkerContext &ctx)
|
||||
: TpiSource(ctx, PDBIpi, nullptr) {}
|
||||
explicit TypeServerIpiSource() : TpiSource(PDBIpi, nullptr) {}
|
||||
|
||||
friend class TypeServerSource;
|
||||
|
||||
|
@ -100,8 +101,8 @@ class UseTypeServerSource : public TpiSource {
|
|||
Expected<TypeServerSource *> getTypeServerSource();
|
||||
|
||||
public:
|
||||
UseTypeServerSource(COFFLinkerContext &ctx, ObjFile *f, TypeServer2Record ts)
|
||||
: TpiSource(ctx, UsingPDB, f), typeServerDependency(ts) {}
|
||||
UseTypeServerSource(ObjFile *f, TypeServer2Record ts)
|
||||
: TpiSource(UsingPDB, f), typeServerDependency(ts) {}
|
||||
|
||||
Error mergeDebugT(TypeMerger *m) override;
|
||||
|
||||
|
@ -120,11 +121,11 @@ public:
|
|||
// such files, clang does not.
|
||||
class PrecompSource : public TpiSource {
|
||||
public:
|
||||
PrecompSource(COFFLinkerContext &ctx, ObjFile *f) : TpiSource(ctx, PCH, f) {
|
||||
PrecompSource(ObjFile *f) : TpiSource(PCH, f) {
|
||||
if (!f->pchSignature || !*f->pchSignature)
|
||||
fatal(toString(f) +
|
||||
" claims to be a PCH object, but does not have a valid signature");
|
||||
auto it = ctx.precompSourceMappings.emplace(*f->pchSignature, this);
|
||||
auto it = mappings.emplace(*f->pchSignature, this);
|
||||
if (!it.second)
|
||||
fatal("a PCH object with the same signature has already been provided (" +
|
||||
toString(it.first->second->file) + " and " + toString(file) + ")");
|
||||
|
@ -133,14 +134,16 @@ public:
|
|||
void loadGHashes() override;
|
||||
|
||||
bool isDependency() const override { return true; }
|
||||
|
||||
static std::map<uint32_t, PrecompSource *> mappings;
|
||||
};
|
||||
|
||||
// This class represents the debug type stream of an OBJ file that depends on a
|
||||
// Microsoft precompiled headers OBJ (see PrecompSource).
|
||||
class UsePrecompSource : public TpiSource {
|
||||
public:
|
||||
UsePrecompSource(COFFLinkerContext &ctx, ObjFile *f, PrecompRecord precomp)
|
||||
: TpiSource(ctx, UsingPCH, f), precompDependency(precomp) {}
|
||||
UsePrecompSource(ObjFile *f, PrecompRecord precomp)
|
||||
: TpiSource(UsingPCH, f), precompDependency(precomp) {}
|
||||
|
||||
Error mergeDebugT(TypeMerger *m) override;
|
||||
|
||||
|
@ -150,10 +153,6 @@ public:
|
|||
private:
|
||||
Error mergeInPrecompHeaderObj();
|
||||
|
||||
PrecompSource *findObjByName(StringRef fileNameOnly);
|
||||
PrecompSource *findPrecompSource(ObjFile *file, PrecompRecord &pr);
|
||||
Expected<PrecompSource *> findPrecompMap(ObjFile *file, PrecompRecord &pr);
|
||||
|
||||
public:
|
||||
// Information about the Precomp OBJ dependency, that needs to be loaded in
|
||||
// before merging this OBJ.
|
||||
|
@ -161,9 +160,13 @@ public:
|
|||
};
|
||||
} // namespace
|
||||
|
||||
TpiSource::TpiSource(COFFLinkerContext &ctx, TpiKind k, ObjFile *f)
|
||||
: ctx(ctx), kind(k), tpiSrcIdx(ctx.tpiSourceList.size()), file(f) {
|
||||
ctx.addTpiSource(this);
|
||||
std::vector<TpiSource *> TpiSource::instances;
|
||||
ArrayRef<TpiSource *> TpiSource::dependencySources;
|
||||
ArrayRef<TpiSource *> TpiSource::objectSources;
|
||||
|
||||
TpiSource::TpiSource(TpiKind k, ObjFile *f)
|
||||
: kind(k), tpiSrcIdx(instances.size()), file(f) {
|
||||
instances.push_back(this);
|
||||
}
|
||||
|
||||
// Vtable key method.
|
||||
|
@ -172,35 +175,52 @@ TpiSource::~TpiSource() {
|
|||
consumeError(std::move(typeMergingError));
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makeTpiSource(COFFLinkerContext &ctx, ObjFile *file) {
|
||||
return make<TpiSource>(ctx, TpiSource::Regular, file);
|
||||
void TpiSource::sortDependencies() {
|
||||
// Order dependencies first, but preserve the existing order.
|
||||
std::vector<TpiSource *> deps;
|
||||
std::vector<TpiSource *> objs;
|
||||
for (TpiSource *s : instances)
|
||||
(s->isDependency() ? deps : objs).push_back(s);
|
||||
uint32_t numDeps = deps.size();
|
||||
uint32_t numObjs = objs.size();
|
||||
instances = std::move(deps);
|
||||
instances.insert(instances.end(), objs.begin(), objs.end());
|
||||
for (uint32_t i = 0, e = instances.size(); i < e; ++i)
|
||||
instances[i]->tpiSrcIdx = i;
|
||||
dependencySources = makeArrayRef(instances.data(), numDeps);
|
||||
objectSources = makeArrayRef(instances.data() + numDeps, numObjs);
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makeTypeServerSource(COFFLinkerContext &ctx,
|
||||
PDBInputFile *pdbInputFile) {
|
||||
TpiSource *lld::coff::makeTpiSource(ObjFile *file) {
|
||||
return make<TpiSource>(TpiSource::Regular, file);
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makeTypeServerSource(PDBInputFile *pdbInputFile) {
|
||||
// Type server sources come in pairs: the TPI stream, and the IPI stream.
|
||||
auto *tpiSource = make<TypeServerSource>(ctx, pdbInputFile);
|
||||
auto *tpiSource = make<TypeServerSource>(pdbInputFile);
|
||||
if (pdbInputFile->session->getPDBFile().hasPDBIpiStream())
|
||||
tpiSource->ipiSrc = make<TypeServerIpiSource>(ctx);
|
||||
tpiSource->ipiSrc = make<TypeServerIpiSource>();
|
||||
return tpiSource;
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makeUseTypeServerSource(COFFLinkerContext &ctx,
|
||||
ObjFile *file,
|
||||
TpiSource *lld::coff::makeUseTypeServerSource(ObjFile *file,
|
||||
TypeServer2Record ts) {
|
||||
return make<UseTypeServerSource>(ctx, file, ts);
|
||||
return make<UseTypeServerSource>(file, ts);
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makePrecompSource(COFFLinkerContext &ctx, ObjFile *file) {
|
||||
return make<PrecompSource>(ctx, file);
|
||||
TpiSource *lld::coff::makePrecompSource(ObjFile *file) {
|
||||
return make<PrecompSource>(file);
|
||||
}
|
||||
|
||||
TpiSource *lld::coff::makeUsePrecompSource(COFFLinkerContext &ctx,
|
||||
ObjFile *file,
|
||||
TpiSource *lld::coff::makeUsePrecompSource(ObjFile *file,
|
||||
PrecompRecord precomp) {
|
||||
return make<UsePrecompSource>(ctx, file, precomp);
|
||||
return make<UsePrecompSource>(file, precomp);
|
||||
}
|
||||
|
||||
std::map<codeview::GUID, TypeServerSource *> TypeServerSource::mappings;
|
||||
|
||||
std::map<uint32_t, PrecompSource *> PrecompSource::mappings;
|
||||
|
||||
bool TpiSource::remapTypeIndex(TypeIndex &ti, TiRefKind refKind) const {
|
||||
if (ti.isSimple())
|
||||
return true;
|
||||
|
@ -399,12 +419,12 @@ Expected<TypeServerSource *> UseTypeServerSource::getTypeServerSource() {
|
|||
StringRef tsPath = typeServerDependency.getName();
|
||||
|
||||
TypeServerSource *tsSrc;
|
||||
auto it = ctx.typeServerSourceMappings.find(tsId);
|
||||
if (it != ctx.typeServerSourceMappings.end()) {
|
||||
tsSrc = (TypeServerSource *)it->second;
|
||||
auto it = TypeServerSource::mappings.find(tsId);
|
||||
if (it != TypeServerSource::mappings.end()) {
|
||||
tsSrc = it->second;
|
||||
} else {
|
||||
// The file failed to load, lookup by name
|
||||
PDBInputFile *pdb = PDBInputFile::findFromRecordPath(ctx, tsPath, file);
|
||||
PDBInputFile *pdb = PDBInputFile::findFromRecordPath(tsPath, file);
|
||||
if (!pdb)
|
||||
return createFileError(tsPath, errorCodeToError(std::error_code(
|
||||
ENOENT, std::generic_category())));
|
||||
|
@ -451,37 +471,36 @@ static bool equalsPath(StringRef path1, StringRef path2) {
|
|||
}
|
||||
|
||||
// Find by name an OBJ provided on the command line
|
||||
PrecompSource *UsePrecompSource::findObjByName(StringRef fileNameOnly) {
|
||||
static PrecompSource *findObjByName(StringRef fileNameOnly) {
|
||||
SmallString<128> currentPath;
|
||||
for (auto kv : ctx.precompSourceMappings) {
|
||||
for (auto kv : PrecompSource::mappings) {
|
||||
StringRef currentFileName = sys::path::filename(kv.second->file->getName(),
|
||||
sys::path::Style::windows);
|
||||
|
||||
// Compare based solely on the file name (link.exe behavior)
|
||||
if (equalsPath(currentFileName, fileNameOnly))
|
||||
return (PrecompSource *)kv.second;
|
||||
return kv.second;
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
PrecompSource *UsePrecompSource::findPrecompSource(ObjFile *file,
|
||||
PrecompRecord &pr) {
|
||||
static PrecompSource *findPrecompSource(ObjFile *file, PrecompRecord &pr) {
|
||||
// Cross-compile warning: given that Clang doesn't generate LF_PRECOMP
|
||||
// records, we assume the OBJ comes from a Windows build of cl.exe. Thusly,
|
||||
// the paths embedded in the OBJs are in the Windows format.
|
||||
SmallString<128> prFileName =
|
||||
sys::path::filename(pr.getPrecompFilePath(), sys::path::Style::windows);
|
||||
|
||||
auto it = ctx.precompSourceMappings.find(pr.getSignature());
|
||||
if (it != ctx.precompSourceMappings.end()) {
|
||||
return (PrecompSource *)it->second;
|
||||
auto it = PrecompSource::mappings.find(pr.getSignature());
|
||||
if (it != PrecompSource::mappings.end()) {
|
||||
return it->second;
|
||||
}
|
||||
// Lookup by name
|
||||
return findObjByName(prFileName);
|
||||
}
|
||||
|
||||
Expected<PrecompSource *> UsePrecompSource::findPrecompMap(ObjFile *file,
|
||||
PrecompRecord &pr) {
|
||||
static Expected<PrecompSource *> findPrecompMap(ObjFile *file,
|
||||
PrecompRecord &pr) {
|
||||
PrecompSource *precomp = findPrecompSource(file, pr);
|
||||
|
||||
if (!precomp)
|
||||
|
@ -536,6 +555,22 @@ Error UsePrecompSource::mergeDebugT(TypeMerger *m) {
|
|||
return TpiSource::mergeDebugT(m);
|
||||
}
|
||||
|
||||
uint32_t TpiSource::countTypeServerPDBs() {
|
||||
return TypeServerSource::mappings.size();
|
||||
}
|
||||
|
||||
uint32_t TpiSource::countPrecompObjs() {
|
||||
return PrecompSource::mappings.size();
|
||||
}
|
||||
|
||||
void TpiSource::clear() {
|
||||
// Clean up any owned ghash allocations.
|
||||
clearGHashes();
|
||||
TpiSource::instances.clear();
|
||||
TypeServerSource::mappings.clear();
|
||||
PrecompSource::mappings.clear();
|
||||
}
|
||||
|
||||
//===----------------------------------------------------------------------===//
|
||||
// Parellel GHash type merging implementation.
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
@ -891,8 +926,7 @@ struct GHashTable {
|
|||
/// Insert the cell with the given ghash into the table. Return the insertion
|
||||
/// position in the table. It is safe for the caller to store the insertion
|
||||
/// position because the table cannot be resized.
|
||||
uint32_t insert(COFFLinkerContext &ctx, GloballyHashedType ghash,
|
||||
GHashCell newCell);
|
||||
uint32_t insert(GloballyHashedType ghash, GHashCell newCell);
|
||||
};
|
||||
|
||||
/// A ghash table cell for deduplicating types from TpiSources.
|
||||
|
@ -931,8 +965,8 @@ public:
|
|||
bool isItem() const { return data & (1ULL << 63U); }
|
||||
|
||||
/// Get the ghash key for this cell.
|
||||
GloballyHashedType getGHash(const COFFLinkerContext &ctx) const {
|
||||
return ctx.tpiSourceList[getTpiSrcIdx()]->ghashes[getGHashIdx()];
|
||||
GloballyHashedType getGHash() const {
|
||||
return TpiSource::instances[getTpiSrcIdx()]->ghashes[getGHashIdx()];
|
||||
}
|
||||
|
||||
/// The priority function for the cell. The data is stored such that lower
|
||||
|
@ -962,8 +996,7 @@ void GHashTable::init(uint32_t newTableSize) {
|
|||
tableSize = newTableSize;
|
||||
}
|
||||
|
||||
uint32_t GHashTable::insert(COFFLinkerContext &ctx, GloballyHashedType ghash,
|
||||
GHashCell newCell) {
|
||||
uint32_t GHashTable::insert(GloballyHashedType ghash, GHashCell newCell) {
|
||||
assert(!newCell.isEmpty() && "cannot insert empty cell value");
|
||||
|
||||
// FIXME: The low bytes of SHA1 have low entropy for short records, which
|
||||
|
@ -982,7 +1015,7 @@ uint32_t GHashTable::insert(COFFLinkerContext &ctx, GloballyHashedType ghash,
|
|||
// - cell has non-matching key: hash collision, probe next cell
|
||||
auto *cellPtr = reinterpret_cast<std::atomic<GHashCell> *>(&table[idx]);
|
||||
GHashCell oldCell(cellPtr->load());
|
||||
while (oldCell.isEmpty() || oldCell.getGHash(ctx) == ghash) {
|
||||
while (oldCell.isEmpty() || oldCell.getGHash() == ghash) {
|
||||
// Check if there is an existing ghash entry with a higher priority
|
||||
// (earlier ordering). If so, this is a duplicate, we are done.
|
||||
if (!oldCell.isEmpty() && oldCell < newCell)
|
||||
|
@ -1007,22 +1040,22 @@ uint32_t GHashTable::insert(COFFLinkerContext &ctx, GloballyHashedType ghash,
|
|||
llvm_unreachable("left infloop");
|
||||
}
|
||||
|
||||
TypeMerger::TypeMerger(COFFLinkerContext &c, llvm::BumpPtrAllocator &alloc)
|
||||
: typeTable(alloc), idTable(alloc), ctx(c) {}
|
||||
TypeMerger::TypeMerger(llvm::BumpPtrAllocator &alloc)
|
||||
: typeTable(alloc), idTable(alloc) {}
|
||||
|
||||
TypeMerger::~TypeMerger() = default;
|
||||
|
||||
void TypeMerger::mergeTypesWithGHash() {
|
||||
// Load ghashes. Do type servers and PCH objects first.
|
||||
{
|
||||
ScopedTimer t1(ctx.loadGHashTimer);
|
||||
parallelForEach(dependencySources,
|
||||
ScopedTimer t1(loadGHashTimer);
|
||||
parallelForEach(TpiSource::dependencySources,
|
||||
[&](TpiSource *source) { source->loadGHashes(); });
|
||||
parallelForEach(objectSources,
|
||||
parallelForEach(TpiSource::objectSources,
|
||||
[&](TpiSource *source) { source->loadGHashes(); });
|
||||
}
|
||||
|
||||
ScopedTimer t2(ctx.mergeGHashTimer);
|
||||
ScopedTimer t2(mergeGHashTimer);
|
||||
GHashState ghashState;
|
||||
|
||||
// Estimate the size of hash table needed to deduplicate ghashes. This *must*
|
||||
|
@ -1033,7 +1066,7 @@ void TypeMerger::mergeTypesWithGHash() {
|
|||
// small compared to total memory usage, at eight bytes per input type record,
|
||||
// and most input type records are larger than eight bytes.
|
||||
size_t tableSize = 0;
|
||||
for (TpiSource *source : ctx.tpiSourceList)
|
||||
for (TpiSource *source : TpiSource::instances)
|
||||
tableSize += source->ghashes.size();
|
||||
|
||||
// Cap the table size so that we can use 32-bit cell indices. Type indices are
|
||||
|
@ -1047,8 +1080,8 @@ void TypeMerger::mergeTypesWithGHash() {
|
|||
// position. Because the table does not rehash, the position will not change
|
||||
// under insertion. After insertion is done, the value of the cell can be read
|
||||
// to retrieve the final PDB type index.
|
||||
parallelForEachN(0, ctx.tpiSourceList.size(), [&](size_t tpiSrcIdx) {
|
||||
TpiSource *source = ctx.tpiSourceList[tpiSrcIdx];
|
||||
parallelForEachN(0, TpiSource::instances.size(), [&](size_t tpiSrcIdx) {
|
||||
TpiSource *source = TpiSource::instances[tpiSrcIdx];
|
||||
source->indexMapStorage.resize(source->ghashes.size());
|
||||
for (uint32_t i = 0, e = source->ghashes.size(); i < e; i++) {
|
||||
if (source->shouldOmitFromPdb(i)) {
|
||||
|
@ -1058,7 +1091,7 @@ void TypeMerger::mergeTypesWithGHash() {
|
|||
GloballyHashedType ghash = source->ghashes[i];
|
||||
bool isItem = source->isItemIndex.test(i);
|
||||
uint32_t cellIdx =
|
||||
ghashState.table.insert(ctx, ghash, GHashCell(isItem, tpiSrcIdx, i));
|
||||
ghashState.table.insert(ghash, GHashCell(isItem, tpiSrcIdx, i));
|
||||
|
||||
// Store the ghash cell index as a type index in indexMapStorage. Later
|
||||
// we will replace it with the PDB type index.
|
||||
|
@ -1104,7 +1137,7 @@ void TypeMerger::mergeTypesWithGHash() {
|
|||
for (uint32_t i = 0, e = entries.size(); i < e; ++i) {
|
||||
auto &cell = entries[i];
|
||||
uint32_t tpiSrcIdx = cell.getTpiSrcIdx();
|
||||
TpiSource *source = ctx.tpiSourceList[tpiSrcIdx];
|
||||
TpiSource *source = TpiSource::instances[tpiSrcIdx];
|
||||
source->uniqueTypes.push_back(cell.getGHashIdx());
|
||||
|
||||
// Update the ghash table to store the destination PDB type index in the
|
||||
|
@ -1117,37 +1150,21 @@ void TypeMerger::mergeTypesWithGHash() {
|
|||
}
|
||||
|
||||
// In parallel, remap all types.
|
||||
for_each(dependencySources, [&](TpiSource *source) {
|
||||
for_each(TpiSource::dependencySources, [&](TpiSource *source) {
|
||||
source->remapTpiWithGHashes(&ghashState);
|
||||
});
|
||||
parallelForEach(objectSources, [&](TpiSource *source) {
|
||||
parallelForEach(TpiSource::objectSources, [&](TpiSource *source) {
|
||||
source->remapTpiWithGHashes(&ghashState);
|
||||
});
|
||||
|
||||
// Build a global map of from function ID to function type.
|
||||
for (TpiSource *source : ctx.tpiSourceList) {
|
||||
for (TpiSource *source : TpiSource::instances) {
|
||||
for (auto idToType : source->funcIdToType)
|
||||
funcIdToType.insert(idToType);
|
||||
source->funcIdToType.clear();
|
||||
}
|
||||
|
||||
ctx.clearGHashes();
|
||||
}
|
||||
|
||||
void TypeMerger::sortDependencies() {
|
||||
// Order dependencies first, but preserve the existing order.
|
||||
std::vector<TpiSource *> deps;
|
||||
std::vector<TpiSource *> objs;
|
||||
for (TpiSource *s : ctx.tpiSourceList)
|
||||
(s->isDependency() ? deps : objs).push_back(s);
|
||||
uint32_t numDeps = deps.size();
|
||||
uint32_t numObjs = objs.size();
|
||||
ctx.tpiSourceList = std::move(deps);
|
||||
ctx.tpiSourceList.insert(ctx.tpiSourceList.end(), objs.begin(), objs.end());
|
||||
for (uint32_t i = 0, e = ctx.tpiSourceList.size(); i < e; ++i)
|
||||
ctx.tpiSourceList[i]->tpiSrcIdx = i;
|
||||
dependencySources = makeArrayRef(ctx.tpiSourceList.data(), numDeps);
|
||||
objectSources = makeArrayRef(ctx.tpiSourceList.data() + numDeps, numObjs);
|
||||
TpiSource::clearGHashes();
|
||||
}
|
||||
|
||||
/// Given the index into the ghash table for a particular type, return the type
|
||||
|
@ -1170,3 +1187,13 @@ void TpiSource::fillMapFromGHashes(GHashState *g) {
|
|||
loadPdbTypeIndexFromCell(g, fakeCellIndex.toArrayIndex());
|
||||
}
|
||||
}
|
||||
|
||||
void TpiSource::clearGHashes() {
|
||||
for (TpiSource *src : TpiSource::instances) {
|
||||
if (src->ownedGHashes)
|
||||
delete[] src->ghashes.data();
|
||||
src->ghashes = {};
|
||||
src->isItemIndex.clear();
|
||||
src->uniqueTypes.clear();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -37,13 +37,12 @@ class ObjFile;
|
|||
class PDBInputFile;
|
||||
class TypeMerger;
|
||||
struct GHashState;
|
||||
class COFFLinkerContext;
|
||||
|
||||
class TpiSource {
|
||||
public:
|
||||
enum TpiKind : uint8_t { Regular, PCH, UsingPCH, PDB, PDBIpi, UsingPDB };
|
||||
|
||||
TpiSource(COFFLinkerContext &ctx, TpiKind k, ObjFile *f);
|
||||
TpiSource(TpiKind k, ObjFile *f);
|
||||
virtual ~TpiSource();
|
||||
|
||||
/// Produce a mapping from the type and item indices used in the object
|
||||
|
@ -94,8 +93,6 @@ protected:
|
|||
// Walk over file->debugTypes and fill in the isItemIndex bit vector.
|
||||
void fillIsItemIndexFromDebugT();
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
|
||||
public:
|
||||
bool remapTypesInSymbolRecord(MutableArrayRef<uint8_t> rec);
|
||||
|
||||
|
@ -112,6 +109,29 @@ public:
|
|||
return ghashIdx == endPrecompGHashIdx;
|
||||
}
|
||||
|
||||
/// All sources of type information in the program.
|
||||
static std::vector<TpiSource *> instances;
|
||||
|
||||
/// Dependency type sources, such as type servers or PCH object files. These
|
||||
/// must be processed before objects that rely on them. Set by
|
||||
/// TpiSources::sortDependencies.
|
||||
static ArrayRef<TpiSource *> dependencySources;
|
||||
|
||||
/// Object file sources. These must be processed after dependencySources.
|
||||
static ArrayRef<TpiSource *> objectSources;
|
||||
|
||||
/// Sorts the dependencies and reassigns TpiSource indices.
|
||||
static void sortDependencies();
|
||||
|
||||
static uint32_t countTypeServerPDBs();
|
||||
static uint32_t countPrecompObjs();
|
||||
|
||||
/// Free heap allocated ghashes.
|
||||
static void clearGHashes();
|
||||
|
||||
/// Clear global data structures for TpiSources.
|
||||
static void clear();
|
||||
|
||||
const TpiKind kind;
|
||||
bool ownedGHashes = true;
|
||||
uint32_t tpiSrcIdx = 0;
|
||||
|
@ -166,13 +186,12 @@ public:
|
|||
uint64_t nbTypeRecordsBytes = 0;
|
||||
};
|
||||
|
||||
TpiSource *makeTpiSource(COFFLinkerContext &ctx, ObjFile *f);
|
||||
TpiSource *makeTypeServerSource(COFFLinkerContext &ctx,
|
||||
PDBInputFile *pdbInputFile);
|
||||
TpiSource *makeUseTypeServerSource(COFFLinkerContext &ctx, ObjFile *file,
|
||||
TpiSource *makeTpiSource(ObjFile *file);
|
||||
TpiSource *makeTypeServerSource(PDBInputFile *pdbInputFile);
|
||||
TpiSource *makeUseTypeServerSource(ObjFile *file,
|
||||
llvm::codeview::TypeServer2Record ts);
|
||||
TpiSource *makePrecompSource(COFFLinkerContext &ctx, ObjFile *file);
|
||||
TpiSource *makeUsePrecompSource(COFFLinkerContext &ctx, ObjFile *file,
|
||||
TpiSource *makePrecompSource(ObjFile *file);
|
||||
TpiSource *makeUsePrecompSource(ObjFile *file,
|
||||
llvm::codeview::PrecompRecord ts);
|
||||
|
||||
} // namespace coff
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "Driver.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Config.h"
|
||||
#include "DebugTypes.h"
|
||||
#include "ICF.h"
|
||||
|
@ -60,6 +59,8 @@ using namespace llvm::sys;
|
|||
namespace lld {
|
||||
namespace coff {
|
||||
|
||||
static Timer inputFileTimer("Input File Reading", Timer::root());
|
||||
|
||||
Configuration *config;
|
||||
LinkerDriver *driver;
|
||||
|
||||
|
@ -69,7 +70,14 @@ bool link(ArrayRef<const char *> args, bool canExitEarly, raw_ostream &stdoutOS,
|
|||
lld::stderrOS = &stderrOS;
|
||||
|
||||
errorHandler().cleanupCallback = []() {
|
||||
TpiSource::clear();
|
||||
freeArena();
|
||||
ObjFile::instances.clear();
|
||||
PDBInputFile::instances.clear();
|
||||
ImportFile::instances.clear();
|
||||
BitcodeFile::instances.clear();
|
||||
memset(MergeChunk::instances, 0, sizeof(MergeChunk::instances));
|
||||
OutputSection::clear();
|
||||
};
|
||||
|
||||
errorHandler().logName = args::getFilenameWithoutExe(args[0]);
|
||||
|
@ -79,9 +87,9 @@ bool link(ArrayRef<const char *> args, bool canExitEarly, raw_ostream &stdoutOS,
|
|||
errorHandler().exitEarly = canExitEarly;
|
||||
stderrOS.enable_colors(stderrOS.has_colors());
|
||||
|
||||
COFFLinkerContext ctx;
|
||||
config = make<Configuration>();
|
||||
driver = make<LinkerDriver>(ctx);
|
||||
symtab = make<SymbolTable>();
|
||||
driver = make<LinkerDriver>();
|
||||
|
||||
driver->linkerMain(args);
|
||||
|
||||
|
@ -166,8 +174,8 @@ static StringRef mangle(StringRef sym) {
|
|||
return sym;
|
||||
}
|
||||
|
||||
bool LinkerDriver::findUnderscoreMangle(StringRef sym) {
|
||||
Symbol *s = ctx.symtab.findMangle(mangle(sym));
|
||||
static bool findUnderscoreMangle(StringRef sym) {
|
||||
Symbol *s = symtab->findMangle(mangle(sym));
|
||||
return s && !isa<Undefined>(s);
|
||||
}
|
||||
|
||||
|
@ -205,30 +213,30 @@ void LinkerDriver::addBuffer(std::unique_ptr<MemoryBuffer> mb,
|
|||
addArchiveBuffer(m, "<whole-archive>", filename, memberIndex++);
|
||||
return;
|
||||
}
|
||||
ctx.symtab.addFile(make<ArchiveFile>(ctx, mbref));
|
||||
symtab->addFile(make<ArchiveFile>(mbref));
|
||||
break;
|
||||
case file_magic::bitcode:
|
||||
if (lazy)
|
||||
ctx.symtab.addFile(make<LazyObjFile>(ctx, mbref));
|
||||
symtab->addFile(make<LazyObjFile>(mbref));
|
||||
else
|
||||
ctx.symtab.addFile(make<BitcodeFile>(ctx, mbref, "", 0));
|
||||
symtab->addFile(make<BitcodeFile>(mbref, "", 0));
|
||||
break;
|
||||
case file_magic::coff_object:
|
||||
case file_magic::coff_import_library:
|
||||
if (lazy)
|
||||
ctx.symtab.addFile(make<LazyObjFile>(ctx, mbref));
|
||||
symtab->addFile(make<LazyObjFile>(mbref));
|
||||
else
|
||||
ctx.symtab.addFile(make<ObjFile>(ctx, mbref));
|
||||
symtab->addFile(make<ObjFile>(mbref));
|
||||
break;
|
||||
case file_magic::pdb:
|
||||
ctx.symtab.addFile(make<PDBInputFile>(ctx, mbref));
|
||||
symtab->addFile(make<PDBInputFile>(mbref));
|
||||
break;
|
||||
case file_magic::coff_cl_gl_object:
|
||||
error(filename + ": is not a native COFF file. Recompile without /GL");
|
||||
break;
|
||||
case file_magic::pecoff_executable:
|
||||
if (config->mingw) {
|
||||
ctx.symtab.addFile(make<DLLFile>(ctx, mbref));
|
||||
symtab->addFile(make<DLLFile>(mbref));
|
||||
break;
|
||||
}
|
||||
if (filename.endswith_insensitive(".dll")) {
|
||||
|
@ -272,24 +280,24 @@ void LinkerDriver::addArchiveBuffer(MemoryBufferRef mb, StringRef symName,
|
|||
uint64_t offsetInArchive) {
|
||||
file_magic magic = identify_magic(mb.getBuffer());
|
||||
if (magic == file_magic::coff_import_library) {
|
||||
InputFile *imp = make<ImportFile>(ctx, mb);
|
||||
InputFile *imp = make<ImportFile>(mb);
|
||||
imp->parentName = parentName;
|
||||
ctx.symtab.addFile(imp);
|
||||
symtab->addFile(imp);
|
||||
return;
|
||||
}
|
||||
|
||||
InputFile *obj;
|
||||
if (magic == file_magic::coff_object) {
|
||||
obj = make<ObjFile>(ctx, mb);
|
||||
obj = make<ObjFile>(mb);
|
||||
} else if (magic == file_magic::bitcode) {
|
||||
obj = make<BitcodeFile>(ctx, mb, parentName, offsetInArchive);
|
||||
obj = make<BitcodeFile>(mb, parentName, offsetInArchive);
|
||||
} else {
|
||||
error("unknown file type: " + mb.getBufferIdentifier());
|
||||
return;
|
||||
}
|
||||
|
||||
obj->parentName = parentName;
|
||||
ctx.symtab.addFile(obj);
|
||||
symtab->addFile(obj);
|
||||
log("Loaded " + toString(obj) + " for " + symName);
|
||||
}
|
||||
|
||||
|
@ -539,7 +547,7 @@ void LinkerDriver::addLibSearchPaths() {
|
|||
}
|
||||
|
||||
Symbol *LinkerDriver::addUndefined(StringRef name) {
|
||||
Symbol *b = ctx.symtab.addUndefined(name);
|
||||
Symbol *b = symtab->addUndefined(name);
|
||||
if (!b->isGCRoot) {
|
||||
b->isGCRoot = true;
|
||||
config->gcroot.push_back(b);
|
||||
|
@ -554,14 +562,14 @@ StringRef LinkerDriver::mangleMaybe(Symbol *s) {
|
|||
return "";
|
||||
|
||||
// Otherwise, see if a similar, mangled symbol exists in the symbol table.
|
||||
Symbol *mangled = ctx.symtab.findMangle(unmangled->getName());
|
||||
Symbol *mangled = symtab->findMangle(unmangled->getName());
|
||||
if (!mangled)
|
||||
return "";
|
||||
|
||||
// If we find a similar mangled symbol, make this an alias to it and return
|
||||
// its name.
|
||||
log(unmangled->getName() + " aliased to " + mangled->getName());
|
||||
unmangled->weakAlias = ctx.symtab.addUndefined(mangled->getName());
|
||||
unmangled->weakAlias = symtab->addUndefined(mangled->getName());
|
||||
return mangled->getName();
|
||||
}
|
||||
|
||||
|
@ -931,7 +939,7 @@ void LinkerDriver::enqueueTask(std::function<void()> task) {
|
|||
}
|
||||
|
||||
bool LinkerDriver::run() {
|
||||
ScopedTimer t(ctx.inputFileTimer);
|
||||
ScopedTimer t(inputFileTimer);
|
||||
|
||||
bool didWork = !taskQueue.empty();
|
||||
while (!taskQueue.empty()) {
|
||||
|
@ -944,7 +952,7 @@ bool LinkerDriver::run() {
|
|||
// Parse an /order file. If an option is given, the linker places
|
||||
// COMDAT sections in the same order as their names appear in the
|
||||
// given file.
|
||||
static void parseOrderFile(COFFLinkerContext &ctx, StringRef arg) {
|
||||
static void parseOrderFile(StringRef arg) {
|
||||
// For some reason, the MSVC linker requires a filename to be
|
||||
// preceded by "@".
|
||||
if (!arg.startswith("@")) {
|
||||
|
@ -954,7 +962,7 @@ static void parseOrderFile(COFFLinkerContext &ctx, StringRef arg) {
|
|||
|
||||
// Get a list of all comdat sections for error checking.
|
||||
DenseSet<StringRef> set;
|
||||
for (Chunk *c : ctx.symtab.getChunks())
|
||||
for (Chunk *c : symtab->getChunks())
|
||||
if (auto *sec = dyn_cast<SectionChunk>(c))
|
||||
if (sec->sym)
|
||||
set.insert(sec->sym->getName());
|
||||
|
@ -988,7 +996,7 @@ static void parseOrderFile(COFFLinkerContext &ctx, StringRef arg) {
|
|||
driver->takeBuffer(std::move(mb));
|
||||
}
|
||||
|
||||
static void parseCallGraphFile(COFFLinkerContext &ctx, StringRef path) {
|
||||
static void parseCallGraphFile(StringRef path) {
|
||||
std::unique_ptr<MemoryBuffer> mb =
|
||||
CHECK(MemoryBuffer::getFile(path, /*IsText=*/false,
|
||||
/*RequiresNullTerminator=*/false,
|
||||
|
@ -997,7 +1005,7 @@ static void parseCallGraphFile(COFFLinkerContext &ctx, StringRef path) {
|
|||
|
||||
// Build a map from symbol name to section.
|
||||
DenseMap<StringRef, Symbol *> map;
|
||||
for (ObjFile *file : ctx.objFileInstances)
|
||||
for (ObjFile *file : ObjFile::instances)
|
||||
for (Symbol *sym : file->getSymbols())
|
||||
if (sym)
|
||||
map[sym->getName()] = sym;
|
||||
|
@ -1034,8 +1042,8 @@ static void parseCallGraphFile(COFFLinkerContext &ctx, StringRef path) {
|
|||
driver->takeBuffer(std::move(mb));
|
||||
}
|
||||
|
||||
static void readCallGraphsFromObjectFiles(COFFLinkerContext &ctx) {
|
||||
for (ObjFile *obj : ctx.objFileInstances) {
|
||||
static void readCallGraphsFromObjectFiles() {
|
||||
for (ObjFile *obj : ObjFile::instances) {
|
||||
if (obj->callgraphSec) {
|
||||
ArrayRef<uint8_t> contents;
|
||||
cantFail(
|
||||
|
@ -1069,7 +1077,7 @@ static void markAddrsig(Symbol *s) {
|
|||
c->keepUnique = true;
|
||||
}
|
||||
|
||||
static void findKeepUniqueSections(COFFLinkerContext &ctx) {
|
||||
static void findKeepUniqueSections() {
|
||||
// Exported symbols could be address-significant in other executables or DSOs,
|
||||
// so we conservatively mark them as address-significant.
|
||||
for (Export &r : config->exports)
|
||||
|
@ -1077,7 +1085,7 @@ static void findKeepUniqueSections(COFFLinkerContext &ctx) {
|
|||
|
||||
// Visit the address-significance table in each object file and mark each
|
||||
// referenced symbol as address-significant.
|
||||
for (ObjFile *obj : ctx.objFileInstances) {
|
||||
for (ObjFile *obj : ObjFile::instances) {
|
||||
ArrayRef<Symbol *> syms = obj->getSymbols();
|
||||
if (obj->addrsigSec) {
|
||||
ArrayRef<uint8_t> contents;
|
||||
|
@ -1161,7 +1169,7 @@ static void parsePDBAltPath(StringRef altPath) {
|
|||
void LinkerDriver::convertResources() {
|
||||
std::vector<ObjFile *> resourceObjFiles;
|
||||
|
||||
for (ObjFile *f : ctx.objFileInstances) {
|
||||
for (ObjFile *f : ObjFile::instances) {
|
||||
if (f->isResourceObjFile())
|
||||
resourceObjFiles.push_back(f);
|
||||
}
|
||||
|
@ -1183,9 +1191,8 @@ void LinkerDriver::convertResources() {
|
|||
f->includeResourceChunks();
|
||||
return;
|
||||
}
|
||||
ObjFile *f =
|
||||
make<ObjFile>(ctx, convertResToCOFF(resources, resourceObjFiles));
|
||||
ctx.symtab.addFile(f);
|
||||
ObjFile *f = make<ObjFile>(convertResToCOFF(resources, resourceObjFiles));
|
||||
symtab->addFile(f);
|
||||
f->includeResourceChunks();
|
||||
}
|
||||
|
||||
|
@ -1212,9 +1219,9 @@ void LinkerDriver::maybeExportMinGWSymbols(const opt::InputArgList &args) {
|
|||
if (Optional<StringRef> path = doFindFile(arg->getValue()))
|
||||
exporter.addWholeArchive(*path);
|
||||
|
||||
ctx.symtab.forEachSymbol([&](Symbol *s) {
|
||||
symtab->forEachSymbol([&](Symbol *s) {
|
||||
auto *def = dyn_cast<Defined>(s);
|
||||
if (!exporter.shouldExport(ctx, def))
|
||||
if (!exporter.shouldExport(def))
|
||||
return;
|
||||
|
||||
if (!def->isGCRoot) {
|
||||
|
@ -1259,7 +1266,7 @@ Optional<std::string> getReproduceFile(const opt::InputArgList &args) {
|
|||
}
|
||||
|
||||
void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
||||
ScopedTimer rootTimer(ctx.rootTimer);
|
||||
ScopedTimer rootTimer(Timer::root());
|
||||
|
||||
// Needed for LTO.
|
||||
InitializeAllTargetInfos();
|
||||
|
@ -2011,32 +2018,32 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
if (config->imageBase == uint64_t(-1))
|
||||
config->imageBase = getDefaultImageBase();
|
||||
|
||||
ctx.symtab.addSynthetic(mangle("__ImageBase"), nullptr);
|
||||
symtab->addSynthetic(mangle("__ImageBase"), nullptr);
|
||||
if (config->machine == I386) {
|
||||
ctx.symtab.addAbsolute("___safe_se_handler_table", 0);
|
||||
ctx.symtab.addAbsolute("___safe_se_handler_count", 0);
|
||||
symtab->addAbsolute("___safe_se_handler_table", 0);
|
||||
symtab->addAbsolute("___safe_se_handler_count", 0);
|
||||
}
|
||||
|
||||
ctx.symtab.addAbsolute(mangle("__guard_fids_count"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_fids_table"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_flags"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_iat_count"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_iat_table"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_longjmp_count"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_longjmp_table"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_fids_count"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_fids_table"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_flags"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_iat_count"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_iat_table"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_longjmp_count"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_longjmp_table"), 0);
|
||||
// Needed for MSVC 2017 15.5 CRT.
|
||||
ctx.symtab.addAbsolute(mangle("__enclave_config"), 0);
|
||||
symtab->addAbsolute(mangle("__enclave_config"), 0);
|
||||
// Needed for MSVC 2019 16.8 CRT.
|
||||
ctx.symtab.addAbsolute(mangle("__guard_eh_cont_count"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__guard_eh_cont_table"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_eh_cont_count"), 0);
|
||||
symtab->addAbsolute(mangle("__guard_eh_cont_table"), 0);
|
||||
|
||||
if (config->pseudoRelocs) {
|
||||
ctx.symtab.addAbsolute(mangle("__RUNTIME_PSEUDO_RELOC_LIST__"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__RUNTIME_PSEUDO_RELOC_LIST_END__"), 0);
|
||||
symtab->addAbsolute(mangle("__RUNTIME_PSEUDO_RELOC_LIST__"), 0);
|
||||
symtab->addAbsolute(mangle("__RUNTIME_PSEUDO_RELOC_LIST_END__"), 0);
|
||||
}
|
||||
if (config->mingw) {
|
||||
ctx.symtab.addAbsolute(mangle("__CTOR_LIST__"), 0);
|
||||
ctx.symtab.addAbsolute(mangle("__DTOR_LIST__"), 0);
|
||||
symtab->addAbsolute(mangle("__CTOR_LIST__"), 0);
|
||||
symtab->addAbsolute(mangle("__DTOR_LIST__"), 0);
|
||||
}
|
||||
|
||||
// This code may add new undefined symbols to the link, which may enqueue more
|
||||
|
@ -2062,12 +2069,12 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
for (auto pair : config->alternateNames) {
|
||||
StringRef from = pair.first;
|
||||
StringRef to = pair.second;
|
||||
Symbol *sym = ctx.symtab.find(from);
|
||||
Symbol *sym = symtab->find(from);
|
||||
if (!sym)
|
||||
continue;
|
||||
if (auto *u = dyn_cast<Undefined>(sym))
|
||||
if (!u->weakAlias)
|
||||
u->weakAlias = ctx.symtab.addUndefined(to);
|
||||
u->weakAlias = symtab->addUndefined(to);
|
||||
}
|
||||
|
||||
// If any inputs are bitcode files, the LTO code generator may create
|
||||
|
@ -2075,25 +2082,25 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// file's symbol table. If any of those library functions are defined in a
|
||||
// bitcode file in an archive member, we need to arrange to use LTO to
|
||||
// compile those archive members by adding them to the link beforehand.
|
||||
if (!ctx.bitcodeFileInstances.empty())
|
||||
if (!BitcodeFile::instances.empty())
|
||||
for (auto *s : lto::LTO::getRuntimeLibcallSymbols())
|
||||
ctx.symtab.addLibcall(s);
|
||||
symtab->addLibcall(s);
|
||||
|
||||
// Windows specific -- if __load_config_used can be resolved, resolve it.
|
||||
if (ctx.symtab.findUnderscore("_load_config_used"))
|
||||
if (symtab->findUnderscore("_load_config_used"))
|
||||
addUndefined(mangle("_load_config_used"));
|
||||
} while (run());
|
||||
|
||||
if (args.hasArg(OPT_include_optional)) {
|
||||
// Handle /includeoptional
|
||||
for (auto *arg : args.filtered(OPT_include_optional))
|
||||
if (dyn_cast_or_null<LazyArchive>(ctx.symtab.find(arg->getValue())))
|
||||
if (dyn_cast_or_null<LazyArchive>(symtab->find(arg->getValue())))
|
||||
addUndefined(arg->getValue());
|
||||
while (run());
|
||||
}
|
||||
|
||||
// Create wrapped symbols for -wrap option.
|
||||
std::vector<WrappedSymbol> wrapped = addWrappedSymbols(ctx, args);
|
||||
std::vector<WrappedSymbol> wrapped = addWrappedSymbols(args);
|
||||
// Load more object files that might be needed for wrapped symbols.
|
||||
if (!wrapped.empty())
|
||||
while (run());
|
||||
|
@ -2119,7 +2126,7 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// If it ends up pulling in more object files from static libraries,
|
||||
// (and maybe doing more stdcall fixups along the way), this would need
|
||||
// to loop these two calls.
|
||||
ctx.symtab.loadMinGWSymbols();
|
||||
symtab->loadMinGWSymbols();
|
||||
run();
|
||||
}
|
||||
|
||||
|
@ -2127,8 +2134,8 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// If we are going to do codegen for link-time optimization, check for
|
||||
// unresolvable symbols first, so we don't spend time generating code that
|
||||
// will fail to link anyway.
|
||||
if (!ctx.bitcodeFileInstances.empty() && !config->forceUnresolved)
|
||||
ctx.symtab.reportUnresolvable();
|
||||
if (!BitcodeFile::instances.empty() && !config->forceUnresolved)
|
||||
symtab->reportUnresolvable();
|
||||
if (errorCount())
|
||||
return;
|
||||
|
||||
|
@ -2142,7 +2149,7 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// Do LTO by compiling bitcode input files to a set of native COFF files then
|
||||
// link those files (unless -thinlto-index-only was given, in which case we
|
||||
// resolve symbols and write indices, but don't generate native code or link).
|
||||
ctx.symtab.addCombinedLTOObjects();
|
||||
symtab->addCombinedLTOObjects();
|
||||
|
||||
// If -thinlto-index-only is given, we should create only "index
|
||||
// files" and not object files. Index file creation is already done
|
||||
|
@ -2156,10 +2163,10 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
|
||||
// Apply symbol renames for -wrap.
|
||||
if (!wrapped.empty())
|
||||
wrapSymbols(ctx, wrapped);
|
||||
wrapSymbols(wrapped);
|
||||
|
||||
// Resolve remaining undefined symbols and warn about imported locals.
|
||||
ctx.symtab.resolveRemainingUndefines();
|
||||
symtab->resolveRemainingUndefines();
|
||||
if (errorCount())
|
||||
return;
|
||||
|
||||
|
@ -2170,12 +2177,12 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// order provided on the command line, while lld will pull in needed
|
||||
// files from static libraries only after the last object file on the
|
||||
// command line.
|
||||
for (auto i = ctx.objFileInstances.begin(), e = ctx.objFileInstances.end();
|
||||
for (auto i = ObjFile::instances.begin(), e = ObjFile::instances.end();
|
||||
i != e; i++) {
|
||||
ObjFile *file = *i;
|
||||
if (isCrtend(file->getName())) {
|
||||
ctx.objFileInstances.erase(i);
|
||||
ctx.objFileInstances.push_back(file);
|
||||
ObjFile::instances.erase(i);
|
||||
ObjFile::instances.push_back(file);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -2200,7 +2207,7 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
StringRef name = pair.first;
|
||||
uint32_t alignment = pair.second;
|
||||
|
||||
Symbol *sym = ctx.symtab.find(name);
|
||||
Symbol *sym = symtab->find(name);
|
||||
if (!sym) {
|
||||
warn("/aligncomm symbol " + name + " not found");
|
||||
continue;
|
||||
|
@ -2232,16 +2239,16 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
if (auto *arg = args.getLastArg(OPT_order)) {
|
||||
if (args.hasArg(OPT_call_graph_ordering_file))
|
||||
error("/order and /call-graph-order-file may not be used together");
|
||||
parseOrderFile(ctx, arg->getValue());
|
||||
parseOrderFile(arg->getValue());
|
||||
config->callGraphProfileSort = false;
|
||||
}
|
||||
|
||||
// Handle /call-graph-ordering-file and /call-graph-profile-sort (default on).
|
||||
if (config->callGraphProfileSort) {
|
||||
if (auto *arg = args.getLastArg(OPT_call_graph_ordering_file)) {
|
||||
parseCallGraphFile(ctx, arg->getValue());
|
||||
parseCallGraphFile(arg->getValue());
|
||||
}
|
||||
readCallGraphsFromObjectFiles(ctx);
|
||||
readCallGraphsFromObjectFiles();
|
||||
}
|
||||
|
||||
// Handle /print-symbol-order.
|
||||
|
@ -2258,7 +2265,7 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
// functions. This doesn't bring in more object files, but only marks
|
||||
// functions that already have been included to be retained.
|
||||
for (const char *n : {"__gxx_personality_v0", "__gcc_personality_v0"}) {
|
||||
Defined *d = dyn_cast_or_null<Defined>(ctx.symtab.findUnderscore(n));
|
||||
Defined *d = dyn_cast_or_null<Defined>(symtab->findUnderscore(n));
|
||||
if (d && !d->isGCRoot) {
|
||||
d->isGCRoot = true;
|
||||
config->gcroot.push_back(d);
|
||||
|
@ -2266,7 +2273,7 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
}
|
||||
}
|
||||
|
||||
markLive(ctx);
|
||||
markLive(symtab->getChunks());
|
||||
}
|
||||
|
||||
// Needs to happen after the last call to addFile().
|
||||
|
@ -2274,17 +2281,17 @@ void LinkerDriver::linkerMain(ArrayRef<const char *> argsArr) {
|
|||
|
||||
// Identify identical COMDAT sections to merge them.
|
||||
if (config->doICF != ICFLevel::None) {
|
||||
findKeepUniqueSections(ctx);
|
||||
doICF(ctx, config->doICF);
|
||||
findKeepUniqueSections();
|
||||
doICF(symtab->getChunks(), config->doICF);
|
||||
}
|
||||
|
||||
// Write the result.
|
||||
writeResult(ctx);
|
||||
writeResult();
|
||||
|
||||
// Stop early so we can print the results.
|
||||
rootTimer.stop();
|
||||
if (config->showTiming)
|
||||
ctx.rootTimer.print();
|
||||
Timer::root().print();
|
||||
}
|
||||
|
||||
} // namespace coff
|
||||
|
|
|
@ -9,7 +9,6 @@
|
|||
#ifndef LLD_COFF_DRIVER_H
|
||||
#define LLD_COFF_DRIVER_H
|
||||
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Config.h"
|
||||
#include "SymbolTable.h"
|
||||
#include "lld/Common/LLVM.h"
|
||||
|
@ -79,8 +78,6 @@ private:
|
|||
|
||||
class LinkerDriver {
|
||||
public:
|
||||
LinkerDriver(COFFLinkerContext &c) : ctx(c) {}
|
||||
|
||||
void linkerMain(llvm::ArrayRef<const char *> args);
|
||||
|
||||
// Used by the resolver to parse .drectve section contents.
|
||||
|
@ -106,8 +103,6 @@ private:
|
|||
StringRef doFindLib(StringRef filename);
|
||||
StringRef doFindLibMinGW(StringRef filename);
|
||||
|
||||
bool findUnderscoreMangle(StringRef sym);
|
||||
|
||||
// Parses LIB environment which contains a list of search paths.
|
||||
void addLibSearchPaths();
|
||||
|
||||
|
@ -153,8 +148,6 @@ private:
|
|||
std::vector<MemoryBufferRef> resources;
|
||||
|
||||
llvm::StringSet<> directivesExports;
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
};
|
||||
|
||||
// Functions below this line are defined in DriverUtils.cpp.
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "ICF.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "Symbols.h"
|
||||
#include "lld/Common/ErrorHandler.h"
|
||||
|
@ -37,10 +36,12 @@ using namespace llvm;
|
|||
namespace lld {
|
||||
namespace coff {
|
||||
|
||||
static Timer icfTimer("ICF", Timer::root());
|
||||
|
||||
class ICF {
|
||||
public:
|
||||
ICF(COFFLinkerContext &c, ICFLevel icfLevel) : icfLevel(icfLevel), ctx(c){};
|
||||
void run();
|
||||
ICF(ICFLevel icfLevel) : icfLevel(icfLevel){};
|
||||
void run(ArrayRef<Chunk *> v);
|
||||
|
||||
private:
|
||||
void segregate(size_t begin, size_t end, bool constant);
|
||||
|
@ -63,8 +64,6 @@ private:
|
|||
int cnt = 0;
|
||||
std::atomic<bool> repeat = {false};
|
||||
ICFLevel icfLevel = ICFLevel::All;
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
};
|
||||
|
||||
// Returns true if section S is subject of ICF.
|
||||
|
@ -247,12 +246,12 @@ void ICF::forEachClass(std::function<void(size_t, size_t)> fn) {
|
|||
// Merge identical COMDAT sections.
|
||||
// Two sections are considered the same if their section headers,
|
||||
// contents and relocations are all the same.
|
||||
void ICF::run() {
|
||||
ScopedTimer t(ctx.icfTimer);
|
||||
void ICF::run(ArrayRef<Chunk *> vec) {
|
||||
ScopedTimer t(icfTimer);
|
||||
|
||||
// Collect only mergeable sections and group by hash value.
|
||||
uint32_t nextId = 1;
|
||||
for (Chunk *c : ctx.symtab.getChunks()) {
|
||||
for (Chunk *c : vec) {
|
||||
if (auto *sc = dyn_cast<SectionChunk>(c)) {
|
||||
if (isEligible(sc))
|
||||
chunks.push_back(sc);
|
||||
|
@ -263,7 +262,7 @@ void ICF::run() {
|
|||
|
||||
// Make sure that ICF doesn't merge sections that are being handled by string
|
||||
// tail merging.
|
||||
for (MergeChunk *mc : ctx.mergeChunkInstances)
|
||||
for (MergeChunk *mc : MergeChunk::instances)
|
||||
if (mc)
|
||||
for (SectionChunk *sc : mc->sections)
|
||||
sc->eqClass[0] = nextId++;
|
||||
|
@ -318,8 +317,8 @@ void ICF::run() {
|
|||
}
|
||||
|
||||
// Entry point to ICF.
|
||||
void doICF(COFFLinkerContext &ctx, ICFLevel icfLevel) {
|
||||
ICF(ctx, icfLevel).run();
|
||||
void doICF(ArrayRef<Chunk *> chunks, ICFLevel icfLevel) {
|
||||
ICF(icfLevel).run(chunks);
|
||||
}
|
||||
|
||||
} // namespace coff
|
||||
|
|
|
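For illustration, the first ICF step restored above — collecting eligible section chunks and grouping them into equivalence classes by content — can be sketched roughly as follows. The SectionChunk fields and the assignInitialClasses helper are simplified stand-ins invented for this sketch, not lld's real declarations, and the real pass also compares section headers and relocations before merging.

#include <cstdint>
#include <string>
#include <unordered_map>
#include <vector>

struct SectionChunk {
  std::string contents;          // raw section bytes (simplified)
  uint32_t eqClass[2] = {0, 0};  // current/next equivalence class id
};

// Assign an initial equivalence class to each chunk so that chunks with
// identical contents start in the same class.
void assignInitialClasses(std::vector<SectionChunk *> &chunks) {
  std::unordered_map<size_t, uint32_t> classOfHash;
  uint32_t nextId = 1;
  for (SectionChunk *sc : chunks) {
    size_t h = std::hash<std::string>{}(sc->contents);
    auto [it, inserted] = classOfHash.emplace(h, nextId);
    if (inserted)
      ++nextId;
    sc->eqClass[0] = it->second;
  }
}

int main() {
  SectionChunk a{"ret", {}}, b{"ret", {}}, c{"nop", {}};
  std::vector<SectionChunk *> chunks = {&a, &b, &c};
  assignInitialClasses(chunks);
  return (a.eqClass[0] == b.eqClass[0] && a.eqClass[0] != c.eqClass[0]) ? 0 : 1;
}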
@ -17,9 +17,8 @@ namespace lld {
|
|||
namespace coff {
|
||||
|
||||
class Chunk;
|
||||
class COFFLinkerContext;
|
||||
|
||||
void doICF(COFFLinkerContext &ctx, ICFLevel);
|
||||
void doICF(ArrayRef<Chunk *> chunks, ICFLevel);
|
||||
|
||||
} // namespace coff
|
||||
} // namespace lld
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "InputFiles.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "Config.h"
|
||||
#include "DebugTypes.h"
|
||||
|
@ -70,6 +69,11 @@ std::string lld::toString(const coff::InputFile *file) {
|
|||
.str();
|
||||
}
|
||||
|
||||
std::vector<ObjFile *> ObjFile::instances;
|
||||
std::map<std::string, PDBInputFile *> PDBInputFile::instances;
|
||||
std::vector<ImportFile *> ImportFile::instances;
|
||||
std::vector<BitcodeFile *> BitcodeFile::instances;
|
||||
|
||||
/// Checks that Source is compatible with being a weak alias to Target.
|
||||
/// If Source is Undefined and has no weak alias set, makes it a weak
|
||||
/// alias to Target.
|
||||
|
@ -94,8 +98,7 @@ static bool ignoredSymbolName(StringRef name) {
|
|||
return name == "@feat.00" || name == "@comp.id";
|
||||
}
|
||||
|
||||
ArchiveFile::ArchiveFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, ArchiveKind, m) {}
|
||||
ArchiveFile::ArchiveFile(MemoryBufferRef m) : InputFile(ArchiveKind, m) {}
|
||||
|
||||
void ArchiveFile::parse() {
|
||||
// Parse a MemoryBufferRef as an archive file.
|
||||
|
@ -103,7 +106,7 @@ void ArchiveFile::parse() {
|
|||
|
||||
// Read the symbol table to construct Lazy objects.
|
||||
for (const Archive::Symbol &sym : file->symbols())
|
||||
ctx.symtab.addLazyArchive(this, sym);
|
||||
symtab->addLazyArchive(this, sym);
|
||||
}
|
||||
|
||||
// Returns a buffer pointing to a member file containing a given symbol.
|
||||
|
@ -141,11 +144,11 @@ void LazyObjFile::fetch() {
|
|||
|
||||
InputFile *file;
|
||||
if (isBitcode(mb))
|
||||
file = make<BitcodeFile>(ctx, mb, "", 0, std::move(symbols));
|
||||
file = make<BitcodeFile>(mb, "", 0, std::move(symbols));
|
||||
else
|
||||
file = make<ObjFile>(ctx, mb, std::move(symbols));
|
||||
file = make<ObjFile>(mb, std::move(symbols));
|
||||
mb = {};
|
||||
ctx.symtab.addFile(file);
|
||||
symtab->addFile(file);
|
||||
}
|
||||
|
||||
void LazyObjFile::parse() {
|
||||
|
@ -155,7 +158,7 @@ void LazyObjFile::parse() {
|
|||
CHECK(lto::InputFile::create(this->mb), this);
|
||||
for (const lto::InputFile::Symbol &sym : obj->symbols()) {
|
||||
if (!sym.isUndefined())
|
||||
ctx.symtab.addLazyObject(this, sym.getName());
|
||||
symtab->addLazyObject(this, sym.getName());
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
@ -172,7 +175,7 @@ void LazyObjFile::parse() {
|
|||
StringRef name = check(coffObj->getSymbolName(coffSym));
|
||||
if (coffSym.isAbsolute() && ignoredSymbolName(name))
|
||||
continue;
|
||||
ctx.symtab.addLazyObject(this, name);
|
||||
symtab->addLazyObject(this, name);
|
||||
i += coffSym.getNumberOfAuxSymbols();
|
||||
}
|
||||
}
|
||||
|
@ -290,7 +293,7 @@ SectionChunk *ObjFile::readSection(uint32_t sectionNumber,
|
|||
// COFF sections that look like string literal sections (i.e. no
|
||||
// relocations, in .rdata, leader symbol name matches the MSVC name mangling
|
||||
// for string literals) are subject to string tail merging.
|
||||
MergeChunk::addSection(ctx, c);
|
||||
MergeChunk::addSection(c);
|
||||
else if (name == ".rsrc" || name.startswith(".rsrc$"))
|
||||
resourceChunks.push_back(c);
|
||||
else
|
||||
|
@ -384,8 +387,8 @@ Symbol *ObjFile::createRegular(COFFSymbolRef sym) {
|
|||
if (sym.isExternal()) {
|
||||
StringRef name = check(coffObj->getSymbolName(sym));
|
||||
if (sc)
|
||||
return ctx.symtab.addRegular(this, name, sym.getGeneric(), sc,
|
||||
sym.getValue());
|
||||
return symtab->addRegular(this, name, sym.getGeneric(), sc,
|
||||
sym.getValue());
|
||||
// For MinGW symbols named .weak.* that point to a discarded section,
|
||||
// don't create an Undefined symbol. If nothing ever refers to the symbol,
|
||||
// everything should be fine. If something actually refers to the symbol
|
||||
|
@ -393,7 +396,7 @@ Symbol *ObjFile::createRegular(COFFSymbolRef sym) {
|
|||
// references at the end.
|
||||
if (config->mingw && name.startswith(".weak."))
|
||||
return nullptr;
|
||||
return ctx.symtab.addUndefined(name, this, false);
|
||||
return symtab->addUndefined(name, this, false);
|
||||
}
|
||||
if (sc)
|
||||
return make<DefinedRegular>(this, /*Name*/ "", /*IsCOMDAT*/ false,
|
||||
|
@ -461,7 +464,7 @@ void ObjFile::initializeSymbols() {
|
|||
for (auto &kv : weakAliases) {
|
||||
Symbol *sym = kv.first;
|
||||
uint32_t idx = kv.second;
|
||||
checkAndSetWeakAlias(&ctx.symtab, this, sym, symbols[idx]);
|
||||
checkAndSetWeakAlias(symtab, this, sym, symbols[idx]);
|
||||
}
|
||||
|
||||
// Free the memory used by sparseChunks now that symbol loading is finished.
|
||||
|
@ -470,7 +473,7 @@ void ObjFile::initializeSymbols() {
|
|||
|
||||
Symbol *ObjFile::createUndefined(COFFSymbolRef sym) {
|
||||
StringRef name = check(coffObj->getSymbolName(sym));
|
||||
return ctx.symtab.addUndefined(name, this, sym.isWeakExternal());
|
||||
return symtab->addUndefined(name, this, sym.isWeakExternal());
|
||||
}
|
||||
|
||||
static const coff_aux_section_definition *findSectionDef(COFFObjectFile *obj,
|
||||
|
@ -540,13 +543,13 @@ void ObjFile::handleComdatSelection(
|
|||
Twine((int)leaderSelection) + " in " + toString(leader->getFile()) +
|
||||
" and " + Twine((int)selection) + " in " + toString(this))
|
||||
.str());
|
||||
ctx.symtab.reportDuplicate(leader, this);
|
||||
symtab->reportDuplicate(leader, this);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (selection) {
|
||||
case IMAGE_COMDAT_SELECT_NODUPLICATES:
|
||||
ctx.symtab.reportDuplicate(leader, this);
|
||||
symtab->reportDuplicate(leader, this);
|
||||
break;
|
||||
|
||||
case IMAGE_COMDAT_SELECT_ANY:
|
||||
|
@ -556,14 +559,14 @@ void ObjFile::handleComdatSelection(
|
|||
case IMAGE_COMDAT_SELECT_SAME_SIZE:
|
||||
if (leaderChunk->getSize() != getSection(sym)->SizeOfRawData) {
|
||||
if (!config->mingw) {
|
||||
ctx.symtab.reportDuplicate(leader, this);
|
||||
symtab->reportDuplicate(leader, this);
|
||||
} else {
|
||||
const coff_aux_section_definition *leaderDef = nullptr;
|
||||
if (leaderChunk->file)
|
||||
leaderDef = findSectionDef(leaderChunk->file->getCOFFObj(),
|
||||
leaderChunk->getSectionNumber());
|
||||
if (!leaderDef || leaderDef->Length != def->Length)
|
||||
ctx.symtab.reportDuplicate(leader, this);
|
||||
symtab->reportDuplicate(leader, this);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
@ -574,7 +577,7 @@ void ObjFile::handleComdatSelection(
|
|||
// if the two comdat sections have e.g. different alignment.
|
||||
// Match that.
|
||||
if (leaderChunk->getContents() != newChunk.getContents())
|
||||
ctx.symtab.reportDuplicate(leader, this, &newChunk, sym.getValue());
|
||||
symtab->reportDuplicate(leader, this, &newChunk, sym.getValue());
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -617,8 +620,8 @@ Optional<Symbol *> ObjFile::createDefined(
|
|||
if (sym.isCommon()) {
|
||||
auto *c = make<CommonChunk>(sym);
|
||||
chunks.push_back(c);
|
||||
return ctx.symtab.addCommon(this, getName(), sym.getValue(),
|
||||
sym.getGeneric(), c);
|
||||
return symtab->addCommon(this, getName(), sym.getValue(), sym.getGeneric(),
|
||||
c);
|
||||
}
|
||||
|
||||
if (sym.isAbsolute()) {
|
||||
|
@ -631,7 +634,7 @@ Optional<Symbol *> ObjFile::createDefined(
|
|||
return nullptr;
|
||||
|
||||
if (sym.isExternal())
|
||||
return ctx.symtab.addAbsolute(name, sym);
|
||||
return symtab->addAbsolute(name, sym);
|
||||
return make<DefinedAbsolute>(name, sym);
|
||||
}
|
||||
|
||||
|
@ -664,7 +667,7 @@ Optional<Symbol *> ObjFile::createDefined(
|
|||
|
||||
if (sym.isExternal()) {
|
||||
std::tie(leader, prevailing) =
|
||||
ctx.symtab.addComdat(this, getName(), sym.getGeneric());
|
||||
symtab->addComdat(this, getName(), sym.getGeneric());
|
||||
} else {
|
||||
leader = make<DefinedRegular>(this, /*Name*/ "", /*IsCOMDAT*/ false,
|
||||
/*IsExternal*/ false, sym.getGeneric());
|
||||
|
@ -786,11 +789,12 @@ void ObjFile::initializeDependencies() {
|
|||
else
|
||||
data = getDebugSection(".debug$T");
|
||||
|
||||
// Don't make a TpiSource for objects with no debug info. If the object has
|
||||
// symbols but no types, make a plain, empty TpiSource anyway, because it
|
||||
// simplifies adding the symbols later.
|
||||
if (data.empty()) {
|
||||
if (!debugChunks.empty())
|
||||
debugTypesObj = makeTpiSource(ctx, this);
|
||||
debugTypesObj = makeTpiSource(this);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -808,7 +812,7 @@ void ObjFile::initializeDependencies() {
|
|||
|
||||
// This object file is a PCH file that others will depend on.
|
||||
if (isPCH) {
|
||||
debugTypesObj = makePrecompSource(ctx, this);
|
||||
debugTypesObj = makePrecompSource(this);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -816,8 +820,8 @@ void ObjFile::initializeDependencies() {
|
|||
if (firstType->kind() == LF_TYPESERVER2) {
|
||||
TypeServer2Record ts = cantFail(
|
||||
TypeDeserializer::deserializeAs<TypeServer2Record>(firstType->data()));
|
||||
debugTypesObj = makeUseTypeServerSource(ctx, this, ts);
|
||||
enqueuePdbFile(ts.getName(), this);
|
||||
debugTypesObj = makeUseTypeServerSource(this, ts);
|
||||
PDBInputFile::enqueue(ts.getName(), this);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -826,14 +830,14 @@ void ObjFile::initializeDependencies() {
|
|||
if (firstType->kind() == LF_PRECOMP) {
|
||||
PrecompRecord precomp = cantFail(
|
||||
TypeDeserializer::deserializeAs<PrecompRecord>(firstType->data()));
|
||||
debugTypesObj = makeUsePrecompSource(ctx, this, precomp);
|
||||
debugTypesObj = makeUsePrecompSource(this, precomp);
|
||||
// Drop the LF_PRECOMP record from the input stream.
|
||||
debugTypes = debugTypes.drop_front(firstType->RecordData.size());
|
||||
return;
|
||||
}
|
||||
|
||||
// This is a plain old object file.
|
||||
debugTypesObj = makeTpiSource(ctx, this);
|
||||
debugTypesObj = makeTpiSource(this);
|
||||
}
|
||||
|
||||
// Make a PDB path assuming the PDB is in the same folder as the OBJ
|
||||
|
@ -851,7 +855,7 @@ static std::string getPdbBaseName(ObjFile *file, StringRef tSPath) {
|
|||
|
||||
// The casing of the PDB path stamped in the OBJ can differ from the actual path
|
||||
// on disk. With this, we ensure to always use lowercase as a key for the
|
||||
// pdbInputFileInstances map, at least on Windows.
|
||||
// PDBInputFile::instances map, at least on Windows.
|
||||
static std::string normalizePdbPath(StringRef path) {
|
||||
#if defined(_WIN32)
|
||||
return path.lower();
|
||||
|
@ -875,25 +879,33 @@ static Optional<std::string> findPdbPath(StringRef pdbPath,
|
|||
return None;
|
||||
}
|
||||
|
||||
PDBInputFile::PDBInputFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, PDBKind, m) {}
|
||||
PDBInputFile::PDBInputFile(MemoryBufferRef m) : InputFile(PDBKind, m) {}
|
||||
|
||||
PDBInputFile::~PDBInputFile() = default;
|
||||
|
||||
PDBInputFile *PDBInputFile::findFromRecordPath(const COFFLinkerContext &ctx,
|
||||
StringRef path,
|
||||
PDBInputFile *PDBInputFile::findFromRecordPath(StringRef path,
|
||||
ObjFile *fromFile) {
|
||||
auto p = findPdbPath(path.str(), fromFile);
|
||||
if (!p)
|
||||
return nullptr;
|
||||
auto it = ctx.pdbInputFileInstances.find(*p);
|
||||
if (it != ctx.pdbInputFileInstances.end())
|
||||
auto it = PDBInputFile::instances.find(*p);
|
||||
if (it != PDBInputFile::instances.end())
|
||||
return it->second;
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
void PDBInputFile::enqueue(StringRef path, ObjFile *fromFile) {
|
||||
auto p = findPdbPath(path.str(), fromFile);
|
||||
if (!p)
|
||||
return;
|
||||
auto it = PDBInputFile::instances.emplace(*p, nullptr);
|
||||
if (!it.second)
|
||||
return; // already scheduled for load
|
||||
driver->enqueuePDB(*p);
|
||||
}
|
||||
|
||||
void PDBInputFile::parse() {
|
||||
ctx.pdbInputFileInstances[mb.getBufferIdentifier().str()] = this;
|
||||
PDBInputFile::instances[mb.getBufferIdentifier().str()] = this;
|
||||
|
||||
std::unique_ptr<pdb::IPDBSession> thisSession;
|
||||
loadErr.emplace(pdb::NativeSession::createFromPdb(
|
||||
|
@ -911,7 +923,7 @@ void PDBInputFile::parse() {
|
|||
loadErr.emplace(expectedInfo.takeError());
|
||||
return;
|
||||
}
|
||||
debugTypesObj = makeTypeServerSource(ctx, this);
|
||||
debugTypesObj = makeTypeServerSource(this);
|
||||
}
|
||||
|
||||
// Used only for DWARF debug info, which is not common (except in MinGW
|
||||
|
@ -945,16 +957,6 @@ Optional<DILineInfo> ObjFile::getDILineInfo(uint32_t offset,
|
|||
return dwarf->getDILineInfo(offset, sectionIndex);
|
||||
}
|
||||
|
||||
void ObjFile::enqueuePdbFile(StringRef path, ObjFile *fromFile) {
|
||||
auto p = findPdbPath(path.str(), fromFile);
|
||||
if (!p)
|
||||
return;
|
||||
auto it = ctx.pdbInputFileInstances.emplace(*p, nullptr);
|
||||
if (!it.second)
|
||||
return; // already scheduled for load
|
||||
driver->enqueuePDB(*p);
|
||||
}
|
||||
|
||||
void ImportFile::parse() {
|
||||
const char *buf = mb.getBufferStart();
|
||||
const auto *hdr = reinterpret_cast<const coff_import_header *>(buf);
|
||||
|
@ -988,31 +990,31 @@ void ImportFile::parse() {
|
|||
this->hdr = hdr;
|
||||
externalName = extName;
|
||||
|
||||
impSym = ctx.symtab.addImportData(impName, this);
|
||||
impSym = symtab->addImportData(impName, this);
|
||||
// If this was a duplicate, we logged an error but may continue;
|
||||
// in this case, impSym is nullptr.
|
||||
if (!impSym)
|
||||
return;
|
||||
|
||||
if (hdr->getType() == llvm::COFF::IMPORT_CONST)
|
||||
static_cast<void>(ctx.symtab.addImportData(name, this));
|
||||
static_cast<void>(symtab->addImportData(name, this));
|
||||
|
||||
// If type is function, we need to create a thunk which jump to an
|
||||
// address pointed by the __imp_ symbol. (This allows you to call
|
||||
// DLL functions just like regular non-DLL functions.)
|
||||
if (hdr->getType() == llvm::COFF::IMPORT_CODE)
|
||||
thunkSym = ctx.symtab.addImportThunk(
|
||||
thunkSym = symtab->addImportThunk(
|
||||
name, cast_or_null<DefinedImportData>(impSym), hdr->Machine);
|
||||
}
|
||||
|
||||
BitcodeFile::BitcodeFile(COFFLinkerContext &ctx, MemoryBufferRef mb,
|
||||
StringRef archiveName, uint64_t offsetInArchive)
|
||||
: BitcodeFile(ctx, mb, archiveName, offsetInArchive, {}) {}
|
||||
BitcodeFile::BitcodeFile(MemoryBufferRef mb, StringRef archiveName,
|
||||
uint64_t offsetInArchive)
|
||||
: BitcodeFile(mb, archiveName, offsetInArchive, {}) {}
|
||||
|
||||
BitcodeFile::BitcodeFile(COFFLinkerContext &ctx, MemoryBufferRef mb,
|
||||
StringRef archiveName, uint64_t offsetInArchive,
|
||||
BitcodeFile::BitcodeFile(MemoryBufferRef mb, StringRef archiveName,
|
||||
uint64_t offsetInArchive,
|
||||
std::vector<Symbol *> &&symbols)
|
||||
: InputFile(ctx, BitcodeKind, mb), symbols(std::move(symbols)) {
|
||||
: InputFile(BitcodeKind, mb), symbols(std::move(symbols)) {
|
||||
std::string path = mb.getBufferIdentifier().str();
|
||||
if (config->thinLTOIndexOnly)
|
||||
path = replaceThinLTOSuffix(mb.getBufferIdentifier());
|
||||
|
@ -1067,7 +1069,7 @@ void BitcodeFile::parse() {
|
|||
for (size_t i = 0; i != obj->getComdatTable().size(); ++i)
|
||||
// FIXME: Check nodeduplicate
|
||||
comdat[i] =
|
||||
ctx.symtab.addComdat(this, saver.save(obj->getComdatTable()[i].first));
|
||||
symtab->addComdat(this, saver.save(obj->getComdatTable()[i].first));
|
||||
for (const lto::InputFile::Symbol &objSym : obj->symbols()) {
|
||||
StringRef symName = saver.save(objSym.getName());
|
||||
int comdatIndex = objSym.getComdatIndex();
|
||||
|
@ -1078,27 +1080,27 @@ void BitcodeFile::parse() {
|
|||
else
|
||||
fakeSC = &ltoDataSectionChunk.chunk;
|
||||
if (objSym.isUndefined()) {
|
||||
sym = ctx.symtab.addUndefined(symName, this, false);
|
||||
sym = symtab->addUndefined(symName, this, false);
|
||||
} else if (objSym.isCommon()) {
|
||||
sym = ctx.symtab.addCommon(this, symName, objSym.getCommonSize());
|
||||
sym = symtab->addCommon(this, symName, objSym.getCommonSize());
|
||||
} else if (objSym.isWeak() && objSym.isIndirect()) {
|
||||
// Weak external.
|
||||
sym = ctx.symtab.addUndefined(symName, this, true);
|
||||
sym = symtab->addUndefined(symName, this, true);
|
||||
std::string fallback = std::string(objSym.getCOFFWeakExternalFallback());
|
||||
Symbol *alias = ctx.symtab.addUndefined(saver.save(fallback));
|
||||
checkAndSetWeakAlias(&ctx.symtab, this, sym, alias);
|
||||
Symbol *alias = symtab->addUndefined(saver.save(fallback));
|
||||
checkAndSetWeakAlias(symtab, this, sym, alias);
|
||||
} else if (comdatIndex != -1) {
|
||||
if (symName == obj->getComdatTable()[comdatIndex].first) {
|
||||
sym = comdat[comdatIndex].first;
|
||||
if (cast<DefinedRegular>(sym)->data == nullptr)
|
||||
cast<DefinedRegular>(sym)->data = &fakeSC->repl;
|
||||
} else if (comdat[comdatIndex].second) {
|
||||
sym = ctx.symtab.addRegular(this, symName, nullptr, fakeSC);
|
||||
sym = symtab->addRegular(this, symName, nullptr, fakeSC);
|
||||
} else {
|
||||
sym = ctx.symtab.addUndefined(symName, this, false);
|
||||
sym = symtab->addUndefined(symName, this, false);
|
||||
}
|
||||
} else {
|
||||
sym = ctx.symtab.addRegular(this, symName, nullptr, fakeSC);
|
||||
sym = symtab->addRegular(this, symName, nullptr, fakeSC);
|
||||
}
|
||||
symbols.push_back(sym);
|
||||
if (objSym.isUsed())
|
||||
|
@ -1183,9 +1185,9 @@ void DLLFile::parse() {
|
|||
}
|
||||
|
||||
StringRef impName = saver.save("__imp_" + symbolName);
|
||||
ctx.symtab.addLazyDLLSymbol(this, s, impName);
|
||||
symtab->addLazyDLLSymbol(this, s, impName);
|
||||
if (code)
|
||||
ctx.symtab.addLazyDLLSymbol(this, s, symbolName);
|
||||
symtab->addLazyDLLSymbol(this, s, symbolName);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1217,6 +1219,6 @@ void DLLFile::makeImport(DLLFile::Symbol *s) {
|
|||
p += s->symbolName.size() + 1;
|
||||
memcpy(p, s->dllName.data(), s->dllName.size());
|
||||
MemoryBufferRef mbref = MemoryBufferRef(StringRef(buf, size), s->dllName);
|
||||
ImportFile *impFile = make<ImportFile>(ctx, mbref);
|
||||
ctx.symtab.addFile(impFile);
|
||||
ImportFile *impFile = make<ImportFile>(mbref);
|
||||
symtab->addFile(impFile);
|
||||
}
|
||||
|
|
|
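The InputFiles.cpp hunks above reinstate per-class static instance lists (ObjFile::instances and friends) that a single global SymbolTable pointer fills in as files are added. A rough, self-contained sketch of that pattern follows; the types here are simplified stand-ins rather than lld's real InputFile hierarchy.

#include <string>
#include <vector>

struct InputFile {
  explicit InputFile(std::string n) : name(std::move(n)) {}
  virtual ~InputFile() = default;
  std::string name;
};

struct ObjFile : InputFile {
  using InputFile::InputFile;
  static std::vector<ObjFile *> instances;  // global per-class list, as in the revert
};
std::vector<ObjFile *> ObjFile::instances;

struct SymbolTable {
  void addFile(InputFile *file) {
    files.push_back(file);
    if (auto *obj = dynamic_cast<ObjFile *>(file))
      ObjFile::instances.push_back(obj);  // registration happens as a side effect
  }
  std::vector<InputFile *> files;
};

SymbolTable *symtab;  // single global symbol table pointer, as in the revert

int main() {
  SymbolTable table;
  symtab = &table;
  ObjFile obj("a.obj");
  symtab->addFile(&obj);
  return ObjFile::instances.size() == 1 ? 0 : 1;
}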
@ -38,7 +38,6 @@ namespace lld {
|
|||
class DWARFCache;
|
||||
|
||||
namespace coff {
|
||||
class COFFLinkerContext;
|
||||
|
||||
std::vector<MemoryBufferRef> getArchiveMembers(llvm::object::Archive *file);
|
||||
|
||||
|
@ -92,11 +91,8 @@ public:
|
|||
// Returns .drectve section contents if exist.
|
||||
StringRef getDirectives() { return directives; }
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
|
||||
protected:
|
||||
InputFile(COFFLinkerContext &c, Kind k, MemoryBufferRef m)
|
||||
: mb(m), ctx(c), fileKind(k) {}
|
||||
InputFile(Kind k, MemoryBufferRef m) : mb(m), fileKind(k) {}
|
||||
|
||||
StringRef directives;
|
||||
|
||||
|
@ -107,7 +103,7 @@ private:
|
|||
// .lib or .a file.
|
||||
class ArchiveFile : public InputFile {
|
||||
public:
|
||||
explicit ArchiveFile(COFFLinkerContext &ctx, MemoryBufferRef m);
|
||||
explicit ArchiveFile(MemoryBufferRef m);
|
||||
static bool classof(const InputFile *f) { return f->kind() == ArchiveKind; }
|
||||
void parse() override;
|
||||
|
||||
|
@ -124,8 +120,7 @@ private:
|
|||
// .obj or .o file between -start-lib and -end-lib.
|
||||
class LazyObjFile : public InputFile {
|
||||
public:
|
||||
explicit LazyObjFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, LazyObjectKind, m) {}
|
||||
explicit LazyObjFile(MemoryBufferRef m) : InputFile(LazyObjectKind, m) {}
|
||||
static bool classof(const InputFile *f) {
|
||||
return f->kind() == LazyObjectKind;
|
||||
}
|
||||
|
@ -141,11 +136,9 @@ private:
|
|||
// .obj or .o file. This may be a member of an archive file.
|
||||
class ObjFile : public InputFile {
|
||||
public:
|
||||
explicit ObjFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, ObjectKind, m) {}
|
||||
explicit ObjFile(COFFLinkerContext &ctx, MemoryBufferRef m,
|
||||
std::vector<Symbol *> &&symbols)
|
||||
: InputFile(ctx, ObjectKind, m), symbols(std::move(symbols)) {}
|
||||
explicit ObjFile(MemoryBufferRef m) : InputFile(ObjectKind, m) {}
|
||||
explicit ObjFile(MemoryBufferRef m, std::vector<Symbol *> &&symbols)
|
||||
: InputFile(ObjectKind, m), symbols(std::move(symbols)) {}
|
||||
static bool classof(const InputFile *f) { return f->kind() == ObjectKind; }
|
||||
void parse() override;
|
||||
MachineTypes getMachineType() override;
|
||||
|
@ -182,6 +175,8 @@ public:
|
|||
|
||||
bool isResourceObjFile() const { return !resourceChunks.empty(); }
|
||||
|
||||
static std::vector<ObjFile *> instances;
|
||||
|
||||
// Flags in the absolute @feat.00 symbol if it is present. These usually
|
||||
// indicate if an object was compiled with certain security features enabled
|
||||
// like stack guard, safeseh, /guard:cf, or other things.
|
||||
|
@ -233,8 +228,6 @@ private:
|
|||
return getSection(sym.getSectionNumber());
|
||||
}
|
||||
|
||||
void enqueuePdbFile(StringRef path, ObjFile *fromFile);
|
||||
|
||||
void initializeChunks();
|
||||
void initializeSymbols();
|
||||
void initializeFlags();
|
||||
|
@ -325,13 +318,16 @@ private:
|
|||
// stream.
|
||||
class PDBInputFile : public InputFile {
|
||||
public:
|
||||
explicit PDBInputFile(COFFLinkerContext &ctx, MemoryBufferRef m);
|
||||
explicit PDBInputFile(MemoryBufferRef m);
|
||||
~PDBInputFile();
|
||||
static bool classof(const InputFile *f) { return f->kind() == PDBKind; }
|
||||
void parse() override;
|
||||
|
||||
static PDBInputFile *findFromRecordPath(const COFFLinkerContext &ctx,
|
||||
StringRef path, ObjFile *fromFile);
|
||||
static void enqueue(StringRef path, ObjFile *fromFile);
|
||||
|
||||
static PDBInputFile *findFromRecordPath(StringRef path, ObjFile *fromFile);
|
||||
|
||||
static std::map<std::string, PDBInputFile *> instances;
|
||||
|
||||
// Record possible errors while opening the PDB file
|
||||
llvm::Optional<Error> loadErr;
|
||||
|
@ -348,11 +344,12 @@ public:
|
|||
// for details about the format.
|
||||
class ImportFile : public InputFile {
|
||||
public:
|
||||
explicit ImportFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, ImportKind, m) {}
|
||||
explicit ImportFile(MemoryBufferRef m) : InputFile(ImportKind, m) {}
|
||||
|
||||
static bool classof(const InputFile *f) { return f->kind() == ImportKind; }
|
||||
|
||||
static std::vector<ImportFile *> instances;
|
||||
|
||||
Symbol *impSym = nullptr;
|
||||
Symbol *thunkSym = nullptr;
|
||||
std::string dllName;
|
||||
|
@ -380,15 +377,16 @@ public:
|
|||
// Used for LTO.
|
||||
class BitcodeFile : public InputFile {
|
||||
public:
|
||||
BitcodeFile(COFFLinkerContext &ctx, MemoryBufferRef mb, StringRef archiveName,
|
||||
BitcodeFile(MemoryBufferRef mb, StringRef archiveName,
|
||||
uint64_t offsetInArchive);
|
||||
explicit BitcodeFile(COFFLinkerContext &ctx, MemoryBufferRef m,
|
||||
StringRef archiveName, uint64_t offsetInArchive,
|
||||
explicit BitcodeFile(MemoryBufferRef m, StringRef archiveName,
|
||||
uint64_t offsetInArchive,
|
||||
std::vector<Symbol *> &&symbols);
|
||||
~BitcodeFile();
|
||||
static bool classof(const InputFile *f) { return f->kind() == BitcodeKind; }
|
||||
ArrayRef<Symbol *> getSymbols() { return symbols; }
|
||||
MachineTypes getMachineType() override;
|
||||
static std::vector<BitcodeFile *> instances;
|
||||
std::unique_ptr<llvm::lto::InputFile> obj;
|
||||
|
||||
private:
|
||||
|
@ -400,8 +398,7 @@ private:
|
|||
// .dll file. MinGW only.
|
||||
class DLLFile : public InputFile {
|
||||
public:
|
||||
explicit DLLFile(COFFLinkerContext &ctx, MemoryBufferRef m)
|
||||
: InputFile(ctx, DLLKind, m) {}
|
||||
explicit DLLFile(MemoryBufferRef m) : InputFile(DLLKind, m) {}
|
||||
static bool classof(const InputFile *f) { return f->kind() == DLLKind; }
|
||||
void parse() override;
|
||||
MachineTypes getMachineType() override;
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "LLDMapFile.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "SymbolTable.h"
|
||||
#include "Symbols.h"
|
||||
#include "Writer.h"
|
||||
|
@ -45,9 +44,9 @@ static void writeHeader(raw_ostream &os, uint64_t addr, uint64_t size,
|
|||
}
|
||||
|
||||
// Returns a list of all symbols that we want to print out.
|
||||
static std::vector<DefinedRegular *> getSymbols(const COFFLinkerContext &ctx) {
|
||||
static std::vector<DefinedRegular *> getSymbols() {
|
||||
std::vector<DefinedRegular *> v;
|
||||
for (ObjFile *file : ctx.objFileInstances)
|
||||
for (ObjFile *file : ObjFile::instances)
|
||||
for (Symbol *b : file->getSymbols())
|
||||
if (auto *sym = dyn_cast_or_null<DefinedRegular>(b))
|
||||
if (sym && !sym->getCOFFSymbol().isSectionDefinition())
|
||||
|
@ -87,7 +86,7 @@ getSymbolStrings(ArrayRef<DefinedRegular *> syms) {
|
|||
return ret;
|
||||
}
|
||||
|
||||
void lld::coff::writeLLDMapFile(const COFFLinkerContext &ctx) {
|
||||
void lld::coff::writeLLDMapFile(ArrayRef<OutputSection *> outputSections) {
|
||||
if (config->lldmapFile.empty())
|
||||
return;
|
||||
|
||||
|
@ -97,7 +96,7 @@ void lld::coff::writeLLDMapFile(const COFFLinkerContext &ctx) {
|
|||
fatal("cannot open " + config->lldmapFile + ": " + ec.message());
|
||||
|
||||
// Collect symbol info that we want to print out.
|
||||
std::vector<DefinedRegular *> syms = getSymbols(ctx);
|
||||
std::vector<DefinedRegular *> syms = getSymbols();
|
||||
SymbolMapTy sectionSyms = getSectionSyms(syms);
|
||||
DenseMap<DefinedRegular *, std::string> symStr = getSymbolStrings(syms);
|
||||
|
||||
|
@ -105,7 +104,7 @@ void lld::coff::writeLLDMapFile(const COFFLinkerContext &ctx) {
|
|||
os << "Address Size Align Out In Symbol\n";
|
||||
|
||||
// Print out file contents.
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
writeHeader(os, sec->getRVA(), sec->getVirtualSize(), /*align=*/pageSize);
|
||||
os << sec->name << '\n';
|
||||
|
||||
|
|
|
@ -9,10 +9,12 @@
|
|||
#ifndef LLD_COFF_LLDMAPFILE_H
|
||||
#define LLD_COFF_LLDMAPFILE_H
|
||||
|
||||
#include "llvm/ADT/ArrayRef.h"
|
||||
|
||||
namespace lld {
|
||||
namespace coff {
|
||||
class COFFLinkerContext;
|
||||
void writeLLDMapFile(const COFFLinkerContext &ctx);
|
||||
class OutputSection;
|
||||
void writeLLDMapFile(llvm::ArrayRef<OutputSection *> outputSections);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -156,7 +156,7 @@ void BitcodeCompiler::add(BitcodeFile &f) {
|
|||
|
||||
// Merge all the bitcode files we have seen, codegen the result
|
||||
// and return the resulting objects.
|
||||
std::vector<InputFile *> BitcodeCompiler::compile(COFFLinkerContext &ctx) {
|
||||
std::vector<InputFile *> BitcodeCompiler::compile() {
|
||||
unsigned maxTasks = ltoObj->getMaxTasks();
|
||||
buf.resize(maxTasks);
|
||||
files.resize(maxTasks);
|
||||
|
@ -224,7 +224,7 @@ std::vector<InputFile *> BitcodeCompiler::compile(COFFLinkerContext &ctx) {
|
|||
|
||||
if (config->saveTemps)
|
||||
saveBuffer(buf[i], ltoObjName);
|
||||
ret.push_back(make<ObjFile>(ctx, MemoryBufferRef(objBuf, ltoObjName)));
|
||||
ret.push_back(make<ObjFile>(MemoryBufferRef(objBuf, ltoObjName)));
|
||||
}
|
||||
|
||||
return ret;
|
||||
|
|
|
@ -38,7 +38,6 @@ namespace coff {
|
|||
|
||||
class BitcodeFile;
|
||||
class InputFile;
|
||||
class COFFLinkerContext;
|
||||
|
||||
class BitcodeCompiler {
|
||||
public:
|
||||
|
@ -46,7 +45,7 @@ public:
|
|||
~BitcodeCompiler();
|
||||
|
||||
void add(BitcodeFile &f);
|
||||
std::vector<InputFile *> compile(COFFLinkerContext &ctx);
|
||||
std::vector<InputFile *> compile();
|
||||
|
||||
private:
|
||||
std::unique_ptr<llvm::lto::LTO> ltoObj;
|
||||
|
|
|
@ -28,7 +28,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "MapFile.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "SymbolTable.h"
|
||||
#include "Symbols.h"
|
||||
#include "Writer.h"
|
||||
|
@ -43,6 +42,11 @@ using namespace llvm::object;
|
|||
using namespace lld;
|
||||
using namespace lld::coff;
|
||||
|
||||
static Timer totalMapTimer("MAP emission (Cumulative)", Timer::root());
|
||||
static Timer symbolGatherTimer("Gather symbols", totalMapTimer);
|
||||
static Timer symbolStringsTimer("Build symbol strings", totalMapTimer);
|
||||
static Timer writeTimer("Write to file", totalMapTimer);
|
||||
|
||||
// Print out the first two columns of a line.
|
||||
static void writeHeader(raw_ostream &os, uint32_t sec, uint64_t addr) {
|
||||
os << format(" %04x:%08llx", sec, addr);
|
||||
|
@ -95,10 +99,9 @@ static void sortUniqueSymbols(std::vector<Defined *> &syms) {
|
|||
|
||||
// Returns the lists of all symbols that we want to print out.
|
||||
static void getSymbols(std::vector<Defined *> &syms,
|
||||
std::vector<Defined *> &staticSyms,
|
||||
const COFFLinkerContext &ctx) {
|
||||
std::vector<Defined *> &staticSyms) {
|
||||
|
||||
for (ObjFile *file : ctx.objFileInstances)
|
||||
for (ObjFile *file : ObjFile::instances)
|
||||
for (Symbol *b : file->getSymbols()) {
|
||||
if (!b || !b->isLive())
|
||||
continue;
|
||||
|
@ -116,7 +119,7 @@ static void getSymbols(std::vector<Defined *> &syms,
|
|||
}
|
||||
}
|
||||
|
||||
for (ImportFile *file : ctx.importFileInstances) {
|
||||
for (ImportFile *file : ImportFile::instances) {
|
||||
if (!file->live)
|
||||
continue;
|
||||
|
||||
|
@ -139,7 +142,7 @@ static void getSymbols(std::vector<Defined *> &syms,
|
|||
|
||||
// Construct a map from symbols to their stringified representations.
|
||||
static DenseMap<Defined *, std::string>
|
||||
getSymbolStrings(const COFFLinkerContext &ctx, ArrayRef<Defined *> syms) {
|
||||
getSymbolStrings(ArrayRef<Defined *> syms) {
|
||||
std::vector<std::string> str(syms.size());
|
||||
parallelForEachN((size_t)0, syms.size(), [&](size_t i) {
|
||||
raw_string_ostream os(str[i]);
|
||||
|
@ -158,7 +161,7 @@ getSymbolStrings(const COFFLinkerContext &ctx, ArrayRef<Defined *> syms) {
|
|||
fileDescr = "<common>";
|
||||
} else if (Chunk *chunk = sym->getChunk()) {
|
||||
address = sym->getRVA();
|
||||
if (OutputSection *sec = ctx.getOutputSection(chunk))
|
||||
if (OutputSection *sec = chunk->getOutputSection())
|
||||
address -= sec->header.VirtualAddress;
|
||||
|
||||
sectionIdx = chunk->getOutputSectionIdx();
|
||||
|
@ -198,7 +201,7 @@ getSymbolStrings(const COFFLinkerContext &ctx, ArrayRef<Defined *> syms) {
|
|||
return ret;
|
||||
}
|
||||
|
||||
void lld::coff::writeMapFile(COFFLinkerContext &ctx) {
|
||||
void lld::coff::writeMapFile(ArrayRef<OutputSection *> outputSections) {
|
||||
if (config->mapFile.empty())
|
||||
return;
|
||||
|
||||
|
@ -207,22 +210,21 @@ void lld::coff::writeMapFile(COFFLinkerContext &ctx) {
|
|||
if (ec)
|
||||
fatal("cannot open " + config->mapFile + ": " + ec.message());
|
||||
|
||||
ScopedTimer t1(ctx.totalMapTimer);
|
||||
ScopedTimer t1(totalMapTimer);
|
||||
|
||||
// Collect symbol info that we want to print out.
|
||||
ScopedTimer t2(ctx.symbolGatherTimer);
|
||||
ScopedTimer t2(symbolGatherTimer);
|
||||
std::vector<Defined *> syms;
|
||||
std::vector<Defined *> staticSyms;
|
||||
getSymbols(syms, staticSyms, ctx);
|
||||
getSymbols(syms, staticSyms);
|
||||
t2.stop();
|
||||
|
||||
ScopedTimer t3(ctx.symbolStringsTimer);
|
||||
DenseMap<Defined *, std::string> symStr = getSymbolStrings(ctx, syms);
|
||||
DenseMap<Defined *, std::string> staticSymStr =
|
||||
getSymbolStrings(ctx, staticSyms);
|
||||
ScopedTimer t3(symbolStringsTimer);
|
||||
DenseMap<Defined *, std::string> symStr = getSymbolStrings(syms);
|
||||
DenseMap<Defined *, std::string> staticSymStr = getSymbolStrings(staticSyms);
|
||||
t3.stop();
|
||||
|
||||
ScopedTimer t4(ctx.writeTimer);
|
||||
ScopedTimer t4(writeTimer);
|
||||
SmallString<128> AppName = sys::path::filename(config->outputFile);
|
||||
sys::path::replace_extension(AppName, "");
|
||||
|
||||
|
@ -246,7 +248,7 @@ void lld::coff::writeMapFile(COFFLinkerContext &ctx) {
|
|||
// Print out section table.
|
||||
os << " Start Length Name Class\n";
|
||||
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
// Merge display of chunks with same sectionName
|
||||
std::vector<std::pair<SectionChunk *, SectionChunk *>> ChunkRanges;
|
||||
for (Chunk *c : sec->chunks) {
|
||||
|
@ -301,7 +303,7 @@ void lld::coff::writeMapFile(COFFLinkerContext &ctx) {
|
|||
Chunk *chunk = entry->getChunk();
|
||||
entrySecIndex = chunk->getOutputSectionIdx();
|
||||
entryAddress =
|
||||
entry->getRVA() - ctx.getOutputSection(chunk)->header.VirtualAddress;
|
||||
entry->getRVA() - chunk->getOutputSection()->header.VirtualAddress;
|
||||
}
|
||||
}
|
||||
os << " entry point at ";
|
||||
|
|
|
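The map-file hunks above compute each printed address by subtracting the containing output section's VirtualAddress from the symbol's RVA. A hedged, self-contained sketch of that calculation, with simplified stand-ins for OutputSection, Chunk and Defined rather than lld's real classes:

#include <cstdint>
#include <cstdio>

struct OutputSection {
  uint64_t virtualAddress = 0;  // plays the role of header.VirtualAddress
  unsigned sectionIndex = 0;
};

struct Chunk {
  OutputSection *outputSection = nullptr;
  OutputSection *getOutputSection() const { return outputSection; }
};

struct Defined {
  uint64_t rva = 0;
  Chunk *chunk = nullptr;
  uint64_t getRVA() const { return rva; }
  Chunk *getChunk() const { return chunk; }
};

// Print "sec:offset" the way the map writer does: RVA made relative to the
// containing output section.
void printMapEntry(const Defined &sym) {
  uint64_t address = sym.getRVA();
  unsigned sectionIdx = 0;
  if (Chunk *c = sym.getChunk())
    if (OutputSection *sec = c->getOutputSection()) {
      address -= sec->virtualAddress;
      sectionIdx = sec->sectionIndex;
    }
  std::printf(" %04x:%08llx\n", sectionIdx, (unsigned long long)address);
}

int main() {
  OutputSection text{0x1000, 1};
  Chunk chunk{&text};
  Defined sym{0x1040, &chunk};
  printMapEntry(sym);  // prints " 0001:00000040"
}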
@ -9,10 +9,12 @@
|
|||
#ifndef LLD_COFF_MAPFILE_H
|
||||
#define LLD_COFF_MAPFILE_H
|
||||
|
||||
#include "llvm/ADT/ArrayRef.h"
|
||||
|
||||
namespace lld {
|
||||
namespace coff {
|
||||
class COFFLinkerContext;
|
||||
void writeMapFile(COFFLinkerContext &ctx);
|
||||
class OutputSection;
|
||||
void writeMapFile(llvm::ArrayRef<OutputSection *> outputSections);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -6,7 +6,6 @@
|
|||
//
|
||||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "Symbols.h"
|
||||
#include "lld/Common/Timer.h"
|
||||
|
@ -16,11 +15,13 @@
|
|||
namespace lld {
|
||||
namespace coff {
|
||||
|
||||
static Timer gctimer("GC", Timer::root());
|
||||
|
||||
// Set live bit on for each reachable chunk. Unmarked (unreachable)
|
||||
// COMDAT chunks will be ignored by Writer, so they will be excluded
|
||||
// from the final output.
|
||||
void markLive(COFFLinkerContext &ctx) {
|
||||
ScopedTimer t(ctx.gcTimer);
|
||||
void markLive(ArrayRef<Chunk *> chunks) {
|
||||
ScopedTimer t(gctimer);
|
||||
|
||||
// We build up a worklist of sections which have been marked as live. We only
|
||||
// push into the worklist when we discover an unmarked section, and we mark
|
||||
|
@ -30,7 +31,7 @@ void markLive(COFFLinkerContext &ctx) {
|
|||
// COMDAT section chunks are dead by default. Add non-COMDAT chunks. Do not
|
||||
// traverse DWARF sections. They are live, but they should not keep other
|
||||
// sections alive.
|
||||
for (Chunk *c : ctx.symtab.getChunks())
|
||||
for (Chunk *c : chunks)
|
||||
if (auto *sc = dyn_cast<SectionChunk>(c))
|
||||
if (sc->live && !sc->isDWARF())
|
||||
worklist.push_back(sc);
|
||||
|
@ -69,5 +70,6 @@ void markLive(COFFLinkerContext &ctx) {
|
|||
enqueue(&c);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
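The MarkLive.cpp comments above describe a worklist that only pushes sections when they are first discovered, marking them live on push. A minimal self-contained sketch of that traversal, assuming a simplified SectionChunk with a live flag and outgoing edges (not lld's real data structures):

#include <vector>

struct SectionChunk {
  bool live = false;                       // COMDAT chunks start out dead
  std::vector<SectionChunk *> successors;  // relocation targets, associated children
};

// Mark every chunk reachable from the roots. A chunk is pushed onto the
// worklist only the first time it is seen, and is marked live when pushed.
void markLive(const std::vector<SectionChunk *> &roots) {
  std::vector<SectionChunk *> worklist;
  auto enqueue = [&](SectionChunk *c) {
    if (c->live)
      return;
    c->live = true;
    worklist.push_back(c);
  };
  for (SectionChunk *c : roots)
    enqueue(c);
  while (!worklist.empty()) {
    SectionChunk *c = worklist.back();
    worklist.pop_back();
    for (SectionChunk *succ : c->successors)
      enqueue(succ);
  }
}

int main() {
  SectionChunk a, b, dead;
  a.successors.push_back(&b);
  markLive({&a});
  return (a.live && b.live && !dead.live) ? 0 : 1;
}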
@ -10,13 +10,14 @@
|
|||
#define LLD_COFF_MARKLIVE_H
|
||||
|
||||
#include "lld/Common/LLVM.h"
|
||||
#include "llvm/ADT/ArrayRef.h"
|
||||
|
||||
namespace lld {
|
||||
namespace coff {
|
||||
|
||||
class COFFLinkerContext;
|
||||
class Chunk;
|
||||
|
||||
void markLive(COFFLinkerContext &ctx);
|
||||
void markLive(ArrayRef<Chunk *> chunks);
|
||||
|
||||
} // namespace coff
|
||||
} // namespace lld
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "MinGW.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Driver.h"
|
||||
#include "InputFiles.h"
|
||||
#include "SymbolTable.h"
|
||||
|
@ -123,8 +122,7 @@ void AutoExporter::addWholeArchive(StringRef path) {
|
|||
excludeLibs.erase(libName);
|
||||
}
|
||||
|
||||
bool AutoExporter::shouldExport(const COFFLinkerContext &ctx,
|
||||
Defined *sym) const {
|
||||
bool AutoExporter::shouldExport(Defined *sym) const {
|
||||
if (!sym || !sym->getChunk())
|
||||
return false;
|
||||
|
||||
|
@ -143,7 +141,7 @@ bool AutoExporter::shouldExport(const COFFLinkerContext &ctx,
|
|||
return false;
|
||||
|
||||
// If a corresponding __imp_ symbol exists and is defined, don't export it.
|
||||
if (ctx.symtab.find(("__imp_" + sym->getName()).str()))
|
||||
if (symtab->find(("__imp_" + sym->getName()).str()))
|
||||
return false;
|
||||
|
||||
// Check that file is non-null before dereferencing it, symbols not
|
||||
|
@ -194,7 +192,7 @@ static StringRef mangle(Twine sym) {
|
|||
// like they are not being used at all, so we explicitly set some flags so
|
||||
// that LTO won't eliminate them.
|
||||
std::vector<WrappedSymbol>
|
||||
lld::coff::addWrappedSymbols(COFFLinkerContext &ctx, opt::InputArgList &args) {
|
||||
lld::coff::addWrappedSymbols(opt::InputArgList &args) {
|
||||
std::vector<WrappedSymbol> v;
|
||||
DenseSet<StringRef> seen;
|
||||
|
||||
|
@ -203,18 +201,18 @@ lld::coff::addWrappedSymbols(COFFLinkerContext &ctx, opt::InputArgList &args) {
|
|||
if (!seen.insert(name).second)
|
||||
continue;
|
||||
|
||||
Symbol *sym = ctx.symtab.findUnderscore(name);
|
||||
Symbol *sym = symtab->findUnderscore(name);
|
||||
if (!sym)
|
||||
continue;
|
||||
|
||||
Symbol *real = ctx.symtab.addUndefined(mangle("__real_" + name));
|
||||
Symbol *wrap = ctx.symtab.addUndefined(mangle("__wrap_" + name));
|
||||
Symbol *real = symtab->addUndefined(mangle("__real_" + name));
|
||||
Symbol *wrap = symtab->addUndefined(mangle("__wrap_" + name));
|
||||
v.push_back({sym, real, wrap});
|
||||
|
||||
// These symbols may seem undefined initially, but don't bail out
|
||||
// at symtab.reportUnresolvable() due to them, but let wrapSymbols
|
||||
// at symtab->reportUnresolvable() due to them, but let wrapSymbols
|
||||
// below sort things out before checking finally with
|
||||
// symtab.resolveRemainingUndefines().
|
||||
// symtab->resolveRemainingUndefines().
|
||||
sym->deferUndefined = true;
|
||||
real->deferUndefined = true;
|
||||
// We want to tell LTO not to inline symbols to be overwritten
|
||||
|
@ -235,14 +233,13 @@ lld::coff::addWrappedSymbols(COFFLinkerContext &ctx, opt::InputArgList &args) {
|
|||
// When this function is executed, only InputFiles and symbol table
|
||||
// contain pointers to symbol objects. We visit them to replace pointers,
|
||||
// so that wrapped symbols are swapped as instructed by the command line.
|
||||
void lld::coff::wrapSymbols(COFFLinkerContext &ctx,
|
||||
ArrayRef<WrappedSymbol> wrapped) {
|
||||
void lld::coff::wrapSymbols(ArrayRef<WrappedSymbol> wrapped) {
|
||||
DenseMap<Symbol *, Symbol *> map;
|
||||
for (const WrappedSymbol &w : wrapped) {
|
||||
map[w.sym] = w.wrap;
|
||||
map[w.real] = w.sym;
|
||||
if (Defined *d = dyn_cast<Defined>(w.wrap)) {
|
||||
Symbol *imp = ctx.symtab.find(("__imp_" + w.sym->getName()).str());
|
||||
Symbol *imp = symtab->find(("__imp_" + w.sym->getName()).str());
|
||||
// Create a new defined local import for the wrap symbol. If
|
||||
// no imp prefixed symbol existed, there's no need for it.
|
||||
// (We can't easily distinguish whether any object file actually
|
||||
|
@ -250,14 +247,14 @@ void lld::coff::wrapSymbols(COFFLinkerContext &ctx,
|
|||
if (imp) {
|
||||
DefinedLocalImport *wrapimp = make<DefinedLocalImport>(
|
||||
saver.save("__imp_" + w.wrap->getName()), d);
|
||||
ctx.symtab.localImportChunks.push_back(wrapimp->getChunk());
|
||||
symtab->localImportChunks.push_back(wrapimp->getChunk());
|
||||
map[imp] = wrapimp;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update pointers in input files.
|
||||
parallelForEach(ctx.objFileInstances, [&](ObjFile *file) {
|
||||
parallelForEach(ObjFile::instances, [&](ObjFile *file) {
|
||||
MutableArrayRef<Symbol *> syms = file->getMutableSymbols();
|
||||
for (size_t i = 0, e = syms.size(); i != e; ++i)
|
||||
if (Symbol *s = map.lookup(syms[i]))
|
||||
|
|
|
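The MinGW.cpp hunks above rebuild the symbol-wrapping step: a map sends foo to __wrap_foo and __real_foo back to foo, and every symbol pointer held by the input files is rewritten through that map. A rough, self-contained sketch of the substitution, with stand-in Symbol and WrappedSymbol types rather than lld's:

#include <string>
#include <unordered_map>
#include <vector>

struct Symbol { std::string name; };

struct WrappedSymbol {
  Symbol *sym;   // foo
  Symbol *real;  // __real_foo
  Symbol *wrap;  // __wrap_foo
};

// Rewrite every symbol pointer the input files hold so that references to
// foo resolve to __wrap_foo and references to __real_foo resolve to foo.
void wrapSymbols(const std::vector<WrappedSymbol> &wrapped,
                 std::vector<std::vector<Symbol *>> &fileSymbols) {
  std::unordered_map<Symbol *, Symbol *> map;
  for (const WrappedSymbol &w : wrapped) {
    map[w.sym] = w.wrap;
    map[w.real] = w.sym;
  }
  for (auto &syms : fileSymbols)
    for (Symbol *&s : syms)
      if (auto it = map.find(s); it != map.end())
        s = it->second;
}

int main() {
  Symbol foo{"foo"}, real{"__real_foo"}, wrap{"__wrap_foo"};
  std::vector<std::vector<Symbol *>> files = {{&foo, &real}};
  wrapSymbols({{&foo, &real, &wrap}}, files);
  return (files[0][0] == &wrap && files[0][1] == &foo) ? 0 : 1;
}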
@ -19,7 +19,6 @@
|
|||
|
||||
namespace lld {
|
||||
namespace coff {
|
||||
class COFFLinkerContext;
|
||||
|
||||
// Logic for deciding what symbols to export, when exporting all
|
||||
// symbols for MinGW.
|
||||
|
@ -35,7 +34,7 @@ public:
|
|||
llvm::StringSet<> excludeLibs;
|
||||
llvm::StringSet<> excludeObjects;
|
||||
|
||||
bool shouldExport(const COFFLinkerContext &ctx, Defined *sym) const;
|
||||
bool shouldExport(Defined *sym) const;
|
||||
};
|
||||
|
||||
void writeDefFile(StringRef name);
|
||||
|
@ -54,10 +53,9 @@ struct WrappedSymbol {
|
|||
Symbol *wrap;
|
||||
};
|
||||
|
||||
std::vector<WrappedSymbol> addWrappedSymbols(COFFLinkerContext &ctx,
|
||||
llvm::opt::InputArgList &args);
|
||||
std::vector<WrappedSymbol> addWrappedSymbols(llvm::opt::InputArgList &args);
|
||||
|
||||
void wrapSymbols(COFFLinkerContext &ctx, ArrayRef<WrappedSymbol> wrapped);
|
||||
void wrapSymbols(ArrayRef<WrappedSymbol> wrapped);
|
||||
|
||||
} // namespace coff
|
||||
} // namespace lld
|
||||
|
|
lld/COFF/PDB.cpp
|
@ -7,7 +7,6 @@
|
|||
//===----------------------------------------------------------------------===//
|
||||
|
||||
#include "PDB.h"
|
||||
#include "COFFLinkerContext.h"
|
||||
#include "Chunks.h"
|
||||
#include "Config.h"
|
||||
#include "DebugTypes.h"
|
||||
|
@ -67,6 +66,16 @@ using llvm::pdb::StringTableFixup;
|
|||
|
||||
static ExitOnError exitOnErr;
|
||||
|
||||
static Timer totalPdbLinkTimer("PDB Emission (Cumulative)", Timer::root());
|
||||
static Timer addObjectsTimer("Add Objects", totalPdbLinkTimer);
|
||||
Timer lld::coff::loadGHashTimer("Global Type Hashing", addObjectsTimer);
|
||||
Timer lld::coff::mergeGHashTimer("GHash Type Merging", addObjectsTimer);
|
||||
static Timer typeMergingTimer("Type Merging", addObjectsTimer);
|
||||
static Timer symbolMergingTimer("Symbol Merging", addObjectsTimer);
|
||||
static Timer publicsLayoutTimer("Publics Stream Layout", totalPdbLinkTimer);
|
||||
static Timer tpiStreamLayoutTimer("TPI Stream Layout", totalPdbLinkTimer);
|
||||
static Timer diskCommitTimer("Commit to Disk", totalPdbLinkTimer);
|
||||
|
||||
namespace {
|
||||
class DebugSHandler;
|
||||
|
||||
|
@ -74,8 +83,8 @@ class PDBLinker {
|
|||
friend DebugSHandler;
|
||||
|
||||
public:
|
||||
PDBLinker(COFFLinkerContext &ctx)
|
||||
: builder(bAlloc), tMerger(ctx, bAlloc), ctx(ctx) {
|
||||
PDBLinker(SymbolTable *symtab)
|
||||
: symtab(symtab), builder(bAlloc), tMerger(bAlloc) {
|
||||
// This isn't strictly necessary, but link.exe usually puts an empty string
|
||||
// as the first "valid" string in the string table, so we do the same in
|
||||
// order to maintain as much byte-for-byte compatibility as possible.
|
||||
|
@ -98,7 +107,7 @@ public:
|
|||
void addPublicsToPDB();
|
||||
|
||||
/// Link info for each import file in the symbol table into the PDB.
|
||||
void addImportFilesToPDB();
|
||||
void addImportFilesToPDB(ArrayRef<OutputSection *> outputSections);
|
||||
|
||||
void createModuleDBI(ObjFile *file);
|
||||
|
||||
|
@ -135,7 +144,8 @@ public:
|
|||
std::vector<uint8_t> &storage);
|
||||
|
||||
/// Add the section map and section contributions to the PDB.
|
||||
void addSections(ArrayRef<uint8_t> sectionTable);
|
||||
void addSections(ArrayRef<OutputSection *> outputSections,
|
||||
ArrayRef<uint8_t> sectionTable);
|
||||
|
||||
/// Write the PDB to disk and store the Guid generated for it in *Guid.
|
||||
void commit(codeview::GUID *guid);
|
||||
|
@ -144,13 +154,12 @@ public:
|
|||
void printStats();
|
||||
|
||||
private:
|
||||
SymbolTable *symtab;
|
||||
|
||||
pdb::PDBFileBuilder builder;
|
||||
|
||||
TypeMerger tMerger;
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
|
||||
/// PDBs use a single global string table for filenames in the file checksum
|
||||
/// table.
|
||||
DebugStringTableSubsection pdbStrTab;
|
||||
|
@ -290,12 +299,11 @@ static void addTypeInfo(pdb::TpiStreamBuilder &tpiBuilder,
|
|||
});
|
||||
}
|
||||
|
||||
static void addGHashTypeInfo(COFFLinkerContext &ctx,
|
||||
pdb::PDBFileBuilder &builder) {
|
||||
static void addGHashTypeInfo(pdb::PDBFileBuilder &builder) {
|
||||
// Start the TPI or IPI stream header.
|
||||
builder.getTpiBuilder().setVersionHeader(pdb::PdbTpiV80);
|
||||
builder.getIpiBuilder().setVersionHeader(pdb::PdbTpiV80);
|
||||
for_each(ctx.tpiSourceList, [&](TpiSource *source) {
|
||||
for_each(TpiSource::instances, [&](TpiSource *source) {
|
||||
builder.getTpiBuilder().addTypeRecords(source->mergedTpi.recs,
|
||||
source->mergedTpi.recSizes,
|
||||
source->mergedTpi.recHashes);
|
||||
|
@ -711,9 +719,8 @@ Error PDBLinker::commitSymbolsForObject(void *ctx, void *obj,
|
|||
static_cast<ObjFile *>(obj), writer);
|
||||
}
|
||||
|
||||
static pdb::SectionContrib createSectionContrib(COFFLinkerContext &ctx,
|
||||
const Chunk *c, uint32_t modi) {
|
||||
OutputSection *os = c ? ctx.getOutputSection(c) : nullptr;
|
||||
static pdb::SectionContrib createSectionContrib(const Chunk *c, uint32_t modi) {
|
||||
OutputSection *os = c ? c->getOutputSection() : nullptr;
|
||||
pdb::SectionContrib sc;
|
||||
memset(&sc, 0, sizeof(sc));
|
||||
sc.ISect = os ? os->sectionIndex : llvm::pdb::kInvalidStreamIndex;
|
||||
|
@ -1016,7 +1023,7 @@ void PDBLinker::addDebugSymbols(TpiSource *source) {
|
|||
if (!source->file)
|
||||
return;
|
||||
|
||||
ScopedTimer t(ctx.symbolMergingTimer);
|
||||
ScopedTimer t(symbolMergingTimer);
|
||||
pdb::DbiStreamBuilder &dbiBuilder = builder.getDbiBuilder();
|
||||
DebugSHandler dsh(*this, *source->file, source);
|
||||
// Now do all live .debug$S and .debug$F sections.
|
||||
|
@ -1075,7 +1082,7 @@ void PDBLinker::createModuleDBI(ObjFile *file) {
|
|||
auto *secChunk = dyn_cast<SectionChunk>(c);
|
||||
if (!secChunk || !secChunk->live)
|
||||
continue;
|
||||
pdb::SectionContrib sc = createSectionContrib(ctx, secChunk, modi);
|
||||
pdb::SectionContrib sc = createSectionContrib(secChunk, modi);
|
||||
file->moduleDBI->setFirstSectionContrib(sc);
|
||||
break;
|
||||
}
|
||||
|
@ -1088,7 +1095,7 @@ void PDBLinker::addDebug(TpiSource *source) {
|
|||
// indices to PDB type and item indices. If we are using ghashes, types have
|
||||
// already been merged.
|
||||
if (!config->debugGHashes) {
|
||||
ScopedTimer t(ctx.typeMergingTimer);
|
||||
ScopedTimer t(typeMergingTimer);
|
||||
if (Error e = source->mergeDebugT(&tMerger)) {
|
||||
// If type merging failed, ignore the symbols.
|
||||
warnUnusable(source->file, std::move(e));
|
||||
|
@ -1106,7 +1113,7 @@ void PDBLinker::addDebug(TpiSource *source) {
|
|||
addDebugSymbols(source);
|
||||
}
|
||||
|
||||
static pdb::BulkPublic createPublic(COFFLinkerContext &ctx, Defined *def) {
|
||||
static pdb::BulkPublic createPublic(Defined *def) {
|
||||
pdb::BulkPublic pub;
|
||||
pub.Name = def->getName().data();
|
||||
pub.NameLen = def->getName().size();
|
||||
|
@ -1120,7 +1127,7 @@ static pdb::BulkPublic createPublic(COFFLinkerContext &ctx, Defined *def) {
|
|||
}
|
||||
pub.setFlags(flags);
|
||||
|
||||
OutputSection *os = ctx.getOutputSection(def->getChunk());
|
||||
OutputSection *os = def->getChunk()->getOutputSection();
|
||||
assert(os && "all publics should be in final image");
|
||||
pub.Offset = def->getRVA() - os->getRVA();
|
||||
pub.Segment = os->sectionIndex;
|
||||
|
@ -1130,31 +1137,32 @@ static pdb::BulkPublic createPublic(COFFLinkerContext &ctx, Defined *def) {
|
|||
// Add all object files to the PDB. Merge .debug$T sections into IpiData and
|
||||
// TpiData.
|
||||
void PDBLinker::addObjectsToPDB() {
|
||||
ScopedTimer t1(ctx.addObjectsTimer);
|
||||
ScopedTimer t1(addObjectsTimer);
|
||||
|
||||
// Create module descriptors
|
||||
for_each(ctx.objFileInstances, [&](ObjFile *obj) { createModuleDBI(obj); });
|
||||
for_each(ObjFile::instances, [&](ObjFile *obj) { createModuleDBI(obj); });
|
||||
|
||||
// Reorder dependency type sources to come first.
|
||||
tMerger.sortDependencies();
|
||||
TpiSource::sortDependencies();
|
||||
|
||||
// Merge type information from input files using global type hashing.
|
||||
if (config->debugGHashes)
|
||||
tMerger.mergeTypesWithGHash();
|
||||
|
||||
// Merge dependencies and then regular objects.
|
||||
for_each(tMerger.dependencySources,
|
||||
for_each(TpiSource::dependencySources,
|
||||
[&](TpiSource *source) { addDebug(source); });
|
||||
for_each(TpiSource::objectSources,
|
||||
[&](TpiSource *source) { addDebug(source); });
|
||||
for_each(tMerger.objectSources, [&](TpiSource *source) { addDebug(source); });
|
||||
|
||||
builder.getStringTableBuilder().setStrings(pdbStrTab);
|
||||
t1.stop();
|
||||
|
||||
// Construct TPI and IPI stream contents.
|
||||
ScopedTimer t2(ctx.tpiStreamLayoutTimer);
|
||||
ScopedTimer t2(tpiStreamLayoutTimer);
|
||||
// Collect all the merged types.
|
||||
if (config->debugGHashes) {
|
||||
addGHashTypeInfo(ctx, builder);
|
||||
addGHashTypeInfo(builder);
|
||||
} else {
|
||||
addTypeInfo(builder.getTpiBuilder(), tMerger.getTypeTable());
|
||||
addTypeInfo(builder.getIpiBuilder(), tMerger.getIDTable());
|
||||
|
@ -1162,7 +1170,7 @@ void PDBLinker::addObjectsToPDB() {
|
|||
t2.stop();
|
||||
|
||||
if (config->showSummary) {
|
||||
for_each(ctx.tpiSourceList, [&](TpiSource *source) {
|
||||
for_each(TpiSource::instances, [&](TpiSource *source) {
|
||||
nbTypeRecords += source->nbTypeRecords;
|
||||
nbTypeRecordsBytes += source->nbTypeRecordsBytes;
|
||||
});
|
||||
|
@ -1170,11 +1178,11 @@ void PDBLinker::addObjectsToPDB() {
|
|||
}
|
||||
|
||||
void PDBLinker::addPublicsToPDB() {
|
||||
ScopedTimer t3(ctx.publicsLayoutTimer);
|
||||
ScopedTimer t3(publicsLayoutTimer);
|
||||
// Compute the public symbols.
|
||||
auto &gsiBuilder = builder.getGsiBuilder();
|
||||
std::vector<pdb::BulkPublic> publics;
|
||||
ctx.symtab.forEachSymbol([&publics, this](Symbol *s) {
|
||||
symtab->forEachSymbol([&publics](Symbol *s) {
|
||||
// Only emit external, defined, live symbols that have a chunk. Static,
|
||||
// non-external symbols do not appear in the symbol table.
|
||||
auto *def = dyn_cast<Defined>(s);
|
||||
|
@ -1195,7 +1203,7 @@ void PDBLinker::addPublicsToPDB() {
|
|||
return;
|
||||
}
|
||||
}
|
||||
publics.push_back(createPublic(ctx, def));
|
||||
publics.push_back(createPublic(def));
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -1219,10 +1227,10 @@ void PDBLinker::printStats() {
|
|||
stream << format_decimal(v, 15) << " " << s << '\n';
|
||||
};
|
||||
|
||||
print(ctx.objFileInstances.size(),
|
||||
print(ObjFile::instances.size(),
|
||||
"Input OBJ files (expanded from all cmd-line inputs)");
|
||||
print(ctx.typeServerSourceMappings.size(), "PDB type server dependencies");
|
||||
print(ctx.precompSourceMappings.size(), "Precomp OBJ dependencies");
|
||||
print(TpiSource::countTypeServerPDBs(), "PDB type server dependencies");
|
||||
print(TpiSource::countPrecompObjs(), "Precomp OBJ dependencies");
|
||||
print(nbTypeRecords, "Input type records");
|
||||
print(nbTypeRecordsBytes, "Input type records bytes");
|
||||
print(builder.getTpiBuilder().getRecordCount(), "Merged TPI records");
|
||||
|
@ -1475,13 +1483,13 @@ static void addLinkerModuleSectionSymbol(pdb::DbiModuleDescriptorBuilder &mod,
|
|||
}
|
||||
|
||||
// Add all import files as modules to the PDB.
|
||||
void PDBLinker::addImportFilesToPDB() {
|
||||
if (ctx.importFileInstances.empty())
|
||||
void PDBLinker::addImportFilesToPDB(ArrayRef<OutputSection *> outputSections) {
|
||||
if (ImportFile::instances.empty())
|
||||
return;
|
||||
|
||||
std::map<std::string, llvm::pdb::DbiModuleDescriptorBuilder *> dllToModuleDbi;
|
||||
|
||||
for (ImportFile *file : ctx.importFileInstances) {
|
||||
for (ImportFile *file : ImportFile::instances) {
|
||||
if (!file->live)
|
||||
continue;
|
||||
|
||||
|
@ -1505,7 +1513,7 @@ void PDBLinker::addImportFilesToPDB() {
|
|||
exitOnErr(dbiBuilder.addModuleInfo(file->dllName));
|
||||
firstMod.setObjFileName(libPath);
|
||||
pdb::SectionContrib sc =
|
||||
createSectionContrib(ctx, nullptr, llvm::pdb::kInvalidStreamIndex);
|
||||
createSectionContrib(nullptr, llvm::pdb::kInvalidStreamIndex);
|
||||
firstMod.setFirstSectionContrib(sc);
|
||||
|
||||
// The second module is where the import stream goes.
|
||||
|
@ -1515,7 +1523,7 @@ void PDBLinker::addImportFilesToPDB() {
|
|||
|
||||
DefinedImportThunk *thunk = cast<DefinedImportThunk>(file->thunkSym);
|
||||
Chunk *thunkChunk = thunk->getChunk();
|
||||
OutputSection *thunkOS = ctx.getOutputSection(thunkChunk);
|
||||
OutputSection *thunkOS = thunkChunk->getOutputSection();
|
||||
|
||||
ObjNameSym ons(SymbolRecordKind::ObjNameSym);
|
||||
Compile3Sym cs(SymbolRecordKind::Compile3Sym);
|
||||
|
@ -1557,27 +1565,28 @@ void PDBLinker::addImportFilesToPDB() {
|
|||
mod->addSymbol(newSym);
|
||||
|
||||
pdb::SectionContrib sc =
|
||||
createSectionContrib(ctx, thunk->getChunk(), mod->getModuleIndex());
|
||||
createSectionContrib(thunk->getChunk(), mod->getModuleIndex());
|
||||
mod->setFirstSectionContrib(sc);
|
||||
}
|
||||
}
|
||||
|
||||
// Creates a PDB file.
|
||||
void lld::coff::createPDB(COFFLinkerContext &ctx,
|
||||
void lld::coff::createPDB(SymbolTable *symtab,
|
||||
ArrayRef<OutputSection *> outputSections,
|
||||
ArrayRef<uint8_t> sectionTable,
|
||||
llvm::codeview::DebugInfo *buildId) {
|
||||
ScopedTimer t1(ctx.totalPdbLinkTimer);
|
||||
PDBLinker pdb(ctx);
|
||||
ScopedTimer t1(totalPdbLinkTimer);
|
||||
PDBLinker pdb(symtab);
|
||||
|
||||
pdb.initialize(buildId);
|
||||
pdb.addObjectsToPDB();
|
||||
pdb.addImportFilesToPDB();
|
||||
pdb.addSections(sectionTable);
|
||||
pdb.addImportFilesToPDB(outputSections);
|
||||
pdb.addSections(outputSections, sectionTable);
|
||||
pdb.addNatvisFiles();
|
||||
pdb.addNamedStreams();
|
||||
pdb.addPublicsToPDB();
|
||||
|
||||
ScopedTimer t2(ctx.diskCommitTimer);
|
||||
ScopedTimer t2(diskCommitTimer);
|
||||
codeview::GUID guid;
|
||||
pdb.commit(&guid);
|
||||
memcpy(&buildId->PDB70.Signature, &guid, 16);
|
||||
|
@@ -1617,7 +1626,8 @@ void PDBLinker::initialize(llvm::codeview::DebugInfo *buildId) {
dbiBuilder.setBuildNumber(14, 11);
}
void PDBLinker::addSections(ArrayRef<uint8_t> sectionTable) {
void PDBLinker::addSections(ArrayRef<OutputSection *> outputSections,
ArrayRef<uint8_t> sectionTable) {
// It's not entirely clear what this is, but the * Linker * module uses it.
pdb::DbiStreamBuilder &dbiBuilder = builder.getDbiBuilder();
nativePath = config->pdbPath;
@@ -1628,11 +1638,11 @@ void PDBLinker::addSections(ArrayRef<uint8_t> sectionTable) {
addCommonLinkerModuleSymbols(nativePath, linkerModule);
// Add section contributions. They must be ordered by ascending RVA.
for (OutputSection *os : ctx.outputSections) {
for (OutputSection *os : outputSections) {
addLinkerModuleSectionSymbol(linkerModule, *os);
for (Chunk *c : os->chunks) {
pdb::SectionContrib sc =
createSectionContrib(ctx, c, linkerModule.getModuleIndex());
createSectionContrib(c, linkerModule.getModuleIndex());
builder.getDbiBuilder().addSectionContrib(sc);
}
}
@@ -1641,7 +1651,7 @@ void PDBLinker::addSections(ArrayRef<uint8_t> sectionTable) {
// to provide trampolines thunks for incremental function patching. Set this
// as "unused" because LLD doesn't support /INCREMENTAL link.
pdb::SectionContrib sc =
createSectionContrib(ctx, nullptr, llvm::pdb::kInvalidStreamIndex);
createSectionContrib(nullptr, llvm::pdb::kInvalidStreamIndex);
linkerModule.setFirstSectionContrib(sc);
// Add Section Map stream.
@@ -23,15 +23,21 @@ namespace lld {
class Timer;
namespace coff {
class OutputSection;
class SectionChunk;
class COFFLinkerContext;
class SymbolTable;
void createPDB(COFFLinkerContext &ctx, llvm::ArrayRef<uint8_t> sectionTable,
void createPDB(SymbolTable *symtab,
llvm::ArrayRef<OutputSection *> outputSections,
llvm::ArrayRef<uint8_t> sectionTable,
llvm::codeview::DebugInfo *buildId);
llvm::Optional<std::pair<llvm::StringRef, uint32_t>>
getFileLineCodeView(const SectionChunk *c, uint32_t addr);
extern Timer loadGHashTimer;
extern Timer mergeGHashTimer;
} // namespace coff
} // namespace lld
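With the context object gone, the PDB phase timers return to the usual global pattern: declared `extern` in the header (as this hunk shows) and defined exactly once in an implementation file, parented to `Timer::root()` so they appear under `-time` output. A sketch of that declaration/definition split follows; the definition lines and their location are assumptions, since the defining file is not part of this excerpt.

```cpp
// In the header (per this hunk):
extern Timer loadGHashTimer;
extern Timer mergeGHashTimer;

// In one .cpp file (assumed location, not shown in this diff):
Timer loadGHashTimer("Global Type Hashing", Timer::root());
Timer mergeGHashTimer("GHash Type Merging", Timer::root());
```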
@@ -7,7 +7,6 @@
//===----------------------------------------------------------------------===//
#include "SymbolTable.h"
#include "COFFLinkerContext.h"
#include "Config.h"
#include "Driver.h"
#include "LTO.h"
@@ -35,6 +34,10 @@ StringRef ltrim1(StringRef s, const char *chars) {
return s;
}
static Timer ltoTimer("LTO", Timer::root());
SymbolTable *symtab;
void SymbolTable::addFile(InputFile *file) {
log("Reading " + toString(file));
file->parse();
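The restored globals here are a file-scope `static Timer` plus a lone `SymbolTable *symtab` pointer. Nothing in this hunk initializes the pointer; in the pre-context layout the driver allocates the table once at startup and every other file dereferences the global. A hedged sketch of that wiring, with the driver line assumed rather than shown in this excerpt:

```cpp
// Somewhere early in the driver (assumed, not part of this excerpt):
symtab = make<SymbolTable>(); // lld's arena-allocating make<T>()

// Afterwards any COFF file that includes SymbolTable.h can do:
if (Symbol *tls = symtab->findUnderscore("_tls_used"))
  /* ... use the symbol ... */;
```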
@@ -49,11 +52,11 @@ void SymbolTable::addFile(InputFile *file) {
}
if (auto *f = dyn_cast<ObjFile>(file)) {
ctx.objFileInstances.push_back(f);
ObjFile::instances.push_back(f);
} else if (auto *f = dyn_cast<BitcodeFile>(file)) {
ctx.bitcodeFileInstances.push_back(f);
BitcodeFile::instances.push_back(f);
} else if (auto *f = dyn_cast<ImportFile>(file)) {
ctx.importFileInstances.push_back(f);
ImportFile::instances.push_back(f);
}
driver->parseDirectives(file);
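Here the per-context vectors (`ctx.objFileInstances` and friends) give way to the older pattern of a static `instances` registry on each input-file class, which `addFile` appends to. Roughly what such a member looks like on the class side (a sketch; the real declarations live in InputFiles.h, which this excerpt does not show):

```cpp
// Sketch of the static-registry pattern used by ObjFile/BitcodeFile/ImportFile.
class ObjFile : public InputFile {
public:
  // Every ObjFile created during the link is pushed here by
  // SymbolTable::addFile and later iterated by the Writer and PDB code.
  static std::vector<ObjFile *> instances;
  // ...
};

// Defined once in a .cpp file:
std::vector<ObjFile *> ObjFile::instances;
```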
@@ -369,9 +372,12 @@ bool SymbolTable::handleMinGWAutomaticImport(Symbol *sym, StringRef name) {
/// defined symbol imported" diagnostic for symbols in localImports.
/// objFiles and bitcodeFiles (if not nullptr) are used to report where
/// undefined symbols are referenced.
static void reportProblemSymbols(
const COFFLinkerContext &ctx, const SmallPtrSetImpl<Symbol *> &undefs,
const DenseMap<Symbol *, Symbol *> *localImports, bool needBitcodeFiles) {
static void
reportProblemSymbols(const SmallPtrSetImpl<Symbol *> &undefs,
const DenseMap<Symbol *, Symbol *> *localImports,
const std::vector<ObjFile *> objFiles,
const std::vector<BitcodeFile *> *bitcodeFiles) {
// Return early if there is nothing to report (which should be
// the common case).
if (undefs.empty() && (!localImports || localImports->empty()))
@ -412,11 +418,11 @@ static void reportProblemSymbols(
|
|||
}
|
||||
};
|
||||
|
||||
for (ObjFile *file : ctx.objFileInstances)
|
||||
for (ObjFile *file : objFiles)
|
||||
processFile(file, file->getSymbols());
|
||||
|
||||
if (needBitcodeFiles)
|
||||
for (BitcodeFile *file : ctx.bitcodeFileInstances)
|
||||
if (bitcodeFiles)
|
||||
for (BitcodeFile *file : *bitcodeFiles)
|
||||
processFile(file, file->getSymbols());
|
||||
|
||||
for (const UndefinedDiag &undefDiag : undefDiags)
|
||||
|
@ -445,8 +451,9 @@ void SymbolTable::reportUnresolvable() {
|
|||
undefs.insert(sym);
|
||||
}
|
||||
|
||||
reportProblemSymbols(ctx, undefs,
|
||||
/* localImports */ nullptr, true);
|
||||
reportProblemSymbols(undefs,
|
||||
/* localImports */ nullptr, ObjFile::instances,
|
||||
&BitcodeFile::instances);
|
||||
}
|
||||
|
||||
void SymbolTable::resolveRemainingUndefines() {
|
||||
|
@ -508,8 +515,8 @@ void SymbolTable::resolveRemainingUndefines() {
|
|||
}
|
||||
|
||||
reportProblemSymbols(
|
||||
ctx, undefs, config->warnLocallyDefinedImported ? &localImports : nullptr,
|
||||
false);
|
||||
undefs, config->warnLocallyDefinedImported ? &localImports : nullptr,
|
||||
ObjFile::instances, /* bitcode files no longer needed */ nullptr);
|
||||
}
|
||||
|
||||
std::pair<Symbol *, bool> SymbolTable::insert(StringRef name) {
|
||||
|
@ -790,20 +797,20 @@ void SymbolTable::addLibcall(StringRef name) {
|
|||
}
|
||||
}
|
||||
|
||||
std::vector<Chunk *> SymbolTable::getChunks() const {
|
||||
std::vector<Chunk *> SymbolTable::getChunks() {
|
||||
std::vector<Chunk *> res;
|
||||
for (ObjFile *file : ctx.objFileInstances) {
|
||||
for (ObjFile *file : ObjFile::instances) {
|
||||
ArrayRef<Chunk *> v = file->getChunks();
|
||||
res.insert(res.end(), v.begin(), v.end());
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
Symbol *SymbolTable::find(StringRef name) const {
|
||||
Symbol *SymbolTable::find(StringRef name) {
|
||||
return symMap.lookup(CachedHashStringRef(name));
|
||||
}
|
||||
|
||||
Symbol *SymbolTable::findUnderscore(StringRef name) const {
|
||||
Symbol *SymbolTable::findUnderscore(StringRef name) {
|
||||
if (config->machine == I386)
|
||||
return find(("_" + name).str());
|
||||
return find(name);
|
||||
|
@ -866,17 +873,17 @@ Symbol *SymbolTable::addUndefined(StringRef name) {
|
|||
}
|
||||
|
||||
void SymbolTable::addCombinedLTOObjects() {
|
||||
if (ctx.bitcodeFileInstances.empty())
|
||||
if (BitcodeFile::instances.empty())
|
||||
return;
|
||||
|
||||
ScopedTimer t(ctx.ltoTimer);
|
||||
lto.reset(new BitcodeCompiler());
|
||||
for (BitcodeFile *f : ctx.bitcodeFileInstances)
|
||||
ScopedTimer t(ltoTimer);
|
||||
lto.reset(new BitcodeCompiler);
|
||||
for (BitcodeFile *f : BitcodeFile::instances)
|
||||
lto->add(*f);
|
||||
for (InputFile *newObj : lto->compile(ctx)) {
|
||||
for (InputFile *newObj : lto->compile()) {
|
||||
ObjFile *obj = cast<ObjFile>(newObj);
|
||||
obj->parse();
|
||||
ctx.objFileInstances.push_back(obj);
|
||||
ObjFile::instances.push_back(obj);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -25,7 +25,6 @@ namespace coff {
class Chunk;
class CommonChunk;
class COFFLinkerContext;
class Defined;
class DefinedAbsolute;
class DefinedRegular;
@@ -48,8 +47,6 @@ class Symbol;
// There is one add* function per symbol type.
class SymbolTable {
public:
SymbolTable(COFFLinkerContext &ctx) : ctx(ctx) {}
void addFile(InputFile *file);
// Emit errors for symbols that cannot be resolved.
@@ -66,11 +63,11 @@ public:
bool handleMinGWAutomaticImport(Symbol *sym, StringRef name);
// Returns a list of chunks of selected symbols.
std::vector<Chunk *> getChunks() const;
std::vector<Chunk *> getChunks();
// Returns a symbol for a given name. Returns a nullptr if not found.
Symbol *find(StringRef name) const;
Symbol *findUnderscore(StringRef name) const;
Symbol *find(StringRef name);
Symbol *findUnderscore(StringRef name);
// Occasionally we have to resolve an undefined symbol to its
// mangled symbol. This function tries to find a mangled name
@@ -134,10 +131,10 @@ private:
llvm::DenseMap<llvm::CachedHashStringRef, Symbol *> symMap;
std::unique_ptr<BitcodeCompiler> lto;
COFFLinkerContext &ctx;
};
extern SymbolTable *symtab;
std::vector<std::string> getSymbolLocations(ObjFile *file, uint32_t symIndex);
StringRef ltrim1(StringRef s, const char *chars);
@@ -10,8 +10,6 @@
#define LLD_COFF_TYPEMERGER_H
#include "Config.h"
#include "DebugTypes.h"
#include "lld/Common/Timer.h"
#include "llvm/DebugInfo/CodeView/MergingTypeTableBuilder.h"
#include "llvm/DebugInfo/CodeView/TypeHashing.h"
#include "llvm/Support/Allocator.h"
@@ -27,7 +25,7 @@ struct GHashState;
class TypeMerger {
public:
TypeMerger(COFFLinkerContext &ctx, llvm::BumpPtrAllocator &alloc);
TypeMerger(llvm::BumpPtrAllocator &alloc);
~TypeMerger();
@@ -61,20 +59,6 @@ public:
// keyed by type index.
SmallVector<uint32_t, 0> tpiCounts;
SmallVector<uint32_t, 0> ipiCounts;
/// Dependency type sources, such as type servers or PCH object files. These
/// must be processed before objects that rely on them. Set by
/// sortDependencies.
ArrayRef<TpiSource *> dependencySources;
/// Object file sources. These must be processed after dependencySources.
ArrayRef<TpiSource *> objectSources;
/// Sorts the dependencies and reassigns TpiSource indices.
void sortDependencies();
private:
COFFLinkerContext &ctx;
};
} // namespace coff
@@ -7,7 +7,6 @@
//===----------------------------------------------------------------------===//
#include "Writer.h"
#include "COFFLinkerContext.h"
#include "CallGraphSort.h"
#include "Config.h"
#include "DLL.h"
@@ -81,14 +80,23 @@ static_assert(dosStubSize % 8 == 0, "DOSStub size must be multiple of 8");
static const int numberOfDataDirectory = 16;
// Global vector of all output sections. After output sections are finalized,
// this can be indexed by Chunk::getOutputSection.
static std::vector<OutputSection *> outputSections;
OutputSection *Chunk::getOutputSection() const {
return osidx == 0 ? nullptr : outputSections[osidx - 1];
}
void OutputSection::clear() { outputSections.clear(); }
namespace {
class DebugDirectoryChunk : public NonSectionChunk {
public:
DebugDirectoryChunk(COFFLinkerContext &c,
const std::vector<std::pair<COFF::DebugType, Chunk *>> &r,
DebugDirectoryChunk(const std::vector<std::pair<COFF::DebugType, Chunk *>> &r,
bool writeRepro)
: records(r), writeRepro(writeRepro), ctx(c) {}
: records(r), writeRepro(writeRepro) {}
size_t getSize() const override {
return (records.size() + int(writeRepro)) * sizeof(debug_directory);
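The restored lookup path stores output sections in a file-level vector and lets each Chunk find its section through a small 1-based index, with 0 reserved for "not assigned". The following self-contained sketch demonstrates that indexing scheme; the class names mirror the diff but the members are simplified stand-ins, not lld's real Chunk/OutputSection.

```cpp
#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

struct OutputSection; // simplified stand-in for lld's type

static std::vector<OutputSection *> outputSections; // file-scope, as in the diff

struct OutputSection {
  explicit OutputSection(std::string n) : name(std::move(n)) {}
  std::string name;
  uint32_t sectionIndex = 0;
};

struct Chunk {
  uint32_t osidx = 0; // 0 means "no output section assigned"
  void setOutputSectionIdx(uint32_t i) { osidx = i; }
  OutputSection *getOutputSection() const {
    return osidx == 0 ? nullptr : outputSections[osidx - 1];
  }
};

int main() {
  OutputSection text(".text"), rdata(".rdata");
  outputSections = {&text, &rdata};

  // Mirrors Writer::assignOutputSectionIndices(): indices start at 1.
  uint32_t idx = 1;
  for (OutputSection *os : outputSections)
    os->sectionIndex = idx++;

  Chunk c;
  assert(c.getOutputSection() == nullptr); // unassigned chunk
  c.setOutputSectionIdx(rdata.sectionIndex);
  assert(c.getOutputSection() == &rdata);
  return 0;
}
```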
@ -99,7 +107,7 @@ public:
|
|||
|
||||
for (const std::pair<COFF::DebugType, Chunk *>& record : records) {
|
||||
Chunk *c = record.second;
|
||||
OutputSection *os = ctx.getOutputSection(c);
|
||||
OutputSection *os = c->getOutputSection();
|
||||
uint64_t offs = os->getFileOff() + (c->getRVA() - os->getRVA());
|
||||
fillEntry(d, record.first, c->getSize(), c->getRVA(), offs);
|
||||
++d;
|
||||
|
@ -138,8 +146,6 @@ private:
|
|||
mutable std::vector<support::ulittle32_t *> timeDateStamps;
|
||||
const std::vector<std::pair<COFF::DebugType, Chunk *>> &records;
|
||||
bool writeRepro;
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
};
|
||||
|
||||
class CVDebugRecordChunk : public NonSectionChunk {
|
||||
|
@ -195,7 +201,7 @@ public:
|
|||
// The writer writes a SymbolTable result to a file.
|
||||
class Writer {
|
||||
public:
|
||||
Writer(COFFLinkerContext &c) : buffer(errorHandler().outputBuffer), ctx(c) {}
|
||||
Writer() : buffer(errorHandler().outputBuffer) {}
|
||||
void run();
|
||||
|
||||
private:
|
||||
|
@ -298,12 +304,13 @@ private:
|
|||
// files, so we need to keep track of them separately.
|
||||
Chunk *firstPdata = nullptr;
|
||||
Chunk *lastPdata;
|
||||
|
||||
COFFLinkerContext &ctx;
|
||||
};
|
||||
} // anonymous namespace
|
||||
|
||||
void lld::coff::writeResult(COFFLinkerContext &ctx) { Writer(ctx).run(); }
|
||||
static Timer codeLayoutTimer("Code Layout", Timer::root());
|
||||
static Timer diskCommitTimer("Commit Output File", Timer::root());
|
||||
|
||||
void lld::coff::writeResult() { Writer().run(); }
|
||||
|
||||
void OutputSection::addChunk(Chunk *c) {
|
||||
chunks.push_back(c);
|
||||
|
@ -542,7 +549,7 @@ void Writer::finalizeAddresses() {
|
|||
return;
|
||||
|
||||
size_t origNumChunks = 0;
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
sec->origChunks = sec->chunks;
|
||||
origNumChunks += sec->chunks.size();
|
||||
}
|
||||
|
@ -554,7 +561,7 @@ void Writer::finalizeAddresses() {
|
|||
// adding them turned out ok.
|
||||
bool rangesOk = true;
|
||||
size_t numChunks = 0;
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
if (!verifyRanges(sec->chunks)) {
|
||||
rangesOk = false;
|
||||
break;
|
||||
|
@ -575,7 +582,7 @@ void Writer::finalizeAddresses() {
|
|||
// If the previous pass didn't work out, reset everything back to the
|
||||
// original conditions before retrying with a wider margin. This should
|
||||
// ideally never happen under real circumstances.
|
||||
for (OutputSection *sec : ctx.outputSections)
|
||||
for (OutputSection *sec : outputSections)
|
||||
sec->chunks = sec->origChunks;
|
||||
margin *= 2;
|
||||
}
|
||||
|
@ -583,7 +590,7 @@ void Writer::finalizeAddresses() {
|
|||
// Try adding thunks everywhere where it is needed, with a margin
|
||||
// to avoid things going out of range due to the added thunks.
|
||||
bool addressesChanged = false;
|
||||
for (OutputSection *sec : ctx.outputSections)
|
||||
for (OutputSection *sec : outputSections)
|
||||
addressesChanged |= createThunks(sec, margin);
|
||||
// If the verification above thought we needed thunks, we should have
|
||||
// added some.
|
||||
|
@ -600,7 +607,7 @@ void Writer::finalizeAddresses() {
|
|||
|
||||
// The main function of the writer.
|
||||
void Writer::run() {
|
||||
ScopedTimer t1(ctx.codeLayoutTimer);
|
||||
ScopedTimer t1(codeLayoutTimer);
|
||||
|
||||
createImportTables();
|
||||
createSections();
|
||||
|
@ -638,17 +645,17 @@ void Writer::run() {
|
|||
|
||||
if (!config->pdbPath.empty() && config->debug) {
|
||||
assert(buildId);
|
||||
createPDB(ctx, sectionTable, buildId->buildId);
|
||||
createPDB(symtab, outputSections, sectionTable, buildId->buildId);
|
||||
}
|
||||
writeBuildId();
|
||||
|
||||
writeLLDMapFile(ctx);
|
||||
writeMapFile(ctx);
|
||||
writeLLDMapFile(outputSections);
|
||||
writeMapFile(outputSections);
|
||||
|
||||
if (errorCount())
|
||||
return;
|
||||
|
||||
ScopedTimer t2(ctx.outputCommitTimer);
|
||||
ScopedTimer t2(diskCommitTimer);
|
||||
if (auto e = buffer->commit())
|
||||
fatal("failed to write the output file: " + toString(std::move(e)));
|
||||
}
|
||||
|
@ -809,8 +816,7 @@ static bool shouldStripSectionSuffix(SectionChunk *sc, StringRef name) {
|
|||
|
||||
void Writer::sortSections() {
|
||||
if (!config->callGraphProfile.empty()) {
|
||||
DenseMap<const SectionChunk *, int> order =
|
||||
computeCallGraphProfileOrder(ctx);
|
||||
DenseMap<const SectionChunk *, int> order = computeCallGraphProfileOrder();
|
||||
for (auto it : order) {
|
||||
if (DefinedRegular *sym = it.first->sym)
|
||||
config->order[sym->getName()] = it.second;
|
||||
|
@ -837,7 +843,7 @@ void Writer::createSections() {
|
|||
OutputSection *&sec = sections[{name, outChars}];
|
||||
if (!sec) {
|
||||
sec = make<OutputSection>(name, outChars);
|
||||
ctx.outputSections.push_back(sec);
|
||||
outputSections.push_back(sec);
|
||||
}
|
||||
return sec;
|
||||
};
|
||||
|
@ -858,7 +864,7 @@ void Writer::createSections() {
|
|||
dtorsSec = createSection(".dtors", data | r | w);
|
||||
|
||||
// Then bin chunks by name and output characteristics.
|
||||
for (Chunk *c : ctx.symtab.getChunks()) {
|
||||
for (Chunk *c : symtab->getChunks()) {
|
||||
auto *sc = dyn_cast<SectionChunk>(c);
|
||||
if (sc && !sc->live) {
|
||||
if (config->verbose)
|
||||
|
@ -935,14 +941,14 @@ void Writer::createSections() {
|
|||
return 1;
|
||||
return 0;
|
||||
};
|
||||
llvm::stable_sort(ctx.outputSections,
|
||||
llvm::stable_sort(outputSections,
|
||||
[&](const OutputSection *s, const OutputSection *t) {
|
||||
return sectionOrder(s) < sectionOrder(t);
|
||||
});
|
||||
}
|
||||
|
||||
void Writer::createMiscChunks() {
|
||||
for (MergeChunk *p : ctx.mergeChunkInstances) {
|
||||
for (MergeChunk *p : MergeChunk::instances) {
|
||||
if (p) {
|
||||
p->finalizeContents();
|
||||
rdataSec->addChunk(p);
|
||||
|
@ -950,16 +956,15 @@ void Writer::createMiscChunks() {
|
|||
}
|
||||
|
||||
// Create thunks for locally-dllimported symbols.
|
||||
if (!ctx.symtab.localImportChunks.empty()) {
|
||||
for (Chunk *c : ctx.symtab.localImportChunks)
|
||||
if (!symtab->localImportChunks.empty()) {
|
||||
for (Chunk *c : symtab->localImportChunks)
|
||||
rdataSec->addChunk(c);
|
||||
}
|
||||
|
||||
// Create Debug Information Chunks
|
||||
OutputSection *debugInfoSec = config->mingw ? buildidSec : rdataSec;
|
||||
if (config->debug || config->repro || config->cetCompat) {
|
||||
debugDirectory =
|
||||
make<DebugDirectoryChunk>(ctx, debugRecords, config->repro);
|
||||
debugDirectory = make<DebugDirectoryChunk>(debugRecords, config->repro);
|
||||
debugDirectory->setAlignment(4);
|
||||
debugInfoSec->addChunk(debugDirectory);
|
||||
}
|
||||
|
@ -1008,7 +1013,7 @@ void Writer::createImportTables() {
|
|||
// Initialize DLLOrder so that import entries are ordered in
|
||||
// the same order as in the command line. (That affects DLL
|
||||
// initialization order, and this ordering is MSVC-compatible.)
|
||||
for (ImportFile *file : ctx.importFileInstances) {
|
||||
for (ImportFile *file : ImportFile::instances) {
|
||||
if (!file->live)
|
||||
continue;
|
||||
|
||||
|
@ -1031,10 +1036,10 @@ void Writer::createImportTables() {
|
|||
}
|
||||
|
||||
void Writer::appendImportThunks() {
|
||||
if (ctx.importFileInstances.empty())
|
||||
if (ImportFile::instances.empty())
|
||||
return;
|
||||
|
||||
for (ImportFile *file : ctx.importFileInstances) {
|
||||
for (ImportFile *file : ImportFile::instances) {
|
||||
if (!file->live)
|
||||
continue;
|
||||
|
||||
|
@ -1050,7 +1055,7 @@ void Writer::appendImportThunks() {
|
|||
|
||||
if (!delayIdata.empty()) {
|
||||
Defined *helper = cast<Defined>(config->delayLoadHelper);
|
||||
delayIdata.create(ctx, helper);
|
||||
delayIdata.create(helper);
|
||||
for (Chunk *c : delayIdata.getChunks())
|
||||
didatSec->addChunk(c);
|
||||
for (Chunk *c : delayIdata.getDataChunks())
|
||||
|
@ -1090,25 +1095,25 @@ void Writer::removeUnusedSections() {
|
|||
// later. Only remove sections that have no Chunks at all.
|
||||
return s->chunks.empty();
|
||||
};
|
||||
ctx.outputSections.erase(std::remove_if(ctx.outputSections.begin(),
|
||||
ctx.outputSections.end(), isUnused),
|
||||
ctx.outputSections.end());
|
||||
outputSections.erase(
|
||||
std::remove_if(outputSections.begin(), outputSections.end(), isUnused),
|
||||
outputSections.end());
|
||||
}
|
||||
|
||||
// The Windows loader doesn't seem to like empty sections,
|
||||
// so we remove them if any.
|
||||
void Writer::removeEmptySections() {
|
||||
auto isEmpty = [](OutputSection *s) { return s->getVirtualSize() == 0; };
|
||||
ctx.outputSections.erase(std::remove_if(ctx.outputSections.begin(),
|
||||
ctx.outputSections.end(), isEmpty),
|
||||
ctx.outputSections.end());
|
||||
outputSections.erase(
|
||||
std::remove_if(outputSections.begin(), outputSections.end(), isEmpty),
|
||||
outputSections.end());
|
||||
}
|
||||
|
||||
void Writer::assignOutputSectionIndices() {
|
||||
// Assign final output section indices, and assign each chunk to its output
|
||||
// section.
|
||||
uint32_t idx = 1;
|
||||
for (OutputSection *os : ctx.outputSections) {
|
||||
for (OutputSection *os : outputSections) {
|
||||
os->sectionIndex = idx;
|
||||
for (Chunk *c : os->chunks)
|
||||
c->setOutputSectionIdx(idx);
|
||||
|
@ -1117,7 +1122,7 @@ void Writer::assignOutputSectionIndices() {
|
|||
|
||||
// Merge chunks are containers of chunks, so assign those an output section
|
||||
// too.
|
||||
for (MergeChunk *mc : ctx.mergeChunkInstances)
|
||||
for (MergeChunk *mc : MergeChunk::instances)
|
||||
if (mc)
|
||||
for (SectionChunk *sc : mc->sections)
|
||||
if (sc && sc->live)
|
||||
|
@ -1148,7 +1153,7 @@ Optional<coff_symbol16> Writer::createSymbol(Defined *def) {
|
|||
Chunk *c = def->getChunk();
|
||||
if (!c)
|
||||
return None;
|
||||
OutputSection *os = ctx.getOutputSection(c);
|
||||
OutputSection *os = c->getOutputSection();
|
||||
if (!os)
|
||||
return None;
|
||||
|
||||
|
@ -1195,7 +1200,7 @@ void Writer::createSymbolAndStringTable() {
|
|||
// solution where discardable sections have long names preserved and
|
||||
// non-discardable sections have their names truncated, to ensure that any
|
||||
// section which is mapped at runtime also has its name mapped at runtime.
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
if (sec->name.size() <= COFF::NameSize)
|
||||
continue;
|
||||
if ((sec->header.Characteristics & IMAGE_SCN_MEM_DISCARDABLE) == 0)
|
||||
|
@ -1209,7 +1214,7 @@ void Writer::createSymbolAndStringTable() {
|
|||
}
|
||||
|
||||
if (config->debugDwarf || config->debugSymtab) {
|
||||
for (ObjFile *file : ctx.objFileInstances) {
|
||||
for (ObjFile *file : ObjFile::instances) {
|
||||
for (Symbol *b : file->getSymbols()) {
|
||||
auto *d = dyn_cast_or_null<Defined>(b);
|
||||
if (!d || d->writtenToSymtab)
|
||||
|
@ -1269,7 +1274,7 @@ void Writer::mergeSections() {
|
|||
void Writer::assignAddresses() {
|
||||
sizeOfHeaders = dosStubSize + sizeof(PEMagic) + sizeof(coff_file_header) +
|
||||
sizeof(data_directory) * numberOfDataDirectory +
|
||||
sizeof(coff_section) * ctx.outputSections.size();
|
||||
sizeof(coff_section) * outputSections.size();
|
||||
sizeOfHeaders +=
|
||||
config->is64() ? sizeof(pe32plus_header) : sizeof(pe32_header);
|
||||
sizeOfHeaders = alignTo(sizeOfHeaders, config->fileAlign);
|
||||
|
@ -1278,7 +1283,7 @@ void Writer::assignAddresses() {
|
|||
// The first page is kept unmapped.
|
||||
uint64_t rva = alignTo(sizeOfHeaders, config->align);
|
||||
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
if (sec == relocSec)
|
||||
addBaserels();
|
||||
uint64_t rawSize = 0, virtualSize = 0;
|
||||
|
@ -1313,7 +1318,7 @@ void Writer::assignAddresses() {
|
|||
sizeOfImage = alignTo(rva, config->align);
|
||||
|
||||
// Assign addresses to sections in MergeChunks.
|
||||
for (MergeChunk *mc : ctx.mergeChunkInstances)
|
||||
for (MergeChunk *mc : MergeChunk::instances)
|
||||
if (mc)
|
||||
mc->assignSubsectionRVAs();
|
||||
}
|
||||
|
@ -1348,7 +1353,7 @@ template <typename PEHeaderTy> void Writer::writeHeader() {
|
|||
auto *coff = reinterpret_cast<coff_file_header *>(buf);
|
||||
buf += sizeof(*coff);
|
||||
coff->Machine = config->machine;
|
||||
coff->NumberOfSections = ctx.outputSections.size();
|
||||
coff->NumberOfSections = outputSections.size();
|
||||
coff->Characteristics = IMAGE_FILE_EXECUTABLE_IMAGE;
|
||||
if (config->largeAddressAware)
|
||||
coff->Characteristics |= IMAGE_FILE_LARGE_ADDRESS_AWARE;
|
||||
|
@ -1461,7 +1466,7 @@ template <typename PEHeaderTy> void Writer::writeHeader() {
|
|||
dir[BASE_RELOCATION_TABLE].RelativeVirtualAddress = relocSec->getRVA();
|
||||
dir[BASE_RELOCATION_TABLE].Size = relocSec->getVirtualSize();
|
||||
}
|
||||
if (Symbol *sym = ctx.symtab.findUnderscore("_tls_used")) {
|
||||
if (Symbol *sym = symtab->findUnderscore("_tls_used")) {
|
||||
if (Defined *b = dyn_cast<Defined>(sym)) {
|
||||
dir[TLS_TABLE].RelativeVirtualAddress = b->getRVA();
|
||||
dir[TLS_TABLE].Size = config->is64()
|
||||
|
@ -1473,7 +1478,7 @@ template <typename PEHeaderTy> void Writer::writeHeader() {
|
|||
dir[DEBUG_DIRECTORY].RelativeVirtualAddress = debugDirectory->getRVA();
|
||||
dir[DEBUG_DIRECTORY].Size = debugDirectory->getSize();
|
||||
}
|
||||
if (Symbol *sym = ctx.symtab.findUnderscore("_load_config_used")) {
|
||||
if (Symbol *sym = symtab->findUnderscore("_load_config_used")) {
|
||||
if (auto *b = dyn_cast<DefinedRegular>(sym)) {
|
||||
SectionChunk *sc = b->getChunk();
|
||||
assert(b->getRVA() >= sc->getRVA());
|
||||
|
@ -1497,12 +1502,12 @@ template <typename PEHeaderTy> void Writer::writeHeader() {
|
|||
}
|
||||
|
||||
// Write section table
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
sec->writeHeaderTo(buf);
|
||||
buf += sizeof(coff_section);
|
||||
}
|
||||
sectionTable = ArrayRef<uint8_t>(
|
||||
buf - ctx.outputSections.size() * sizeof(coff_section), buf);
|
||||
buf - outputSections.size() * sizeof(coff_section), buf);
|
||||
|
||||
if (outputSymtab.empty() && strtab.empty())
|
||||
return;
|
||||
|
@ -1530,7 +1535,7 @@ void Writer::openFile(StringRef path) {
|
|||
|
||||
void Writer::createSEHTable() {
|
||||
SymbolRVASet handlers;
|
||||
for (ObjFile *file : ctx.objFileInstances) {
|
||||
for (ObjFile *file : ObjFile::instances) {
|
||||
if (!file->hasSafeSEH())
|
||||
error("/safeseh: " + file->getName() + " is not compatible with SEH");
|
||||
markSymbolsForRVATable(file, file->getSXDataChunks(), handlers);
|
||||
|
@ -1539,7 +1544,7 @@ void Writer::createSEHTable() {
|
|||
// Set the "no SEH" characteristic if there really were no handlers, or if
|
||||
// there is no load config object to point to the table of handlers.
|
||||
setNoSEHCharacteristic =
|
||||
handlers.empty() || !ctx.symtab.findUnderscore("_load_config_used");
|
||||
handlers.empty() || !symtab->findUnderscore("_load_config_used");
|
||||
|
||||
maybeAddRVATable(std::move(handlers), "__safe_se_handler_table",
|
||||
"__safe_se_handler_count");
|
||||
|
@ -1637,7 +1642,7 @@ void Writer::createGuardCFTables() {
|
|||
std::vector<Symbol *> giatsSymbols;
|
||||
SymbolRVASet longJmpTargets;
|
||||
SymbolRVASet ehContTargets;
|
||||
for (ObjFile *file : ctx.objFileInstances) {
|
||||
for (ObjFile *file : ObjFile::instances) {
|
||||
// If the object was compiled with /guard:cf, the address taken symbols
|
||||
// are in .gfids$y sections, the longjmp targets are in .gljmp$y sections,
|
||||
// and ehcont targets are in .gehcont$y sections. If the object was not
|
||||
|
@ -1703,7 +1708,7 @@ void Writer::createGuardCFTables() {
|
|||
guardFlags |= uint32_t(coff_guard_flags::HasLongJmpTable);
|
||||
if (config->guardCF & GuardCFLevel::EHCont)
|
||||
guardFlags |= uint32_t(coff_guard_flags::HasEHContTable);
|
||||
Symbol *flagSym = ctx.symtab.findUnderscore("__guard_flags");
|
||||
Symbol *flagSym = symtab->findUnderscore("__guard_flags");
|
||||
cast<DefinedAbsolute>(flagSym)->setVA(guardFlags);
|
||||
}
|
||||
|
||||
|
@ -1775,8 +1780,8 @@ void Writer::maybeAddRVATable(SymbolRVASet tableSymbols, StringRef tableSym,
|
|||
tableChunk = make<RVATableChunk>(std::move(tableSymbols));
|
||||
rdataSec->addChunk(tableChunk);
|
||||
|
||||
Symbol *t = ctx.symtab.findUnderscore(tableSym);
|
||||
Symbol *c = ctx.symtab.findUnderscore(countSym);
|
||||
Symbol *t = symtab->findUnderscore(tableSym);
|
||||
Symbol *c = symtab->findUnderscore(countSym);
|
||||
replaceSymbol<DefinedSynthetic>(t, t->getName(), tableChunk);
|
||||
cast<DefinedAbsolute>(c)->setVA(tableChunk->getSize() / (hasFlag ? 5 : 4));
|
||||
}
|
||||
|
@ -1788,7 +1793,7 @@ void Writer::maybeAddRVATable(SymbolRVASet tableSymbols, StringRef tableSym,
|
|||
void Writer::createRuntimePseudoRelocs() {
|
||||
std::vector<RuntimePseudoReloc> rels;
|
||||
|
||||
for (Chunk *c : ctx.symtab.getChunks()) {
|
||||
for (Chunk *c : symtab->getChunks()) {
|
||||
auto *sc = dyn_cast<SectionChunk>(c);
|
||||
if (!sc || !sc->live)
|
||||
continue;
|
||||
|
@ -1811,9 +1816,8 @@ void Writer::createRuntimePseudoRelocs() {
|
|||
EmptyChunk *endOfList = make<EmptyChunk>();
|
||||
rdataSec->addChunk(endOfList);
|
||||
|
||||
Symbol *headSym = ctx.symtab.findUnderscore("__RUNTIME_PSEUDO_RELOC_LIST__");
|
||||
Symbol *endSym =
|
||||
ctx.symtab.findUnderscore("__RUNTIME_PSEUDO_RELOC_LIST_END__");
|
||||
Symbol *headSym = symtab->findUnderscore("__RUNTIME_PSEUDO_RELOC_LIST__");
|
||||
Symbol *endSym = symtab->findUnderscore("__RUNTIME_PSEUDO_RELOC_LIST_END__");
|
||||
replaceSymbol<DefinedSynthetic>(headSym, headSym->getName(), table);
|
||||
replaceSymbol<DefinedSynthetic>(endSym, endSym->getName(), endOfList);
|
||||
}
|
||||
|
@ -1833,8 +1837,8 @@ void Writer::insertCtorDtorSymbols() {
|
|||
dtorsSec->insertChunkAtStart(dtorListHead);
|
||||
dtorsSec->addChunk(dtorListEnd);
|
||||
|
||||
Symbol *ctorListSym = ctx.symtab.findUnderscore("__CTOR_LIST__");
|
||||
Symbol *dtorListSym = ctx.symtab.findUnderscore("__DTOR_LIST__");
|
||||
Symbol *ctorListSym = symtab->findUnderscore("__CTOR_LIST__");
|
||||
Symbol *dtorListSym = symtab->findUnderscore("__DTOR_LIST__");
|
||||
replaceSymbol<DefinedSynthetic>(ctorListSym, ctorListSym->getName(),
|
||||
ctorListHead);
|
||||
replaceSymbol<DefinedSynthetic>(dtorListSym, dtorListSym->getName(),
|
||||
|
@ -1847,7 +1851,7 @@ void Writer::setSectionPermissions() {
|
|||
for (auto &p : config->section) {
|
||||
StringRef name = p.first;
|
||||
uint32_t perm = p.second;
|
||||
for (OutputSection *sec : ctx.outputSections)
|
||||
for (OutputSection *sec : outputSections)
|
||||
if (sec->name == name)
|
||||
sec->setPermissions(perm);
|
||||
}
|
||||
|
@ -1857,10 +1861,10 @@ void Writer::setSectionPermissions() {
|
|||
void Writer::writeSections() {
|
||||
// Record the number of sections to apply section index relocations
|
||||
// against absolute symbols. See applySecIdx in Chunks.cpp..
|
||||
DefinedAbsolute::numOutputSections = ctx.outputSections.size();
|
||||
DefinedAbsolute::numOutputSections = outputSections.size();
|
||||
|
||||
uint8_t *buf = buffer->getBufferStart();
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
uint8_t *secBuf = buf + sec->getFileOff();
|
||||
// Fill gaps between functions in .text with INT3 instructions
|
||||
// instead of leaving as NUL bytes (which can be interpreted as
|
||||
|
@ -1930,7 +1934,7 @@ void Writer::sortExceptionTable() {
|
|||
return;
|
||||
// We assume .pdata contains function table entries only.
|
||||
auto bufAddr = [&](Chunk *c) {
|
||||
OutputSection *os = ctx.getOutputSection(c);
|
||||
OutputSection *os = c->getOutputSection();
|
||||
return buffer->getBufferStart() + os->getFileOff() + c->getRVA() -
|
||||
os->getRVA();
|
||||
};
|
||||
|
@ -1998,7 +2002,7 @@ void Writer::sortCRTSectionChunks(std::vector<Chunk *> &chunks) {
|
|||
}
|
||||
|
||||
OutputSection *Writer::findSection(StringRef name) {
|
||||
for (OutputSection *sec : ctx.outputSections)
|
||||
for (OutputSection *sec : outputSections)
|
||||
if (sec->name == name)
|
||||
return sec;
|
||||
return nullptr;
|
||||
|
@ -2006,7 +2010,7 @@ OutputSection *Writer::findSection(StringRef name) {
|
|||
|
||||
uint32_t Writer::getSizeOfInitializedData() {
|
||||
uint32_t res = 0;
|
||||
for (OutputSection *s : ctx.outputSections)
|
||||
for (OutputSection *s : outputSections)
|
||||
if (s->header.Characteristics & IMAGE_SCN_CNT_INITIALIZED_DATA)
|
||||
res += s->getRawSize();
|
||||
return res;
|
||||
|
@ -2018,7 +2022,7 @@ void Writer::addBaserels() {
|
|||
return;
|
||||
relocSec->chunks.clear();
|
||||
std::vector<Baserel> v;
|
||||
for (OutputSection *sec : ctx.outputSections) {
|
||||
for (OutputSection *sec : outputSections) {
|
||||
if (sec->header.Characteristics & IMAGE_SCN_MEM_DISCARDABLE)
|
||||
continue;
|
||||
// Collect all locations for base relocations.
|
||||
|
@ -2067,11 +2071,11 @@ PartialSection *Writer::findPartialSection(StringRef name, uint32_t outChars) {
|
|||
|
||||
void Writer::fixTlsAlignment() {
|
||||
Defined *tlsSym =
|
||||
dyn_cast_or_null<Defined>(ctx.symtab.findUnderscore("_tls_used"));
|
||||
dyn_cast_or_null<Defined>(symtab->findUnderscore("_tls_used"));
|
||||
if (!tlsSym)
|
||||
return;
|
||||
|
||||
OutputSection *sec = ctx.getOutputSection(tlsSym->getChunk());
|
||||
OutputSection *sec = tlsSym->getChunk()->getOutputSection();
|
||||
assert(sec && tlsSym->getRVA() >= sec->getRVA() &&
|
||||
"no output section for _tls_used");
|
||||
|
||||
|
|
|
@@ -19,9 +19,8 @@
namespace lld {
namespace coff {
static const int pageSize = 4096;
class COFFLinkerContext;
void writeResult(COFFLinkerContext &ctx);
void writeResult();
class PartialSection {
public:
@@ -51,6 +50,9 @@ public:
void writeHeaderTo(uint8_t *buf);
void addContributingPartialSection(PartialSection *sec);
// Clear the output sections static container.
static void clear();
// Returns the size of this section in an executable memory image.
// This may be smaller than the raw size (the raw size is multiple
// of disk sector size, so there may be padding at end), or may be
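`OutputSection::clear()` exists precisely because the registry behind it is once again a static vector in Writer.cpp rather than state owned by a per-link context: if lld is used as a library and invoked twice in one process, something has to empty the container between runs, or the second link would start with the first link's sections. A sketch of the call pattern; where lld actually invokes the cleanup is not shown in this excerpt, so treat the placement as an assumption.

```cpp
// At the end of a link (assumed location in lld's global-state cleanup path):
OutputSection::clear(); // drops the static outputSections vector
// Other static registries (ObjFile::instances, MergeChunk::instances, ...)
// need the same treatment before the next in-process link can run cleanly.
```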
@@ -31,8 +31,13 @@ Timer::Timer(llvm::StringRef name, Timer &parent) : name(std::string(name)) {
parent.children.push_back(this);
}
Timer &Timer::root() {
static Timer rootTimer("Total Link Time");
return rootTimer;
}
void Timer::print() {
double totalDuration = static_cast<double>(millis());
double totalDuration = static_cast<double>(root().millis());
// We want to print the grand total under all the intermediate phases, so we
// print all children first, then print the total under that.
@@ -42,7 +47,7 @@ void Timer::print() {
message(std::string(50, '-'));
print(0, millis(), false);
root().print(0, root().millis(), false);
}
double Timer::millis() const {
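`Timer::root()` returns a function-local static, so the root timer is constructed on first use; file-scope timers such as `static Timer ltoTimer("LTO", Timer::root());` can then safely register themselves as children during static initialization regardless of translation-unit order. The self-contained sketch below illustrates that construct-on-first-use idiom with a stripped-down Timer; it is not lld's actual implementation.

```cpp
#include <iostream>
#include <string>
#include <vector>

class Timer {
public:
  Timer(std::string n, Timer &parent) : name(std::move(n)) {
    parent.children.push_back(this);
  }
  static Timer &root() {
    // Built the first time any child asks for it, which sidesteps the
    // static-initialization-order problem between translation units.
    static Timer rootTimer("Total Link Time");
    return rootTimer;
  }
  void print(int depth = 0) const {
    std::cout << std::string(depth * 2, ' ') << name << '\n';
    for (const Timer *c : children)
      c->print(depth + 1);
  }

private:
  explicit Timer(std::string n) : name(std::move(n)) {} // root only
  std::string name;
  std::vector<Timer *> children;
};

// These run during static initialization, possibly before main().
static Timer ltoTimer("LTO", Timer::root());
static Timer codeLayoutTimer("Code Layout", Timer::root());

int main() { Timer::root().print(); }
```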
@@ -38,8 +38,7 @@ class Timer {
public:
Timer(llvm::StringRef name, Timer &parent);
// Creates the root timer.
explicit Timer(llvm::StringRef name);
static Timer &root();
void addToTotal(std::chrono::nanoseconds time) { total += time.count(); }
void print();
@@ -47,6 +46,7 @@ public:
double millis() const;
private:
explicit Timer(llvm::StringRef name);
void print(int depth, double totalDuration, bool recurse = true) const;
std::atomic<std::chrono::nanoseconds::rep> total;
@@ -1,62 +0,0 @@
; REQUIRES: x86
; RUN: llvm-as %s -o %t.obj
; Test different configurations of lld to get all possible timer outputs.
; RUN: lld-link %t.obj -time -entry:main -debug:noghash 2>&1 | \
; RUN:   FileCheck %s --check-prefix=CHECK1
; RUN: lld-link %t.obj -time -entry:main -debug 2>&1 | \
; RUN:   FileCheck %s --check-prefix=CHECK2
; RUN: lld-link %t.obj -time -entry:main -map 2>&1 | \
; RUN:   FileCheck %s --check-prefix=CHECK3
; CHECK1: Input File Reading:
; CHECK1: LTO:
; CHECK1: Code Layout:
; CHECK1: Commit Output File:
; CHECK1: PDB Emission (Cumulative):
; CHECK1: Add Objects:
; CHECK1: Type Merging:
; CHECK1: Symbol Merging:
; CHECK1: Publics Stream Layout:
; CHECK1: TPI Stream Layout:
; CHECK1: Commit to Disk:
; CHECK2: Input File Reading:
; CHECK2: LTO:
; CHECK2: Code Layout:
; CHECK2: Commit Output File:
; CHECK2: PDB Emission (Cumulative):
; CHECK2: Add Objects:
; CHECK2: Global Type Hashing:
; CHECK2: GHash Type Merging:
; CHECK2: Symbol Merging:
; CHECK2: Publics Stream Layout:
; CHECK2: TPI Stream Layout:
; CHECK2: Commit to Disk:
; CHECK3: Input File Reading:
; CHECK3: LTO:
; CHECK3: GC:
; CHECK3: ICF:
; CHECK3: Code Layout:
; CHECK3: Commit Output File:
; CHECK3: MAP Emission (Cumulative):
; CHECK3: Gather Symbols:
; CHECK3: Build Symbol Strings:
; CHECK3: Write to File:
target datalayout = "e-m:w-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-pc-windows-msvc19.11.0"
define dso_local i32 @main() {
entry:
  ret i32 0
}
!llvm.dbg.cu = !{!0}
!llvm.module.flags = !{!2, !3}
!0 = distinct !DICompileUnit(language: DW_LANG_C_plus_plus_14, file: !1, producer: "clang version 14.0.0", isOptimized: false, runtimeVersion: 0, emissionKind: FullDebug, splitDebugInlining: false, nameTableKind: None)
!1 = !DIFile(filename: "t.cpp", directory: "", checksumkind: CSK_MD5, checksum: "495fd79f78a98304e065540d576057d9")
!2 = !{i32 2, !"CodeView", i32 1}
!3 = !{i32 2, !"Debug Info Version", i32 3}
@@ -29,7 +29,6 @@ static_library("COFF") {
sources = [
"CallGraphSort.cpp",
"Chunks.cpp",
"COFFLinkerContext.cpp",
"DLL.cpp",
"DebugTypes.cpp",
"Driver.cpp",