[NFC][NewPM] Reuse PassBuilder callbacks with -O0
This removes a lot of duplicated code that was necessary before https://reviews.llvm.org/D89158. Now we can use PassBuilder::runRegisteredEPCallbacks(). This mostly affects the sanitizers. There is likely more that can be simplified, but let's start with this.

Reviewed By: ychen

Differential Revision: https://reviews.llvm.org/D90870
parent 3c6b457bee
commit 3a7b57b7ca
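For context, the pattern this change relies on is: register a pass at a PassBuilder extension point once, and at -O0, where no default optimization pipeline is built, replay the registered callbacks with PassBuilder::runRegisteredEPCallbacks() (added in D89158). Below is a minimal sketch assuming the LLVM 12-era PassBuilder API used in this patch; the helper function and the ThreadSanitizer example are illustrative only, not the actual Clang code.

// Minimal sketch only: a hypothetical helper, not part of this patch.
#include "llvm/Passes/PassBuilder.h"
#include "llvm/Transforms/Instrumentation/ThreadSanitizer.h"

using namespace llvm;

static void sketchRunEPCallbacksAtO0(PassBuilder &PB, ModulePassManager &MPM,
                                     bool DebugPM) {
  // Register the pass at an extension point once; with D89158 the same
  // registration can be reused whether or not an optimization pipeline is
  // built.
  PB.registerOptimizerLastEPCallback(
      [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
        MPM.addPass(createModuleToFunctionPassAdaptor(ThreadSanitizerPass()));
      });

  // At -O0 no default pipeline is built, so replay every registered
  // extension-point callback directly into the module pass manager.
  PB.runRegisteredEPCallbacks(MPM, PassBuilder::OptimizationLevel::O0,
                              DebugPM);
}

In the diff below, Clang registers the sanitizer and instrumentation callbacks unconditionally and, at -O0, calls PB.runRegisteredEPCallbacks() instead of duplicating each MPM.addPass() in addSanitizersAtO0().
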
@@ -1062,67 +1062,6 @@ static void addCoroutinePassesAtO0(ModulePassManager &MPM,
  MPM.addPass(createModuleToFunctionPassAdaptor(CoroCleanupPass()));
}

static void addSanitizersAtO0(ModulePassManager &MPM,
                              const Triple &TargetTriple,
                              const LangOptions &LangOpts,
                              const CodeGenOptions &CodeGenOpts) {
  if (CodeGenOpts.SanitizeCoverageType ||
      CodeGenOpts.SanitizeCoverageIndirectCalls ||
      CodeGenOpts.SanitizeCoverageTraceCmp) {
    auto SancovOpts = getSancovOptsFromCGOpts(CodeGenOpts);
    MPM.addPass(ModuleSanitizerCoveragePass(
        SancovOpts, CodeGenOpts.SanitizeCoverageAllowlistFiles,
        CodeGenOpts.SanitizeCoverageBlocklistFiles));
  }

  auto ASanPass = [&](SanitizerMask Mask, bool CompileKernel) {
    MPM.addPass(RequireAnalysisPass<ASanGlobalsMetadataAnalysis, Module>());
    bool Recover = CodeGenOpts.SanitizeRecover.has(Mask);
    MPM.addPass(createModuleToFunctionPassAdaptor(AddressSanitizerPass(
        CompileKernel, Recover, CodeGenOpts.SanitizeAddressUseAfterScope)));
    bool ModuleUseAfterScope = asanUseGlobalsGC(TargetTriple, CodeGenOpts);
    MPM.addPass(
        ModuleAddressSanitizerPass(CompileKernel, Recover, ModuleUseAfterScope,
                                   CodeGenOpts.SanitizeAddressUseOdrIndicator));
  };

  if (LangOpts.Sanitize.has(SanitizerKind::Address)) {
    ASanPass(SanitizerKind::Address, /*CompileKernel=*/false);
  }

  if (LangOpts.Sanitize.has(SanitizerKind::KernelAddress)) {
    ASanPass(SanitizerKind::KernelAddress, /*CompileKernel=*/true);
  }

  if (LangOpts.Sanitize.has(SanitizerKind::HWAddress)) {
    bool Recover = CodeGenOpts.SanitizeRecover.has(SanitizerKind::HWAddress);
    MPM.addPass(HWAddressSanitizerPass(
        /*CompileKernel=*/false, Recover));
  }
  if (LangOpts.Sanitize.has(SanitizerKind::KernelHWAddress)) {
    MPM.addPass(HWAddressSanitizerPass(
        /*CompileKernel=*/true, /*Recover=*/true));
  }

  if (LangOpts.Sanitize.has(SanitizerKind::Memory)) {
    bool Recover = CodeGenOpts.SanitizeRecover.has(SanitizerKind::Memory);
    int TrackOrigins = CodeGenOpts.SanitizeMemoryTrackOrigins;
    MPM.addPass(MemorySanitizerPass({TrackOrigins, Recover, false}));
    MPM.addPass(createModuleToFunctionPassAdaptor(
        MemorySanitizerPass({TrackOrigins, Recover, false})));
  }

  if (LangOpts.Sanitize.has(SanitizerKind::KernelMemory)) {
    MPM.addPass(createModuleToFunctionPassAdaptor(
        MemorySanitizerPass({0, false, /*Kernel=*/true})));
  }

  if (LangOpts.Sanitize.has(SanitizerKind::Thread)) {
    MPM.addPass(ThreadSanitizerPass());
    MPM.addPass(createModuleToFunctionPassAdaptor(ThreadSanitizerPass()));
  }
}

/// A clean version of `EmitAssembly` that uses the new pass manager.
///
/// Not all features are currently supported in this system, but where
@@ -1259,30 +1198,129 @@ void EmitAssemblyHelper::EmitAssemblyWithNewPassManager(
  bool IsThinLTO = CodeGenOpts.PrepareForThinLTO;
  bool IsLTO = CodeGenOpts.PrepareForLTO;

  if (CodeGenOpts.OptimizationLevel == 0) {
    // If we reached here with a non-empty index file name, then the index
    // file was empty and we are not performing ThinLTO backend compilation
    // (used in testing in a distributed build environment). Drop any the type
    // test assume sequences inserted for whole program vtables so that
    // codegen doesn't complain.
    if (!CodeGenOpts.ThinLTOIndexFile.empty())
      MPM.addPass(LowerTypeTestsPass(/*ExportSummary=*/nullptr,
                                     /*ImportSummary=*/nullptr,
                                     /*DropTypeTests=*/true));
    if (Optional<GCOVOptions> Options = getGCOVOptions(CodeGenOpts, LangOpts))
      MPM.addPass(GCOVProfilerPass(*Options));
    if (Optional<InstrProfOptions> Options =
            getInstrProfOptions(CodeGenOpts, LangOpts))
      MPM.addPass(InstrProfiling(*Options, false));
  // If we reached here with a non-empty index file name, then the index
  // file was empty and we are not performing ThinLTO backend compilation
  // (used in testing in a distributed build environment). Drop any the type
  // test assume sequences inserted for whole program vtables so that
  // codegen doesn't complain.
  if (!CodeGenOpts.ThinLTOIndexFile.empty())
    PB.registerPipelineStartEPCallback(
        [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          MPM.addPass(LowerTypeTestsPass(/*ExportSummary=*/nullptr,
                                         /*ImportSummary=*/nullptr,
                                         /*DropTypeTests=*/true));
        });

  if (Level != PassBuilder::OptimizationLevel::O0) {
    PB.registerPipelineStartEPCallback(
        [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          MPM.addPass(createModuleToFunctionPassAdaptor(
              EntryExitInstrumenterPass(/*PostInlining=*/false)));
        });
  }

  // Register callbacks to schedule sanitizer passes at the appropriate part
  // of the pipeline.
  if (LangOpts.Sanitize.has(SanitizerKind::LocalBounds))
    PB.registerScalarOptimizerLateEPCallback(
        [](FunctionPassManager &FPM, PassBuilder::OptimizationLevel Level) {
          FPM.addPass(BoundsCheckingPass());
        });

  if (CodeGenOpts.SanitizeCoverageType ||
      CodeGenOpts.SanitizeCoverageIndirectCalls ||
      CodeGenOpts.SanitizeCoverageTraceCmp) {
    PB.registerOptimizerLastEPCallback(
        [this](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          auto SancovOpts = getSancovOptsFromCGOpts(CodeGenOpts);
          MPM.addPass(ModuleSanitizerCoveragePass(
              SancovOpts, CodeGenOpts.SanitizeCoverageAllowlistFiles,
              CodeGenOpts.SanitizeCoverageBlocklistFiles));
        });
  }

  if (LangOpts.Sanitize.has(SanitizerKind::Memory)) {
    int TrackOrigins = CodeGenOpts.SanitizeMemoryTrackOrigins;
    bool Recover = CodeGenOpts.SanitizeRecover.has(SanitizerKind::Memory);
    PB.registerOptimizerLastEPCallback(
        [TrackOrigins, Recover](ModulePassManager &MPM,
                                PassBuilder::OptimizationLevel Level) {
          MPM.addPass(MemorySanitizerPass({TrackOrigins, Recover, false}));
          MPM.addPass(createModuleToFunctionPassAdaptor(
              MemorySanitizerPass({TrackOrigins, Recover, false})));
        });
  }
  if (LangOpts.Sanitize.has(SanitizerKind::Thread)) {
    PB.registerOptimizerLastEPCallback(
        [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          MPM.addPass(ThreadSanitizerPass());
          MPM.addPass(
              createModuleToFunctionPassAdaptor(ThreadSanitizerPass()));
        });
  }

  auto ASanPass = [&](SanitizerMask Mask, bool CompileKernel) {
    if (LangOpts.Sanitize.has(Mask)) {
      bool Recover = CodeGenOpts.SanitizeRecover.has(Mask);
      bool UseAfterScope = CodeGenOpts.SanitizeAddressUseAfterScope;
      bool ModuleUseAfterScope = asanUseGlobalsGC(TargetTriple, CodeGenOpts);
      bool UseOdrIndicator = CodeGenOpts.SanitizeAddressUseOdrIndicator;
      PB.registerOptimizerLastEPCallback(
          [CompileKernel, Recover, UseAfterScope, ModuleUseAfterScope,
           UseOdrIndicator](ModulePassManager &MPM,
                            PassBuilder::OptimizationLevel Level) {
            MPM.addPass(
                RequireAnalysisPass<ASanGlobalsMetadataAnalysis, Module>());
            MPM.addPass(ModuleAddressSanitizerPass(CompileKernel, Recover,
                                                   ModuleUseAfterScope,
                                                   UseOdrIndicator));
            MPM.addPass(createModuleToFunctionPassAdaptor(
                AddressSanitizerPass(CompileKernel, Recover, UseAfterScope)));
          });
    }
  };
  ASanPass(SanitizerKind::Address, false);
  ASanPass(SanitizerKind::KernelAddress, true);

  auto HWASanPass = [&](SanitizerMask Mask, bool CompileKernel) {
    if (LangOpts.Sanitize.has(Mask)) {
      bool Recover = CodeGenOpts.SanitizeRecover.has(Mask);
      PB.registerOptimizerLastEPCallback(
          [CompileKernel, Recover](ModulePassManager &MPM,
                                   PassBuilder::OptimizationLevel Level) {
            MPM.addPass(HWAddressSanitizerPass(CompileKernel, Recover));
          });
    }
  };
  HWASanPass(SanitizerKind::HWAddress, false);
  HWASanPass(SanitizerKind::KernelHWAddress, true);

  if (Optional<GCOVOptions> Options = getGCOVOptions(CodeGenOpts, LangOpts))
    PB.registerPipelineStartEPCallback(
        [Options](ModulePassManager &MPM,
                  PassBuilder::OptimizationLevel Level) {
          MPM.addPass(GCOVProfilerPass(*Options));
        });
  if (Optional<InstrProfOptions> Options =
          getInstrProfOptions(CodeGenOpts, LangOpts))
    PB.registerPipelineStartEPCallback(
        [Options](ModulePassManager &MPM,
                  PassBuilder::OptimizationLevel Level) {
          MPM.addPass(InstrProfiling(*Options, false));
        });

  if (CodeGenOpts.OptimizationLevel == 0) {
    // Build a minimal pipeline based on the semantics required by Clang,
    // which is just that always inlining occurs. Further, disable generating
    // lifetime intrinsics to avoid enabling further optimizations during
    // code generation.
    // However, we need to insert lifetime intrinsics to avoid invalid access
    // caused by multithreaded coroutines.
    MPM.addPass(
        AlwaysInlinerPass(/*InsertLifetimeIntrinsics=*/LangOpts.Coroutines));
    PB.registerPipelineStartEPCallback(
        [this](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          MPM.addPass(AlwaysInlinerPass(
              /*InsertLifetimeIntrinsics=*/LangOpts.Coroutines));
        });

    // At -O0, we can still do PGO. Add all the requested passes for
    // instrumentation PGO, if requested.
@@ -1294,153 +1332,6 @@ void EmitAssemblyHelper::EmitAssemblyWithNewPassManager(
                                   /* IsCS */ false, PGOOpt->ProfileFile,
                                   PGOOpt->ProfileRemappingFile);

    // At -O0 we directly run necessary sanitizer passes.
    if (LangOpts.Sanitize.has(SanitizerKind::LocalBounds))
      MPM.addPass(createModuleToFunctionPassAdaptor(BoundsCheckingPass()));

    // Lastly, add semantically necessary passes for LTO.
    if (IsLTO || IsThinLTO) {
      MPM.addPass(CanonicalizeAliasesPass());
      MPM.addPass(NameAnonGlobalPass());
    }
  } else {
    // If we reached here with a non-empty index file name, then the index
    // file was empty and we are not performing ThinLTO backend compilation
    // (used in testing in a distributed build environment). Drop any the type
    // test assume sequences inserted for whole program vtables so that
    // codegen doesn't complain.
    if (!CodeGenOpts.ThinLTOIndexFile.empty())
      PB.registerPipelineStartEPCallback(
          [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
            MPM.addPass(LowerTypeTestsPass(/*ExportSummary=*/nullptr,
                                           /*ImportSummary=*/nullptr,
                                           /*DropTypeTests=*/true));
          });

    PB.registerPipelineStartEPCallback(
        [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
          MPM.addPass(createModuleToFunctionPassAdaptor(
              EntryExitInstrumenterPass(/*PostInlining=*/false)));
        });

    // Register callbacks to schedule sanitizer passes at the appropriate part of
    // the pipeline.
    if (LangOpts.Sanitize.has(SanitizerKind::LocalBounds))
      PB.registerScalarOptimizerLateEPCallback(
          [](FunctionPassManager &FPM, PassBuilder::OptimizationLevel Level) {
            FPM.addPass(BoundsCheckingPass());
          });

    if (CodeGenOpts.SanitizeCoverageType ||
        CodeGenOpts.SanitizeCoverageIndirectCalls ||
        CodeGenOpts.SanitizeCoverageTraceCmp) {
      PB.registerOptimizerLastEPCallback(
          [this](ModulePassManager &MPM,
                 PassBuilder::OptimizationLevel Level) {
            auto SancovOpts = getSancovOptsFromCGOpts(CodeGenOpts);
            MPM.addPass(ModuleSanitizerCoveragePass(
                SancovOpts, CodeGenOpts.SanitizeCoverageAllowlistFiles,
                CodeGenOpts.SanitizeCoverageBlocklistFiles));
          });
    }

    if (LangOpts.Sanitize.has(SanitizerKind::Memory)) {
      int TrackOrigins = CodeGenOpts.SanitizeMemoryTrackOrigins;
      bool Recover = CodeGenOpts.SanitizeRecover.has(SanitizerKind::Memory);
      PB.registerOptimizerLastEPCallback(
          [TrackOrigins, Recover](ModulePassManager &MPM,
                                  PassBuilder::OptimizationLevel Level) {
            MPM.addPass(MemorySanitizerPass({TrackOrigins, Recover, false}));
            MPM.addPass(createModuleToFunctionPassAdaptor(
                MemorySanitizerPass({TrackOrigins, Recover, false})));
          });
    }
    if (LangOpts.Sanitize.has(SanitizerKind::Thread)) {
      PB.registerOptimizerLastEPCallback(
          [](ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
            MPM.addPass(ThreadSanitizerPass());
            MPM.addPass(
                createModuleToFunctionPassAdaptor(ThreadSanitizerPass()));
          });
    }
    if (LangOpts.Sanitize.has(SanitizerKind::Address)) {
      bool Recover = CodeGenOpts.SanitizeRecover.has(SanitizerKind::Address);
      bool UseAfterScope = CodeGenOpts.SanitizeAddressUseAfterScope;
      bool ModuleUseAfterScope = asanUseGlobalsGC(TargetTriple, CodeGenOpts);
      bool UseOdrIndicator = CodeGenOpts.SanitizeAddressUseOdrIndicator;
      PB.registerOptimizerLastEPCallback(
          [Recover, UseAfterScope, ModuleUseAfterScope, UseOdrIndicator](
              ModulePassManager &MPM, PassBuilder::OptimizationLevel Level) {
            MPM.addPass(
                RequireAnalysisPass<ASanGlobalsMetadataAnalysis, Module>());
            MPM.addPass(ModuleAddressSanitizerPass(
                /*CompileKernel=*/false, Recover, ModuleUseAfterScope,
                UseOdrIndicator));
            MPM.addPass(
                createModuleToFunctionPassAdaptor(AddressSanitizerPass(
                    /*CompileKernel=*/false, Recover, UseAfterScope)));
          });
    }

    if (LangOpts.Sanitize.has(SanitizerKind::HWAddress)) {
      bool Recover =
          CodeGenOpts.SanitizeRecover.has(SanitizerKind::HWAddress);
      PB.registerOptimizerLastEPCallback(
          [Recover](ModulePassManager &MPM,
                    PassBuilder::OptimizationLevel Level) {
            MPM.addPass(HWAddressSanitizerPass(
                /*CompileKernel=*/false, Recover));
          });
    }
    if (LangOpts.Sanitize.has(SanitizerKind::KernelHWAddress)) {
      bool Recover =
          CodeGenOpts.SanitizeRecover.has(SanitizerKind::KernelHWAddress);
      PB.registerOptimizerLastEPCallback(
          [Recover](ModulePassManager &MPM,
                    PassBuilder::OptimizationLevel Level) {
            MPM.addPass(HWAddressSanitizerPass(
                /*CompileKernel=*/true, Recover));
          });
    }

    if (Optional<GCOVOptions> Options = getGCOVOptions(CodeGenOpts, LangOpts))
      PB.registerPipelineStartEPCallback(
          [Options](ModulePassManager &MPM,
                    PassBuilder::OptimizationLevel Level) {
            MPM.addPass(GCOVProfilerPass(*Options));
          });
    if (Optional<InstrProfOptions> Options =
            getInstrProfOptions(CodeGenOpts, LangOpts))
      PB.registerPipelineStartEPCallback(
          [Options](ModulePassManager &MPM,
                    PassBuilder::OptimizationLevel Level) {
            MPM.addPass(InstrProfiling(*Options, false));
          });

    if (IsThinLTO) {
      MPM = PB.buildThinLTOPreLinkDefaultPipeline(Level);
      MPM.addPass(CanonicalizeAliasesPass());
      MPM.addPass(NameAnonGlobalPass());
    } else if (IsLTO) {
      MPM = PB.buildLTOPreLinkDefaultPipeline(Level);
      MPM.addPass(CanonicalizeAliasesPass());
      MPM.addPass(NameAnonGlobalPass());
    } else {
      MPM = PB.buildPerModuleDefaultPipeline(Level);
    }
  }

  // Add UniqueInternalLinkageNames Pass which renames internal linkage
  // symbols with unique names.
  if (CodeGenOpts.UniqueInternalLinkageNames)
    MPM.addPass(UniqueInternalLinkageNamesPass());

  if (!CodeGenOpts.MemoryProfileOutput.empty()) {
    MPM.addPass(createModuleToFunctionPassAdaptor(MemProfilerPass()));
    MPM.addPass(ModuleMemProfilerPass());
  }

  if (CodeGenOpts.OptimizationLevel == 0) {
    PB.runRegisteredEPCallbacks(MPM, Level, CodeGenOpts.DebugPassManager);

    // FIXME: the backends do not handle matrix intrinsics currently. Make
@@ -1451,7 +1342,28 @@ void EmitAssemblyHelper::EmitAssemblyWithNewPassManager(
          createModuleToFunctionPassAdaptor(LowerMatrixIntrinsicsPass()));

    addCoroutinePassesAtO0(MPM, LangOpts, CodeGenOpts);
    addSanitizersAtO0(MPM, TargetTriple, LangOpts, CodeGenOpts);
  } else if (IsThinLTO) {
    MPM = PB.buildThinLTOPreLinkDefaultPipeline(Level);
  } else if (IsLTO) {
    MPM = PB.buildLTOPreLinkDefaultPipeline(Level);
  } else {
    MPM = PB.buildPerModuleDefaultPipeline(Level);
  }

  // Lastly, add semantically necessary passes for LTO.
  if (IsLTO || IsThinLTO) {
    MPM.addPass(CanonicalizeAliasesPass());
    MPM.addPass(NameAnonGlobalPass());
  }

  // Add UniqueInternalLinkageNames Pass which renames internal linkage
  // symbols with unique names.
  if (CodeGenOpts.UniqueInternalLinkageNames)
    MPM.addPass(UniqueInternalLinkageNamesPass());

  if (!CodeGenOpts.MemoryProfileOutput.empty()) {
    MPM.addPass(createModuleToFunctionPassAdaptor(MemProfilerPass()));
    MPM.addPass(ModuleMemProfilerPass());
  }
}