//===-- llvm-exegesis.cpp ---------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// Measures execution properties (latencies/uops) of an instruction.
///
//===----------------------------------------------------------------------===//

#include "lib/Analysis.h"
#include "lib/BenchmarkResult.h"
#include "lib/BenchmarkRunner.h"
#include "lib/Clustering.h"
#include "lib/Latency.h"
#include "lib/LlvmState.h"
#include "lib/PerfHelper.h"
#include "lib/Uops.h"
#include "lib/X86.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/Twine.h"
#include "llvm/MC/MCInstBuilder.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/TargetRegistry.h"
#include "llvm/Support/TargetSelect.h"
#include <algorithm>
#include <cstdlib>
#include <memory>
#include <random>
#include <string>
#include <unordered_map>
#include <vector>

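// Command-line options.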
static llvm::cl::opt<unsigned>
    OpcodeIndex("opcode-index", llvm::cl::desc("opcode to measure, by index"),
                llvm::cl::init(0));

static llvm::cl::opt<std::string>
    OpcodeName("opcode-name", llvm::cl::desc("opcode to measure, by name"),
               llvm::cl::init(""));

static llvm::cl::opt<std::string>
    BenchmarkFile("benchmarks-file", llvm::cl::desc(""), llvm::cl::init(""));

enum class BenchmarkModeE { Latency, Uops, Analysis };
static llvm::cl::opt<BenchmarkModeE> BenchmarkMode(
    "mode", llvm::cl::desc("the mode to run"),
    llvm::cl::values(
        clEnumValN(BenchmarkModeE::Latency, "latency", "Instruction Latency"),
        clEnumValN(BenchmarkModeE::Uops, "uops", "Uop Decomposition"),
        clEnumValN(BenchmarkModeE::Analysis, "analysis", "Analysis")));

static llvm::cl::opt<unsigned>
    NumRepetitions("num-repetitions",
                   llvm::cl::desc("number of times to repeat the asm snippet"),
                   llvm::cl::init(10000));

static llvm::cl::opt<bool> IgnoreInvalidSchedClass(
    "ignore-invalid-sched-class",
    llvm::cl::desc("ignore instructions that do not define a sched class"),
    llvm::cl::init(false));

static llvm::cl::opt<unsigned> AnalysisNumPoints(
    "analysis-numpoints",
    llvm::cl::desc("minimum number of points in an analysis cluster"),
    llvm::cl::init(3));

static llvm::cl::opt<float>
    AnalysisEpsilon("analysis-epsilon",
                    llvm::cl::desc("dbscan epsilon for analysis clustering"),
                    llvm::cl::init(0.1));

static llvm::cl::opt<std::string>
    AnalysisClustersOutputFile("analysis-clusters-output-file",
                               llvm::cl::desc(""), llvm::cl::init("-"));

static llvm::cl::opt<std::string>
    AnalysisInconsistenciesOutputFile("analysis-inconsistencies-output-file",
                                      llvm::cl::desc(""), llvm::cl::init("-"));

namespace exegesis {

static llvm::ExitOnError ExitOnErr;

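// Optional hook for extra target-specific setup: when the build defines
// LLVM_EXEGESIS_INITIALIZE_NATIVE_TARGET, the function is declared here and
// called from benchmarkMain().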
#ifdef LLVM_EXEGESIS_INITIALIZE_NATIVE_TARGET
void LLVM_EXEGESIS_INITIALIZE_NATIVE_TARGET();
#endif

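// Returns the opcode to measure, taken directly from -opcode-index or
// resolved from -opcode-name; reports a fatal error if neither is set or the
// name is unknown.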
static unsigned GetOpcodeOrDie(const llvm::MCInstrInfo &MCInstrInfo) {
  if (OpcodeName.empty() && (OpcodeIndex == 0))
    llvm::report_fatal_error(
        "please provide one and only one of 'opcode-index' or 'opcode-name'");
  if (OpcodeIndex > 0)
    return OpcodeIndex;
  // Resolve opcode name -> opcode.
  for (unsigned I = 0, E = MCInstrInfo.getNumOpcodes(); I < E; ++I)
    if (MCInstrInfo.getName(I) == OpcodeName)
      return I;
  llvm::report_fatal_error(llvm::Twine("unknown opcode ").concat(OpcodeName));
}

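// Builds the mapping between instruction/register numbers and their names;
// this context is needed to write benchmark results as YAML and to read them
// back.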
static BenchmarkResultContext
getBenchmarkResultContext(const LLVMState &State) {
  BenchmarkResultContext Ctx;

  const llvm::MCInstrInfo &InstrInfo = State.getInstrInfo();
  for (unsigned E = InstrInfo.getNumOpcodes(), I = 0; I < E; ++I)
    Ctx.addInstrEntry(I, InstrInfo.getName(I).data());

  const llvm::MCRegisterInfo &RegInfo = State.getRegInfo();
  for (unsigned E = RegInfo.getNumRegs(), I = 0; I < E; ++I)
    Ctx.addRegEntry(I, RegInfo.getName(I));

  return Ctx;
}

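// Measures the requested opcode in the selected mode (latency or uops) and
// appends the results to BenchmarkFile as YAML ("-" meaning stdout).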
void benchmarkMain() {
  if (exegesis::pfm::pfmInitialize())
    llvm::report_fatal_error("cannot initialize libpfm");

  llvm::InitializeNativeTarget();
  llvm::InitializeNativeTargetAsmPrinter();
#ifdef LLVM_EXEGESIS_INITIALIZE_NATIVE_TARGET
  LLVM_EXEGESIS_INITIALIZE_NATIVE_TARGET();
#endif

  // FIXME: Target-specific filter.
  X86Filter Filter;

  const LLVMState State;
  const auto Opcode = GetOpcodeOrDie(State.getInstrInfo());

  // Ignore instructions without a sched class if -ignore-invalid-sched-class
  // is passed.
  if (IgnoreInvalidSchedClass &&
      State.getInstrInfo().get(Opcode).getSchedClass() == 0) {
    llvm::errs() << "ignoring instruction without sched class\n";
    return;
  }

  // FIXME: Do not require SchedModel for latency.
  if (!State.getSubtargetInfo().getSchedModel().hasExtraProcessorInfo())
    llvm::report_fatal_error("sched model is missing extra processor info!");

  std::unique_ptr<BenchmarkRunner> Runner;
  switch (BenchmarkMode) {
  case BenchmarkModeE::Latency:
    Runner = llvm::make_unique<LatencyBenchmarkRunner>(State);
    break;
  case BenchmarkModeE::Uops:
    Runner = llvm::make_unique<UopsBenchmarkRunner>(State);
    break;
  case BenchmarkModeE::Analysis:
    llvm_unreachable("not a benchmark");
  }

  if (NumRepetitions == 0)
    llvm::report_fatal_error("--num-repetitions must be greater than zero");

  // Write to standard output if file is not set.
  if (BenchmarkFile.empty())
    BenchmarkFile = "-";

  const BenchmarkResultContext Context = getBenchmarkResultContext(State);
  std::vector<InstructionBenchmark> Results =
      ExitOnErr(Runner->run(Opcode, Filter, NumRepetitions));
  for (InstructionBenchmark &Result : Results)
    ExitOnErr(Result.writeYaml(Context, BenchmarkFile));

  exegesis::pfm::pfmTerminate();
}

// Prints the results of running analysis pass `Pass` to file `OutputFilename`
// if OutputFilename is non-empty.
template <typename Pass>
static void maybeRunAnalysis(const Analysis &Analyzer, const std::string &Name,
                             const std::string &OutputFilename) {
  if (OutputFilename.empty())
    return;
  if (OutputFilename != "-") {
    llvm::errs() << "Printing " << Name << " results to file '"
                 << OutputFilename << "'\n";
  }
  std::error_code ErrorCode;
  llvm::raw_fd_ostream ClustersOS(OutputFilename, ErrorCode,
                                  llvm::sys::fs::FA_Read |
                                      llvm::sys::fs::FA_Write);
  if (ErrorCode)
    llvm::report_fatal_error("cannot open out file: " + OutputFilename);
  if (auto Err = Analyzer.run<Pass>(ClustersOS))
    llvm::report_fatal_error(std::move(Err));
}

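// Reads benchmark points back from BenchmarkFile, clusters them (dbscan,
// controlled by -analysis-numpoints and -analysis-epsilon) and prints the
// requested reports through maybeRunAnalysis.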
static void analysisMain() {
  if (BenchmarkFile.empty())
    llvm::report_fatal_error("--benchmarks-file must be set.");

  llvm::InitializeNativeTarget();
  llvm::InitializeNativeTargetAsmPrinter();
  llvm::InitializeNativeTargetDisassembler();
  // Read benchmarks.
  const LLVMState State;
  const std::vector<InstructionBenchmark> Points =
      ExitOnErr(InstructionBenchmark::readYamls(
          getBenchmarkResultContext(State), BenchmarkFile));
  llvm::outs() << "Parsed " << Points.size() << " benchmark points\n";
  if (Points.empty()) {
    llvm::errs() << "no benchmarks to analyze\n";
    return;
  }
  // FIXME: Check that all points have the same triple/cpu.
  // FIXME: Merge points from several runs (latency and uops).

  std::string Error;
  const auto *TheTarget =
      llvm::TargetRegistry::lookupTarget(Points[0].LLVMTriple, Error);
  if (!TheTarget) {
    llvm::errs() << "unknown target '" << Points[0].LLVMTriple << "'\n";
    return;
  }
  const auto Clustering = ExitOnErr(InstructionBenchmarkClustering::create(
      Points, AnalysisNumPoints, AnalysisEpsilon));

  const Analysis Analyzer(*TheTarget, Clustering);

  maybeRunAnalysis<Analysis::PrintClusters>(Analyzer, "analysis clusters",
                                            AnalysisClustersOutputFile);
  maybeRunAnalysis<Analysis::PrintSchedClassInconsistencies>(
      Analyzer, "sched class consistency analysis",
      AnalysisInconsistenciesOutputFile);
}

} // namespace exegesis

int main(int Argc, char **Argv) {
  llvm::cl::ParseCommandLineOptions(Argc, Argv, "");

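  // Errors surfaced as llvm::StringError map to a zero exit code; any other
  // error kind makes the tool exit with failure.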
  exegesis::ExitOnErr.setExitCodeMapper([](const llvm::Error &Err) {
    if (Err.isA<llvm::StringError>())
      return EXIT_SUCCESS;
    return EXIT_FAILURE;
  });

  if (BenchmarkMode == BenchmarkModeE::Analysis) {
    exegesis::analysisMain();
  } else {
    exegesis::benchmarkMain();
  }
  return EXIT_SUCCESS;
}