2017-05-26 04:48:44 +08:00
|
|
|
/*
|
|
|
|
* Performance.actor.cpp
|
|
|
|
*
|
|
|
|
* This source file is part of the FoundationDB open source project
|
|
|
|
*
|
|
|
|
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
2018-02-22 02:25:11 +08:00
|
|
|
*
|
2017-05-26 04:48:44 +08:00
|
|
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
* you may not use this file except in compliance with the License.
|
|
|
|
* You may obtain a copy of the License at
|
2018-02-22 02:25:11 +08:00
|
|
|
*
|
2017-05-26 04:48:44 +08:00
|
|
|
* http://www.apache.org/licenses/LICENSE-2.0
|
2018-02-22 02:25:11 +08:00
|
|
|
*
|
2017-05-26 04:48:44 +08:00
|
|
|
* Unless required by applicable law or agreed to in writing, software
|
|
|
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
* See the License for the specific language governing permissions and
|
|
|
|
* limitations under the License.
|
|
|
|
*/
|
|
|
|
|
2019-02-18 07:41:16 +08:00
|
|
|
#include "fdbclient/NativeAPI.actor.h"
|
2019-02-18 11:25:16 +08:00
|
|
|
#include "fdbserver/TesterInterface.actor.h"
|
2019-02-18 11:18:30 +08:00
|
|
|
#include "fdbserver/workloads/workloads.actor.h"
|
2017-05-26 04:48:44 +08:00
|
|
|
#include "fdbserver/QuietDatabase.h"
|
2021-03-11 02:06:03 +08:00
|
|
|
#include "flow/actorcompiler.h" // This must be the last #include.
|
2017-05-26 04:48:44 +08:00
|
|
|
|
|
|
|
// Workload that probes a cluster's saturation point: it repeatedly runs a
// configurable inner workload (default "ReadWrite") at increasing transaction
// rates until the achieved rate falls well short of the target, then reports
// the best observed throughput and latency figures as its own metrics.
struct PerformanceWorkload : TestWorkload {
	// Name of the inner workload to run for each probe (option "probeWorkload").
	Value probeWorkload;
	// Options captured from this workload's own option list in the constructor,
	// replayed into every probe run via getOpts().
	Standalone<VectorRef<KeyValueRef>> savedOptions;

	// Metrics copied from the best (highest-TPS) probe run; reported by getMetrics().
	std::vector<PerfMetric> metrics;
	// Tester interfaces discovered in _setup() and reused for every probe run.
	std::vector<TesterInterface> testers;
	// Best latency seen at any rate, and the latency observed at the peak rate.
	PerfMetric latencyBaseline, latencySaturation;
	// Highest achieved transactions/sec across all probe runs.
	PerfMetric maxAchievedTPS;

	PerformanceWorkload(WorkloadContext const& wcx) : TestWorkload(wcx) {
		probeWorkload = getOption(options, LiteralStringRef("probeWorkload"), LiteralStringRef("ReadWrite"));

		// "Consume" all options and save for later tests
		// (values are blanked so the framework does not warn about unused options;
		// the saved copies are forwarded to the probe workload in getOpts()).
		for (int i = 0; i < options.size(); i++) {
			if (options[i].value.size()) {
				savedOptions.push_back_deep(savedOptions.arena(), KeyValueRef(options[i].key, options[i].value));
				printf("saved option (%d): '%s'='%s'\n",
				       i,
				       printable(options[i].key).c_str(),
				       printable(options[i].value).c_str());
				options[i].value = LiteralStringRef("");
			}
		}
		printf("saved %d options\n", savedOptions.size());
	}

	std::string description() const override { return "PerformanceTestWorkload"; }

	// Only client 0 drives the distributed probe; other clients are passive.
	Future<Void> setup(Database const& cx) override {
		if (!clientId)
			return _setup(cx, this);
		return Void();
	}

	Future<Void> start(Database const& cx) override {
		if (!clientId)
			return _start(cx, this);
		return Void();
	}

	// Always passes: this workload measures performance, it does not verify data.
	Future<bool> check(Database const& cx) override { return true; }

	void getMetrics(std::vector<PerfMetric>& m) override {
		// Metrics from the best probe run, plus summary figures from client 0 only.
		for (int i = 0; i < metrics.size(); i++)
			m.push_back(metrics[i]);
		if (!clientId) {
			m.emplace_back("Baseline Latency (average, ms)", latencyBaseline.value(), Averaged::False);
			m.emplace_back("Saturation Transactions/sec", maxAchievedTPS.value(), Averaged::False);
			m.emplace_back("Saturation Median Latency (average, ms)", latencySaturation.value(), Averaged::False);
		}
	}

	// Builds the option set for one probe run: the inner workload's name, the
	// target rate, and all options saved in the constructor. Returned as a
	// one-element vector-of-vectors because TestSpec carries per-workload options.
	Standalone<VectorRef<VectorRef<KeyValueRef>>> getOpts(double transactionsPerSecond) {
		Standalone<VectorRef<KeyValueRef>> options;
		Standalone<VectorRef<VectorRef<KeyValueRef>>> opts;
		options.push_back_deep(options.arena(), KeyValueRef(LiteralStringRef("testName"), probeWorkload));
		options.push_back_deep(
		    options.arena(),
		    KeyValueRef(LiteralStringRef("transactionsPerSecond"), format("%f", transactionsPerSecond)));
		for (int i = 0; i < savedOptions.size(); i++) {
			options.push_back_deep(options.arena(), savedOptions[i]);
			printf("option [%d]: '%s'='%s'\n",
			       i,
			       printable(savedOptions[i].key).c_str(),
			       printable(savedOptions[i].value).c_str());
		}
		opts.push_back_deep(opts.arena(), options);
		return opts;
	}

	// Emits every option of every probe workload into a single trace event.
	// NOTE(review): event name "PerformaceSetupStarting" is a long-standing typo
	// ("Performace"); renaming would break log consumers, so it is left as-is.
	void logOptions(Standalone<VectorRef<VectorRef<KeyValueRef>>> options) {
		TraceEvent start("PerformaceSetupStarting");
		for (int i = 0; i < options.size(); i++) {
			for (int j = 0; j < options[i].size(); j++) {
				start.detail(format("Option-%d-%d", i, j).c_str(),
				             printable(options[i][j].key) + "=" + printable(options[i][j].value));
			}
		}
	}

	// FIXME: does not use testers which are recruited on workers
	// Asks the cluster controller for tester-class, non-excluded workers,
	// retrying whenever the server DB info changes, and returns their
	// TesterInterfaces.
	ACTOR Future<std::vector<TesterInterface>> getTesters(PerformanceWorkload* self) {
		state std::vector<WorkerDetails> workers;

		loop {
			choose {
				// brokenPromiseToNever: if the cluster controller dies, wait for
				// dbInfo->onChange() below instead of surfacing broken_promise.
				when(
				    std::vector<WorkerDetails> w = wait(
				        brokenPromiseToNever(self->dbInfo->get().clusterInterface.getWorkers.getReply(GetWorkersRequest(
				            GetWorkersRequest::TESTER_CLASS_ONLY | GetWorkersRequest::NON_EXCLUDED_PROCESSES_ONLY))))) {
					workers = w;
					break;
				}
				when(wait(self->dbInfo->onChange())) {}
			}
		}

		std::vector<TesterInterface> ts;
		ts.reserve(workers.size());
		for (int i = 0; i < workers.size(); i++)
			ts.push_back(workers[i].interf.testerInterface);
		return ts;
	}

	// Runs the inner workload's SETUP phase once (at a fixed 1000 TPS option
	// value) so the database is populated before the probe runs in _start().
	ACTOR Future<Void> _setup(Database cx, PerformanceWorkload* self) {
		state Standalone<VectorRef<VectorRef<KeyValueRef>>> options = self->getOpts(1000.0);
		self->logOptions(options);

		std::vector<TesterInterface> testers = wait(self->getTesters(self));
		self->testers = testers;

		TestSpec spec(LiteralStringRef("PerformanceSetup"), false, false);
		spec.options = options;
		spec.phases = TestWorkload::SETUP;
		DistributedTestResults results = wait(runWorkload(cx, testers, spec));

		return Void();
	}

	// Linear scan for a metric by exact name; returns a default-constructed
	// PerfMetric when absent. NOTE(review): the parameter shadows the member
	// `metrics`; callers pass results.metrics explicitly, so behavior is correct.
	PerfMetric getNamedMetric(std::string name, std::vector<PerfMetric> metrics) {
		for (int i = 0; i < metrics.size(); i++) {
			if (metrics[i].name() == name) {
				return metrics[i];
			}
		}
		return PerfMetric();
	}

	// Saturation search: starting at 400 TPS, run EXECUTION+METRICS probes and
	// multiply the target rate by `multiplier` after each successful run. When a
	// run achieves less than 95% of target (minus a 100-TPS slack), retry once;
	// if it misses again on the fine-grained multiplier, declare saturation,
	// otherwise halve the rate and continue with the finer step.
	ACTOR Future<Void> getSaturation(Database cx, PerformanceWorkload* self) {
		state double tps = 400;
		state bool reported = false; // false until the first probe has recorded metrics
		state bool retry = false; // true => re-run the same rate before concluding
		state double multiplier = 2.0; // coarse step; refined to 1.189 (~2^(1/4)) after first miss

		loop {
			Standalone<VectorRef<VectorRef<KeyValueRef>>> options = self->getOpts(tps);
			// NOTE(review): "Performace" typo preserved intentionally (see logOptions).
			TraceEvent start("PerformaceProbeStarting");
			start.detail("RateTarget", tps);
			for (int i = 0; i < options.size(); i++) {
				for (int j = 0; j < options[i].size(); j++) {
					start.detail(format("Option-%d-%d", i, j).c_str(),
					             printable(options[i][j].key) + "=" + printable(options[i][j].value));
				}
			}
			state DistributedTestResults results;
			try {
				TestSpec spec(LiteralStringRef("PerformanceRun"), false, false);
				spec.phases = TestWorkload::EXECUTION | TestWorkload::METRICS;
				spec.options = options;
				DistributedTestResults r = wait(runWorkload(cx, self->testers, spec));
				results = r;
			} catch (Error& e) {
				// A failed probe run ends the search; whatever was recorded so far
				// stands as the result.
				TraceEvent("PerformanceRunError").error(e, true).detail("Workload", printable(self->probeWorkload));
				break;
			}
			// Metric names must match those emitted by the probe workload
			// (e.g. ReadWrite) exactly.
			PerfMetric tpsMetric = self->getNamedMetric("Transactions/sec", results.metrics);
			PerfMetric latencyMetric = self->getNamedMetric("Median Latency (ms, averaged)", results.metrics);

			logMetrics(results.metrics);

			// Track the lowest latency ever seen (baseline) and the metrics of
			// the highest-throughput run (saturation point).
			if (!reported || self->latencyBaseline.value() > latencyMetric.value())
				self->latencyBaseline = latencyMetric;
			if (!reported || self->maxAchievedTPS.value() < tpsMetric.value()) {
				self->maxAchievedTPS = tpsMetric;
				self->latencySaturation = latencyMetric;
				self->metrics = results.metrics;
			}
			reported = true;

			TraceEvent evt("PerformanceProbeComplete");
			evt.detail("RateTarget", tps)
			    .detail("AchievedRate", tpsMetric.value())
			    .detail("Multiplier", multiplier)
			    .detail("Retry", retry);
			if (tpsMetric.value() < (tps * .95) - 100) {
				// Achieved rate fell short of target: retry once, then either
				// finish (already on the fine step) or back off and refine.
				evt.detail("LimitReached", 1);
				if (!retry) {
					retry = true;
				} else if (multiplier < 2.0) {
					evt.detail("Saturation", "final");
					return Void();
				} else {
					tps /= 2;
					multiplier = 1.189; // ~2^(1/4): quarter-octave steps for the fine search
					retry = false;
				}
			} else {
				retry = false;
			}
			// Hold the rate on a retry; otherwise ramp by the current multiplier.
			tps *= retry ? 1.0 : multiplier;
		}

		return Void();
	}

	// Drives the saturation search and traces the final saturation figures.
	ACTOR Future<Void> _start(Database cx, PerformanceWorkload* self) {
		wait(self->getSaturation(cx, self));
		TraceEvent("PerformanceSaturation")
		    .detail("SaturationRate", self->maxAchievedTPS.value())
		    .detail("SaturationLatency", self->latencySaturation.value());
		return Void();
	}
};
|
|
|
|
|
|
|
|
// Registers this workload under the name "Performance" so test files can select it.
WorkloadFactory<PerformanceWorkload> PerformanceWorkloadFactory("Performance");
|