fix: avoid flushing large queues in simulation when checking latency

This commit is contained in:
Evan Tschannen 2017-11-27 17:23:20 -08:00
parent 062d7ad400
commit dc624a54dc
1 changed files with 5 additions and 0 deletions

View File

@@ -37,6 +37,7 @@
#include "ServerDBInfo.h"
#include "LogSystem.h"
#include "WaitFailure.h"
#include "fdbrpc/simulator.h"
using std::pair;
using std::make_pair;
@@ -1411,6 +1412,10 @@ ACTOR Future<Void> restorePersistentState( TLogData* self, LocalityData locality
state UID lastId = UID(1,1); //initialized so it will not compare equal to a default UID
state double recoverMemoryLimit = SERVER_KNOBS->TARGET_BYTES_PER_TLOG + SERVER_KNOBS->SPRING_BYTES_TLOG;
if (BUGGIFY) recoverMemoryLimit = std::max<double>(SERVER_KNOBS->BUGGIFY_RECOVER_MEMORY_LIMIT, SERVER_KNOBS->TLOG_SPILL_THRESHOLD);
//LowLatencyWithFailures needs to avoid flushing large queues
if (g_network->isSimulated() && g_simulator.connectionFailuresDisableDuration > 0) recoverMemoryLimit = 2000e6;
try {
loop {
if(allRemoved.isReady()) {