2019-02-13 07:26:26 +08:00
|
|
|
#!/usr/bin/env python
|
2018-08-29 02:47:35 +08:00
|
|
|
|
|
|
|
from argparse import ArgumentParser
|
|
|
|
from TestDirectory import TestDirectory
|
|
|
|
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import subprocess
|
|
|
|
import json
|
2019-01-05 06:58:00 +08:00
|
|
|
import xml.sax
|
|
|
|
import xml.sax.handler
|
2018-08-29 02:47:35 +08:00
|
|
|
import functools
|
|
|
|
import multiprocessing
|
|
|
|
import re
|
2019-01-15 12:31:02 +08:00
|
|
|
import shutil
|
2019-01-23 06:25:20 +08:00
|
|
|
import io
|
2019-02-16 13:45:26 +08:00
|
|
|
import random
|
2018-08-29 02:47:35 +08:00
|
|
|
|
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
_logger = None
|
2018-08-29 02:47:35 +08:00
|
|
|
|
|
|
|
def init_logging(loglevel, logdir):
|
|
|
|
global _logger
|
|
|
|
_logger = logging.getLogger('TestRunner')
|
|
|
|
_logger.setLevel(logging.DEBUG)
|
|
|
|
formatter = logging.Formatter('%(asctime)s - %(process)d - %(levelname)s - %(message)s')
|
|
|
|
try:
|
|
|
|
os.makedirs(logdir)
|
|
|
|
except:
|
|
|
|
pass
|
|
|
|
fh = logging.FileHandler(os.path.join(logdir, 'run_test.log'))
|
|
|
|
fh.setLevel(logging.DEBUG)
|
|
|
|
fh.setFormatter(formatter)
|
|
|
|
sh = logging.StreamHandler()
|
|
|
|
sh.setLevel(loglevel)
|
|
|
|
sh.setFormatter(formatter)
|
|
|
|
_logger.addHandler(fh)
|
|
|
|
_logger.addHandler(sh)
|
|
|
|
|
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
class LogParser:
|
2019-06-19 14:56:01 +08:00
|
|
|
def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
|
2019-01-05 06:58:00 +08:00
|
|
|
self.basedir = basedir
|
|
|
|
self.name = name
|
|
|
|
self.infile = infile
|
|
|
|
self.backtraces = []
|
|
|
|
self.result = True
|
|
|
|
self.address_re = re.compile(r'(0x[0-9a-f]+\s+)+')
|
2019-01-23 06:25:20 +08:00
|
|
|
self.aggregationPolicy = aggregationPolicy
|
2019-06-19 14:56:01 +08:00
|
|
|
self.symbolicateBacktraces = symbolicateBacktraces
|
2019-01-23 06:25:20 +08:00
|
|
|
self.outStream = None
|
|
|
|
if self.aggregationPolicy == 'NONE':
|
|
|
|
self.out = None
|
|
|
|
elif self.aggregationPolicy != 'ALL':
|
|
|
|
self.out = io.StringIO()
|
|
|
|
self.outStream = out
|
|
|
|
else:
|
|
|
|
self.out = out
|
|
|
|
|
|
|
|
def write(self, txt):
|
|
|
|
if self.aggregationPolicy == 'NONE':
|
|
|
|
pass
|
|
|
|
elif not self.result or self.aggregationPolicy == 'ALL':
|
|
|
|
self.out.write(txt)
|
|
|
|
else:
|
|
|
|
self.outStream.wite(txt)
|
|
|
|
|
|
|
|
def fail(self):
|
|
|
|
self.result = False
|
|
|
|
if self.aggregationPolicy == 'FAILED':
|
|
|
|
self.out.write(self.outStream.getvalue())
|
|
|
|
self.outStream = None
|
2018-08-29 02:47:35 +08:00
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
def writeHeader(self):
|
|
|
|
pass
|
2018-08-29 02:47:35 +08:00
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
def writeFooter(self):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def applyAddr2line(self, obj):
|
2019-01-15 11:10:33 +08:00
|
|
|
addresses = self.sanitizeBacktrace(obj)
|
2019-01-05 06:58:00 +08:00
|
|
|
assert addresses is not None
|
|
|
|
fdbbin = os.path.join(basedir, 'bin', 'fdbserver')
|
2018-08-29 02:47:35 +08:00
|
|
|
try:
|
2019-01-05 06:58:00 +08:00
|
|
|
resolved = subprocess.check_output(
|
|
|
|
('addr2line -e %s -C -f -i' % fdbbin).split() + addresses.split()).splitlines()
|
|
|
|
tmp = dict(**obj)
|
|
|
|
for i, line in enumerate(resolved):
|
|
|
|
tmp['line%04d' % i] = line.decode('utf-8')
|
|
|
|
return tmp
|
|
|
|
except (subprocess.CalledProcessError, UnicodeDecodeError):
|
|
|
|
obj['FailedAddr2LineResolution'] = 'true'
|
|
|
|
return obj
|
|
|
|
|
|
|
|
|
|
|
|
def sanitizeBacktrace(self, obj):
|
|
|
|
if sys.platform != "linux" and sys.platform != "linux2":
|
|
|
|
return None
|
|
|
|
raw_backtrace = obj.get('Backtrace', None)
|
|
|
|
if raw_backtrace is None:
|
|
|
|
return None
|
|
|
|
match = self.address_re.search(raw_backtrace)
|
|
|
|
if not match:
|
|
|
|
return None
|
|
|
|
return match.group(0)
|
|
|
|
|
|
|
|
def processTraces(self):
|
|
|
|
linenr = 0
|
|
|
|
with open(self.infile) as f:
|
2018-08-29 02:47:35 +08:00
|
|
|
line = f.readline()
|
|
|
|
while line != '':
|
2019-01-05 06:58:00 +08:00
|
|
|
obj = self.processLine(line, linenr)
|
|
|
|
line = f.readline()
|
|
|
|
linenr += 1
|
|
|
|
if obj is None:
|
|
|
|
continue
|
2018-08-29 02:47:35 +08:00
|
|
|
if 'Type' not in obj:
|
|
|
|
continue
|
|
|
|
if obj['Severity'] == '40':
|
2019-01-23 06:25:20 +08:00
|
|
|
self.fail()
|
2019-01-05 06:58:00 +08:00
|
|
|
if self.name is not None:
|
|
|
|
obj['testname'] = self.name
|
2019-06-19 14:56:01 +08:00
|
|
|
if self.symbolicateBacktraces and self.sanitizeBacktrace(obj) is not None:
|
2019-01-05 06:58:00 +08:00
|
|
|
obj = self.applyAddr2line(obj)
|
|
|
|
self.writeObject(obj)
|
|
|
|
|
|
|
|
def log_trace_parse_error(self, linenr, e):
|
|
|
|
obj = {}
|
|
|
|
_logger.error("Process trace line file {} Failed".format(self.infile))
|
|
|
|
_logger.error("Exception {} args: {}".format(type(e), e.args))
|
|
|
|
_logger.error("Line: '{}'".format(linenr))
|
|
|
|
obj['Severity'] = "warning"
|
|
|
|
obj['Type'] = "TestInfastructureLogLineGarbled"
|
|
|
|
obj['isLastLine'] = "TestFailure"
|
|
|
|
obj['TraceLine'] = linenr
|
|
|
|
obj['File'] = self.infile
|
|
|
|
return obj
|
|
|
|
|
|
|
|
def processReturnCodes(self, return_codes):
|
|
|
|
for (command, return_code) in return_codes.items():
|
|
|
|
return_code_trace = {}
|
|
|
|
if return_code != 0:
|
|
|
|
return_code_trace['Severity'] = '40'
|
|
|
|
return_code_trace['Type'] = 'TestFailure'
|
2019-01-23 06:25:20 +08:00
|
|
|
self.fail()
|
2019-01-05 06:58:00 +08:00
|
|
|
else:
|
|
|
|
return_code_trace['Severity'] = '10'
|
|
|
|
return_code_trace['Type'] = 'ReturnCode'
|
|
|
|
return_code_trace['Command'] = command
|
|
|
|
return_code_trace['ReturnCode'] = return_code
|
|
|
|
return_code_trace['testname'] = self.name
|
|
|
|
self.writeObject(return_code_trace)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class JSONParser(LogParser):
|
2019-06-19 14:56:01 +08:00
|
|
|
def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
|
|
|
|
super().__init__(basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces)
|
2018-08-29 02:47:35 +08:00
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
def processLine(self, line, linenr):
|
|
|
|
try:
|
|
|
|
return json.loads(line)
|
|
|
|
except Exception as e:
|
|
|
|
self.log_trace_parse_error(linenr, e)
|
|
|
|
|
|
|
|
def writeObject(self, obj):
|
2019-01-23 06:25:20 +08:00
|
|
|
self.write(json.dumps(obj))
|
|
|
|
self.write('\n')
|
2019-01-05 06:58:00 +08:00
|
|
|
|
|
|
|
|
|
|
|
class XMLParser(LogParser):
|
|
|
|
|
|
|
|
class XMLHandler(xml.sax.handler.ContentHandler):
|
|
|
|
def __init__(self):
|
|
|
|
self.result = {}
|
|
|
|
|
|
|
|
def startElement(self, name, attrs):
|
|
|
|
if name != 'Event':
|
|
|
|
return
|
|
|
|
for (key, value) in attrs.items():
|
|
|
|
self.result[key] = value
|
|
|
|
|
|
|
|
class XMLErrorHandler(xml.sax.handler.ErrorHandler):
|
|
|
|
def __init__(self):
|
|
|
|
self.errors = []
|
|
|
|
self.fatalErrors = []
|
|
|
|
self.warnings = []
|
|
|
|
|
|
|
|
def error(self, exception):
|
|
|
|
self.errors.append(exception)
|
|
|
|
|
|
|
|
def fatalError(self, exception):
|
|
|
|
self.fatalError.append(exception)
|
2018-08-29 02:47:35 +08:00
|
|
|
|
2019-01-05 06:58:00 +08:00
|
|
|
def warning(self, exception):
|
|
|
|
self.warnings.append(exception)
|
|
|
|
|
2019-06-19 14:56:01 +08:00
|
|
|
def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
|
|
|
|
super().__init__(basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces)
|
2019-01-05 06:58:00 +08:00
|
|
|
|
|
|
|
def writeHeader(self):
|
2019-01-23 06:25:20 +08:00
|
|
|
self.write('<?xml version="1.0"?>\n<Trace>\n')
|
2019-01-05 06:58:00 +08:00
|
|
|
|
|
|
|
def writeFooter(self):
|
2019-01-23 06:25:20 +08:00
|
|
|
self.write("</Trace>")
|
2019-01-05 06:58:00 +08:00
|
|
|
|
|
|
|
def writeObject(self, obj):
|
2019-01-23 06:25:20 +08:00
|
|
|
self.write('<Event')
|
2019-01-05 06:58:00 +08:00
|
|
|
for (key, value) in obj.items():
|
2019-01-23 06:25:20 +08:00
|
|
|
self.write(' {}="{}"'.format(key, value))
|
|
|
|
self.write('/>\n')
|
2019-01-05 06:58:00 +08:00
|
|
|
|
|
|
|
def processLine(self, line, linenr):
|
|
|
|
if linenr < 3:
|
|
|
|
# the first two lines don't need to be parsed
|
|
|
|
return None
|
|
|
|
if line.startswith('</'):
|
|
|
|
# don't parse the closing element
|
|
|
|
return None
|
|
|
|
handler = XMLParser.XMLHandler()
|
|
|
|
errorHandler = XMLParser.XMLErrorHandler()
|
Pass a bytestring to xml.sax.parseString
This fixes the ctest prb.
CI was broken because TestRunner errored at:
Traceback (most recent call last):
File "/foundationdb/tests/TestRunner/TestRunner.py", line 373, in <module>
res = run_simulation_test(basedir, args)
File "/foundationdb/tests/TestRunner/TestRunner.py", line 313, in run_simulation_test
options.log_format, return_codes)
File "/foundationdb/tests/TestRunner/TestRunner.py", line 261, in process_traces
parser.processTraces()
File "/foundationdb/tests/TestRunner/TestRunner.py", line 112, in processTraces
obj = self.processLine(line, linenr)
File "/foundationdb/tests/TestRunner/TestRunner.py", line 222, in processLine
xml.sax.parseString(line, handler, errorHandler=errorHandler)
File "/usr/lib/python3.4/xml/sax/__init__.py", line 45, in parseString
inpsrc.setByteStream(BytesIO(string))
TypeError: 'str' does not support the buffer interface
Which means you can't parse a string to parseString. This was fixed by
3.7, but our CI runs 3.4, so we need to .encode() `line` before passing
it, so that it ends up as a `bytes`.
2019-02-12 09:12:49 +08:00
|
|
|
xml.sax.parseString(line.encode('utf-8'), handler, errorHandler=errorHandler)
|
2019-01-05 06:58:00 +08:00
|
|
|
if len(errorHandler.fatalErrors) > 0:
|
|
|
|
return self.log_trace_parse_error(linenr, errorHandler.fatalErrors[0])
|
|
|
|
return handler.result
|
|
|
|
|
|
|
|
|
|
|
|
def get_traces(d, log_format):
|
|
|
|
p = re.compile('^trace\\..*\\.{}$'.format(log_format))
|
2018-08-29 02:47:35 +08:00
|
|
|
traces = list(map(
|
|
|
|
functools.partial(os.path.join, d),
|
|
|
|
filter(
|
|
|
|
lambda f: p.match(f) is not None,
|
|
|
|
os.listdir(d))))
|
|
|
|
if os.path.isdir(os.path.join(d, 'testrunner')):
|
|
|
|
traces += list(map(
|
|
|
|
functools.partial(os.path.join, d, 'testrunner'),
|
|
|
|
filter(
|
|
|
|
lambda f: p.match(f) is not None,
|
|
|
|
os.listdir(os.path.join(d, 'testrunner')))))
|
|
|
|
return traces
|
|
|
|
|
|
|
|
|
2019-06-19 14:56:01 +08:00
|
|
|
def process_traces(basedir, testname, path, out, aggregationPolicy, symbolicateBacktraces, log_format, return_codes, cmake_seed):
|
2018-08-29 02:47:35 +08:00
|
|
|
res = True
|
|
|
|
backtraces = []
|
2019-01-05 06:58:00 +08:00
|
|
|
parser = None
|
|
|
|
if log_format == 'json':
|
2019-06-19 14:56:01 +08:00
|
|
|
parser = JSONParser(basedir, testname, None, out, aggregationPolicy, symbolicateBacktraces)
|
2019-01-05 06:58:00 +08:00
|
|
|
else:
|
2019-06-19 14:56:01 +08:00
|
|
|
parser = XMLParser(basedir, testname, None, out, aggregationPolicy, symbolicateBacktraces)
|
2019-01-05 06:58:00 +08:00
|
|
|
parser.processReturnCodes(return_codes)
|
2019-01-23 06:25:20 +08:00
|
|
|
res = parser.result
|
|
|
|
for trace in get_traces(path, log_format):
|
|
|
|
if log_format == 'json':
|
2019-06-19 14:56:01 +08:00
|
|
|
parser = JSONParser(basedir, testname, trace, out, aggregationPolicy, symbolicateBacktraces)
|
2019-01-23 06:25:20 +08:00
|
|
|
else:
|
2019-06-19 14:56:01 +08:00
|
|
|
parser = XMLParser(basedir, testname, trace, out, aggregationPolicy, symbolicateBacktraces)
|
2019-01-23 06:25:20 +08:00
|
|
|
if not res:
|
|
|
|
parser.fail()
|
|
|
|
parser.processTraces()
|
|
|
|
res = res and parser.result
|
2019-03-08 07:04:21 +08:00
|
|
|
parser.writeObject({'CMakeSEED': str(cmake_seed)})
|
2019-01-23 06:25:20 +08:00
|
|
|
return res
|
|
|
|
|
2021-04-13 07:15:17 +08:00
|
|
|
class RestartTestPolicy:
|
|
|
|
def __init__(self, name, old_binary, new_binary):
|
|
|
|
# Default is to use the same binary for the restart test, unless constraints are satisfied.
|
|
|
|
self._first_binary = new_binary
|
|
|
|
self._second_binary = new_binary
|
|
|
|
if old_binary is None:
|
|
|
|
_logger.info("No old binary provided")
|
|
|
|
old_binary_version_raw = subprocess.check_output([old_binary, '--version']).decode('utf-8')
|
|
|
|
match = re.match('FoundationDB.*\(v([0-9]+\.[0-9]+\.[0-9]+)\)', old_binary_version_raw)
|
|
|
|
assert match, old_binary_version_raw
|
|
|
|
old_binary_version = tuple(map(int, match.group(1).split('.')))
|
|
|
|
match = re.match('.*/restarting/from_([0-9]+\.[0-9]+\.[0-9]+)/', name)
|
|
|
|
if match: # upgrading _from_
|
|
|
|
lower_bound = tuple(map(int, match.group(1).split('.')))
|
|
|
|
if old_binary_version >= lower_bound:
|
|
|
|
self._first_binary = old_binary
|
|
|
|
_logger.info("Using old binary as first binary: {} >= {}".format(old_binary_version, lower_bound))
|
|
|
|
else:
|
|
|
|
_logger.info("Using new binary as first binary: {} < {}".format(old_binary_version, lower_bound))
|
|
|
|
match = re.match('.*/restarting/to_([0-9]+\.[0-9]+\.[0-9]+)/', name)
|
|
|
|
if match: # downgrading _to_
|
|
|
|
lower_bound = tuple(map(int, match.group(1).split('.')))
|
|
|
|
if old_binary_version >= lower_bound:
|
|
|
|
self._second_binary = old_binary
|
|
|
|
_logger.info("Using old binary as second binary: {} >= {}".format(old_binary_version, lower_bound))
|
|
|
|
else:
|
|
|
|
_logger.info("Using new binary as second binary: {} < {}".format(old_binary_version, lower_bound))
|
|
|
|
|
|
|
|
def first_binary(self):
|
|
|
|
return self._first_binary
|
|
|
|
|
|
|
|
def second_binary(self):
|
|
|
|
return self._second_binary
|
|
|
|
|
2019-01-23 06:25:20 +08:00
|
|
|
def run_simulation_test(basedir, options):
|
|
|
|
fdbserver = os.path.join(basedir, 'bin', 'fdbserver')
|
|
|
|
pargs = [fdbserver,
|
|
|
|
'-r', options.testtype]
|
2019-07-17 05:47:40 +08:00
|
|
|
seed = 0
|
|
|
|
if options.seed is not None:
|
|
|
|
pargs.append('-s')
|
|
|
|
seed = int(options.seed, 0)
|
|
|
|
if options.test_number:
|
|
|
|
idx = int(options.test_number)
|
|
|
|
seed = ((seed + idx) % (2**32-2)) + 1
|
|
|
|
pargs.append("{}".format(seed))
|
2019-01-23 06:25:20 +08:00
|
|
|
if options.testtype == 'test':
|
2018-08-29 02:47:35 +08:00
|
|
|
pargs.append('-C')
|
|
|
|
pargs.append(os.path.join(args.builddir, 'fdb.cluster'))
|
|
|
|
td = TestDirectory(basedir)
|
2019-01-23 06:25:20 +08:00
|
|
|
if options.buggify:
|
2018-08-29 02:47:35 +08:00
|
|
|
pargs.append('-b')
|
|
|
|
pargs.append('on')
|
2020-05-05 18:11:09 +08:00
|
|
|
if options.crash:
|
|
|
|
pargs.append('--crash')
|
2019-02-20 05:58:18 +08:00
|
|
|
pargs.append('--trace_format')
|
|
|
|
pargs.append(options.log_format)
|
2018-08-29 02:47:35 +08:00
|
|
|
test_dir = td.get_current_test_dir()
|
2019-01-23 06:25:20 +08:00
|
|
|
if options.seed is not None:
|
2019-02-16 13:45:26 +08:00
|
|
|
seed = int(options.seed, 0)
|
|
|
|
if options.test_number:
|
|
|
|
idx = int(options.test_number)
|
2019-02-20 06:42:11 +08:00
|
|
|
seed = ((seed + idx) % (2**32-2)) + 1
|
2018-08-29 02:47:35 +08:00
|
|
|
wd = os.path.join(test_dir,
|
2019-01-23 06:25:20 +08:00
|
|
|
'test_{}'.format(options.name.replace('/', '_')))
|
2018-08-29 02:47:35 +08:00
|
|
|
os.mkdir(wd)
|
|
|
|
return_codes = {} # {command: return_code}
|
|
|
|
first = True
|
2021-04-13 07:15:17 +08:00
|
|
|
restart_test_policy = None
|
|
|
|
if len(options.testfile) > 1:
|
|
|
|
restart_test_policy = RestartTestPolicy(options.testfile[0], options.old_binary, fdbserver)
|
2019-01-23 06:25:20 +08:00
|
|
|
for testfile in options.testfile:
|
2018-08-29 02:47:35 +08:00
|
|
|
tmp = list(pargs)
|
2019-07-03 00:15:05 +08:00
|
|
|
valgrind_args = []
|
2021-04-13 07:15:17 +08:00
|
|
|
if restart_test_policy is not None:
|
|
|
|
if first:
|
|
|
|
tmp[0] = restart_test_policy.first_binary()
|
|
|
|
else:
|
|
|
|
tmp[0] = restart_test_policy.second_binary()
|
|
|
|
# old_binary is not under test, so don't run under valgrind
|
|
|
|
if options.use_valgrind and tmp[0] == fdbserver:
|
2019-07-03 00:15:05 +08:00
|
|
|
valgrind_args = ['valgrind', '--error-exitcode=99', '--']
|
2018-08-29 02:47:35 +08:00
|
|
|
if not first:
|
|
|
|
tmp.append('-R')
|
2019-07-12 00:55:01 +08:00
|
|
|
if seed is not None:
|
|
|
|
seed = ((seed + 1) % (2**32-2))
|
2018-08-29 02:47:35 +08:00
|
|
|
first = False
|
2019-07-12 00:55:01 +08:00
|
|
|
if seed is not None:
|
|
|
|
tmp.append('-s')
|
|
|
|
tmp.append("{}".format(seed))
|
2018-08-29 02:47:35 +08:00
|
|
|
tmp.append('-f')
|
|
|
|
tmp.append(testfile)
|
2019-07-03 00:15:05 +08:00
|
|
|
tmp = valgrind_args + tmp
|
2018-08-29 02:47:35 +08:00
|
|
|
command = ' '.join(tmp)
|
|
|
|
_logger.info("COMMAND: {}".format(command))
|
|
|
|
proc = subprocess.Popen(tmp,
|
|
|
|
stdout=sys.stdout,
|
|
|
|
stderr=sys.stderr,
|
|
|
|
cwd=wd)
|
|
|
|
proc.wait()
|
|
|
|
return_codes[command] = proc.returncode
|
2019-07-12 00:55:01 +08:00
|
|
|
outfile = os.path.join(test_dir, 'traces.{}'.format(options.log_format))
|
|
|
|
res = True
|
|
|
|
if options.aggregate_traces == 'NONE':
|
2019-01-23 06:25:20 +08:00
|
|
|
res = process_traces(basedir, options.name,
|
2019-07-12 00:55:01 +08:00
|
|
|
wd, None, 'NONE', options.symbolicate,
|
2019-03-08 07:04:21 +08:00
|
|
|
options.log_format, return_codes, options.seed)
|
2019-07-12 00:55:01 +08:00
|
|
|
|
|
|
|
else:
|
|
|
|
with open(outfile, 'a') as f:
|
|
|
|
os.lockf(f.fileno(), os.F_LOCK, 0)
|
|
|
|
pos = f.tell()
|
|
|
|
res = process_traces(basedir, options.name,
|
|
|
|
wd, f, options.aggregate_traces, options.symbolicate,
|
|
|
|
options.log_format, return_codes, options.seed)
|
|
|
|
f.seek(pos)
|
|
|
|
os.lockf(f.fileno(), os.F_ULOCK, 0)
|
|
|
|
if proc.returncode != 0 or res == False:
|
|
|
|
break
|
2019-01-23 06:25:20 +08:00
|
|
|
if options.keep_logs == 'NONE' or options.keep_logs == 'FAILED' and res:
|
|
|
|
print("Deleting old logs in {}".format(wd))
|
|
|
|
traces = get_traces(wd, options.log_format)
|
|
|
|
for trace in traces:
|
|
|
|
os.remove(trace)
|
|
|
|
if options.keep_simdirs == 'NONE' or options.keep_simdirs == 'FAILED' and res:
|
|
|
|
print("Delete {}".format(os.path.join(wd, 'simfdb')))
|
|
|
|
shutil.rmtree(os.path.join(wd, 'simfdb'))
|
|
|
|
if len(os.listdir(wd)) == 0:
|
|
|
|
print("Delete {} - empty".format(wd))
|
|
|
|
os.rmdir(wd)
|
2018-08-29 02:47:35 +08:00
|
|
|
return res
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
|
|
|
|
testtypes = ['simulation', 'test']
|
|
|
|
parser = ArgumentParser(description='Run a test preprocess trace')
|
|
|
|
parser.add_argument('-b', '--builddir', help='Path to build directory')
|
|
|
|
parser.add_argument('-s', '--sourcedir', help='Path to source directory')
|
|
|
|
parser.add_argument('-n', '--name', help='Name of the test')
|
|
|
|
parser.add_argument('-t', '--testtype', choices=testtypes,
|
|
|
|
default='simulation',
|
|
|
|
help='The type of test to run, choices are [{}]'.format(
|
|
|
|
', '.join(testtypes))),
|
|
|
|
parser.add_argument('-B', '--buggify', action='store_true',
|
|
|
|
help='Enable buggify')
|
|
|
|
parser.add_argument('--logdir', default='logs',
|
|
|
|
help='Directory for logs')
|
|
|
|
parser.add_argument('-l', '--loglevel',
|
|
|
|
choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO',
|
|
|
|
'DEBUG'],
|
|
|
|
default='INFO')
|
|
|
|
parser.add_argument('-x', '--seed', required=False, default=None,
|
|
|
|
help='The seed to use for this test')
|
2019-02-16 13:45:26 +08:00
|
|
|
parser.add_argument('-N', '--test-number', required=False, default=None,
|
|
|
|
help='A unique number for this test (for seed generation)')
|
2019-01-05 06:58:00 +08:00
|
|
|
parser.add_argument('-F', '--log-format', required=False, default='xml',
|
|
|
|
choices=['xml', 'json'], help='Log format (json or xml)')
|
2019-01-23 06:25:20 +08:00
|
|
|
parser.add_argument('-O', '--old-binary', required=False, default=None,
|
|
|
|
help='Path to the old binary to use for upgrade tests')
|
2019-06-19 14:56:01 +08:00
|
|
|
parser.add_argument('-S', '--symbolicate', action='store_true', default=False,
|
|
|
|
help='Symbolicate backtraces in trace events')
|
2020-05-05 18:11:09 +08:00
|
|
|
parser.add_argument('--crash', action='store_true', default=False,
|
|
|
|
help='Test ASSERT failures should crash the test')
|
2019-01-23 06:25:20 +08:00
|
|
|
parser.add_argument('--aggregate-traces', default='NONE',
|
|
|
|
choices=['NONE', 'FAILED', 'ALL'])
|
|
|
|
parser.add_argument('--keep-logs', default='FAILED',
|
|
|
|
choices=['NONE', 'FAILED', 'ALL'])
|
|
|
|
parser.add_argument('--keep-simdirs', default='NONE',
|
|
|
|
choices=['NONE', 'FAILED', 'ALL'])
|
2018-08-29 02:47:35 +08:00
|
|
|
parser.add_argument('testfile', nargs="+", help='The tests to run')
|
2019-07-03 00:15:05 +08:00
|
|
|
parser.add_argument('--use-valgrind', action='store_true', default=False,
|
|
|
|
help='Run under valgrind')
|
2018-08-29 02:47:35 +08:00
|
|
|
args = parser.parse_args()
|
|
|
|
init_logging(args.loglevel, args.logdir)
|
|
|
|
basedir = os.getcwd()
|
|
|
|
if args.builddir is not None:
|
|
|
|
basedir = args.builddir
|
2019-01-23 06:25:20 +08:00
|
|
|
res = run_simulation_test(basedir, args)
|
2018-11-07 06:47:25 +08:00
|
|
|
sys.exit(0 if res else 1)
|