#!/usr/bin/env python
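
# TestRunner drives fdbserver simulation/test runs and post-processes the
# resulting trace logs (XML or JSON) into aggregated output.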

from argparse import ArgumentParser

from TestDirectory import TestDirectory

import logging
import os
import sys
import subprocess
import json
import xml.sax
import xml.sax.handler
import functools
import multiprocessing
import re
import shutil
import io
import random


_logger = None


def init_logging(loglevel, logdir):
    global _logger
    _logger = logging.getLogger('TestRunner')
    _logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(process)d - %(levelname)s - %(message)s')
    try:
        os.makedirs(logdir)
    except OSError:
        # The log directory may already exist.
        pass
    fh = logging.FileHandler(os.path.join(logdir, 'run_test.log'))
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    sh = logging.StreamHandler()
    sh.setLevel(loglevel)
    sh.setFormatter(formatter)
    _logger.addHandler(fh)
    _logger.addHandler(sh)
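
# A log line produced by this setup looks like (values illustrative):
#   2019-02-13 07:26:26,123 - 4242 - INFO - COMMAND: bin/fdbserver -r simulation ...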


class LogParser:
    def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
        self.basedir = basedir
        self.name = name
        self.infile = infile
        self.backtraces = []
        self.result = True
        self.address_re = re.compile(r'(0x[0-9a-f]+\s+)+')
        self.aggregationPolicy = aggregationPolicy
        self.symbolicateBacktraces = symbolicateBacktraces
        self.outStream = None
        if self.aggregationPolicy == 'NONE':
            self.out = None
        elif self.aggregationPolicy != 'ALL':
            # 'FAILED': write to an in-memory buffer first; the buffered
            # events are flushed to `out` only if the test fails (see fail()).
            self.out = out
            self.outStream = io.StringIO()
        else:
            self.out = out
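
    # Policy summary: 'NONE' discards parser output entirely, 'ALL' writes
    # every event straight through, and 'FAILED' buffers events and emits
    # them only once fail() has been called.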

    def write(self, txt):
        if self.aggregationPolicy == 'NONE':
            pass
        elif not self.result or self.aggregationPolicy == 'ALL':
            self.out.write(txt)
        else:
            self.outStream.write(txt)

    def fail(self):
        self.result = False
        if self.aggregationPolicy == 'FAILED':
            # The test failed: flush everything buffered so far and write
            # directly to the output from now on.
            self.out.write(self.outStream.getvalue())
            self.outStream = None

    def writeHeader(self):
        pass

    def writeFooter(self):
        pass

    def applyAddr2line(self, obj):
        addresses = self.sanitizeBacktrace(obj)
        assert addresses is not None
        fdbbin = os.path.join(self.basedir, 'bin', 'fdbserver')
        try:
            resolved = subprocess.check_output(
                ('addr2line -e %s -C -f -i' % fdbbin).split() + addresses.split()).splitlines()
            tmp = dict(**obj)
            for i, line in enumerate(resolved):
                tmp['line%04d' % i] = line.decode('utf-8')
            return tmp
        except (subprocess.CalledProcessError, UnicodeDecodeError):
            obj['FailedAddr2LineResolution'] = 'true'
            return obj
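
    # Illustrative sketch (the event below is made up): for an event carrying
    #   Backtrace="... 0x7f3a12 0x4021b4 "
    # sanitizeBacktrace() extracts the run of hex addresses, and
    # applyAddr2line() shells out to
    #   addr2line -e <basedir>/bin/fdbserver -C -f -i 0x7f3a12 0x4021b4
    # attaching each resolved frame to the event as line0000, line0001, ...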

    def sanitizeBacktrace(self, obj):
        # Backtrace symbolication is only attempted on Linux.
        if sys.platform != "linux" and sys.platform != "linux2":
            return None
        raw_backtrace = obj.get('Backtrace', None)
        if raw_backtrace is None:
            return None
        match = self.address_re.search(raw_backtrace)
        if not match:
            return None
        return match.group(0)

    def processTraces(self):
        linenr = 0
        with open(self.infile) as f:
            line = f.readline()
            while line != '':
                obj = self.processLine(line, linenr)
                line = f.readline()
                linenr += 1
                if obj is None:
                    continue
                if 'Type' not in obj:
                    continue
                if obj.get('Severity') == '40':
                    # Severity 40 events are errors; they fail the test.
                    self.fail()
                if self.name is not None:
                    obj['testname'] = self.name
                if self.symbolicateBacktraces and self.sanitizeBacktrace(obj) is not None:
                    obj = self.applyAddr2line(obj)
                self.writeObject(obj)

    def log_trace_parse_error(self, linenr, e):
        obj = {}
        _logger.error("Failed to process a trace line in file {}".format(self.infile))
        _logger.error("Exception {} args: {}".format(type(e), e.args))
        _logger.error("Line number: {}".format(linenr))
        obj['Severity'] = "warning"
        obj['Type'] = "TestInfrastructureLogLineGarbled"
        obj['isLastLine'] = "TestFailure"
        obj['TraceLine'] = linenr
        obj['File'] = self.infile
        return obj

    def processReturnCodes(self, return_codes):
        for (command, return_code) in return_codes.items():
            return_code_trace = {}
            if return_code != 0:
                return_code_trace['Severity'] = '40'
                return_code_trace['Type'] = 'TestFailure'
                self.fail()
            else:
                return_code_trace['Severity'] = '10'
                return_code_trace['Type'] = 'ReturnCode'
            return_code_trace['Command'] = command
            return_code_trace['ReturnCode'] = return_code
            return_code_trace['testname'] = self.name
            self.writeObject(return_code_trace)
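
# Minimal usage sketch (illustrative; the paths are made up):
#   parser = XMLParser('/path/to/build', 'MyTest', 'trace.0.xml',
#                      sys.stdout, 'ALL', False)
#   parser.writeHeader()
#   parser.processTraces()
#   parser.writeFooter()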


class JSONParser(LogParser):
    def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
        super().__init__(basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces)

    def processLine(self, line, linenr):
        try:
            return json.loads(line)
        except Exception as e:
            # Return the placeholder event so the parse error is visible
            # in the aggregated output.
            return self.log_trace_parse_error(linenr, e)

    def writeObject(self, obj):
        self.write(json.dumps(obj))
        self.write('\n')


class XMLParser(LogParser):

    class XMLHandler(xml.sax.handler.ContentHandler):
        def __init__(self):
            super().__init__()
            self.result = {}

        def startElement(self, name, attrs):
            if name != 'Event':
                return
            for (key, value) in attrs.items():
                self.result[key] = value

    class XMLErrorHandler(xml.sax.handler.ErrorHandler):
        def __init__(self):
            self.errors = []
            self.fatalErrors = []
            self.warnings = []

        def error(self, exception):
            self.errors.append(exception)

        def fatalError(self, exception):
            self.fatalErrors.append(exception)

        def warning(self, exception):
            self.warnings.append(exception)

    def __init__(self, basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces):
        super().__init__(basedir, name, infile, out, aggregationPolicy, symbolicateBacktraces)

    def writeHeader(self):
        self.write('<?xml version="1.0"?>\n<Trace>\n')

    def writeFooter(self):
        self.write("</Trace>")

    def writeObject(self, obj):
        self.write('<Event')
        for (key, value) in obj.items():
            self.write(' {}="{}"'.format(key, value))
        self.write('/>\n')

    def processLine(self, line, linenr):
        if linenr < 3:
            # Skip the file prologue (XML declaration and opening <Trace>
            # element); those lines are not events.
            return None
        if line.startswith('</'):
            # Don't parse the closing </Trace> element.
            return None
        handler = XMLParser.XMLHandler()
        errorHandler = XMLParser.XMLErrorHandler()
        # Pass bytes, not str: before Python 3.7, xml.sax.parseString() only
        # accepts a bytestring and otherwise raises "TypeError: 'str' does
        # not support the buffer interface".
        xml.sax.parseString(line.encode('utf-8'), handler, errorHandler=errorHandler)
        if len(errorHandler.fatalErrors) > 0:
            return self.log_trace_parse_error(linenr, errorHandler.fatalErrors[0])
        return handler.result
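
    # Illustrative: a trace line such as
    #   <Event Severity="10" Time="12.5" Type="Role" Machine="1.0.0.1:1"/>
    # parses into {'Severity': '10', 'Time': '12.5', 'Type': 'Role', ...}.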


def get_traces(d, log_format):
    p = re.compile(r'^trace\..*\.{}$'.format(log_format))
    traces = list(map(
        functools.partial(os.path.join, d),
        filter(
            lambda f: p.match(f) is not None,
            os.listdir(d))))
    if os.path.isdir(os.path.join(d, 'testrunner')):
        traces += list(map(
            functools.partial(os.path.join, d, 'testrunner'),
            filter(
                lambda f: p.match(f) is not None,
                os.listdir(os.path.join(d, 'testrunner')))))
    return traces
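
# For example (hypothetical file names): with log_format='xml', a directory
# holding trace.127.0.0.1.4000.1549000000.xyz.1.xml and fdb.cluster yields
# only the trace.*.xml path.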


def process_traces(basedir, testname, path, out, aggregationPolicy, symbolicateBacktraces, log_format, return_codes, cmake_seed):
    res = True
    backtraces = []
    parser = None
    if log_format == 'json':
        parser = JSONParser(basedir, testname, None, out, aggregationPolicy, symbolicateBacktraces)
    else:
        parser = XMLParser(basedir, testname, None, out, aggregationPolicy, symbolicateBacktraces)
    parser.processReturnCodes(return_codes)
    res = parser.result
    for trace in get_traces(path, log_format):
        if log_format == 'json':
            parser = JSONParser(basedir, testname, trace, out, aggregationPolicy, symbolicateBacktraces)
        else:
            parser = XMLParser(basedir, testname, trace, out, aggregationPolicy, symbolicateBacktraces)
        if not res:
            parser.fail()
        parser.processTraces()
        res = res and parser.result
    parser.writeObject({'CMakeSEED': str(cmake_seed)})
    return res


def run_simulation_test(basedir, options):
    fdbserver = os.path.join(basedir, 'bin', 'fdbserver')
    pargs = [fdbserver,
             '-r', options.testtype]
    if options.testtype == 'test':
        pargs.append('-C')
        pargs.append(os.path.join(options.builddir, 'fdb.cluster'))
    else:
        pargs.append('-S')
        pargs.append('off')
    td = TestDirectory(basedir)
    if options.buggify:
        pargs.append('-b')
        pargs.append('on')
    pargs.append('--trace_format')
    pargs.append(options.log_format)
    test_dir = td.get_current_test_dir()
    if options.seed is not None:
        pargs.append('-s')
        seed = int(options.seed, 0)
        if options.test_number:
            idx = int(options.test_number)
            # Derive a distinct but reproducible seed per test number,
            # kept in the range [1, 2**32 - 2].
            seed = ((seed + idx) % (2**32 - 2)) + 1
        pargs.append("{}".format(seed))
    wd = os.path.join(test_dir,
                      'test_{}'.format(options.name.replace('/', '_')))
    os.mkdir(wd)
    return_codes = {}  # {command: return_code}
    first = True
    for testfile in options.testfile:
        tmp = list(pargs)
        if first and options.old_binary is not None and len(options.testfile) > 1:
            _logger.info("Run old binary at {}".format(options.old_binary))
            tmp[0] = options.old_binary
        if not first:
            tmp.append('-R')
        first = False
        tmp.append('-f')
        tmp.append(testfile)
        command = ' '.join(tmp)
        _logger.info("COMMAND: {}".format(command))
        proc = subprocess.Popen(tmp,
                                stdout=sys.stdout,
                                stderr=sys.stderr,
                                cwd=wd)
        proc.wait()
        return_codes[command] = proc.returncode
        if proc.returncode != 0:
            break
    outfile = os.path.join(test_dir, 'traces.{}'.format(options.log_format))
    res = True
    if options.aggregate_traces == 'NONE':
        res = process_traces(basedir, options.name,
                             wd, None, 'NONE', options.symbolicate,
                             options.log_format, return_codes, options.seed)
    else:
        with open(outfile, 'a') as f:
            # Hold an exclusive lock while appending, so concurrently
            # running tests don't interleave their aggregated traces.
            os.lockf(f.fileno(), os.F_LOCK, 0)
            pos = f.tell()
            res = process_traces(basedir, options.name,
                                 wd, f, options.aggregate_traces, options.symbolicate,
                                 options.log_format, return_codes, options.seed)
            f.seek(pos)
            os.lockf(f.fileno(), os.F_ULOCK, 0)
    if options.keep_logs == 'NONE' or (options.keep_logs == 'FAILED' and res):
        print("Deleting old logs in {}".format(wd))
        traces = get_traces(wd, options.log_format)
        for trace in traces:
            os.remove(trace)
    if options.keep_simdirs == 'NONE' or (options.keep_simdirs == 'FAILED' and res):
        print("Delete {}".format(os.path.join(wd, 'simfdb')))
        shutil.rmtree(os.path.join(wd, 'simfdb'))
    if len(os.listdir(wd)) == 0:
        print("Delete {} - empty".format(wd))
        os.rmdir(wd)
    return res


if __name__ == '__main__':
    testtypes = ['simulation', 'test']
    parser = ArgumentParser(description='Run a test and preprocess its traces')
    parser.add_argument('-b', '--builddir', help='Path to build directory')
    parser.add_argument('-s', '--sourcedir', help='Path to source directory')
    parser.add_argument('-n', '--name', help='Name of the test')
    parser.add_argument('-t', '--testtype', choices=testtypes,
                        default='simulation',
                        help='The type of test to run, choices are [{}]'.format(
                            ', '.join(testtypes)))
    parser.add_argument('-B', '--buggify', action='store_true',
                        help='Enable buggify')
    parser.add_argument('--logdir', default='logs',
                        help='Directory for logs')
    parser.add_argument('-l', '--loglevel',
                        choices=['CRITICAL', 'ERROR', 'WARNING', 'INFO',
                                 'DEBUG'],
                        default='INFO')
    parser.add_argument('-x', '--seed', required=False, default=None,
                        help='The seed to use for this test')
    parser.add_argument('-N', '--test-number', required=False, default=None,
                        help='A unique number for this test (for seed generation)')
    parser.add_argument('-F', '--log-format', required=False, default='xml',
                        choices=['xml', 'json'], help='Log format (json or xml)')
    parser.add_argument('-O', '--old-binary', required=False, default=None,
                        help='Path to the old binary to use for upgrade tests')
    parser.add_argument('-S', '--symbolicate', action='store_true', default=False,
                        help='Symbolicate backtraces in trace events')
    parser.add_argument('--aggregate-traces', default='NONE',
                        choices=['NONE', 'FAILED', 'ALL'])
    parser.add_argument('--keep-logs', default='FAILED',
                        choices=['NONE', 'FAILED', 'ALL'])
    parser.add_argument('--keep-simdirs', default='NONE',
                        choices=['NONE', 'FAILED', 'ALL'])
    parser.add_argument('testfile', nargs="+", help='The tests to run')
    args = parser.parse_args()
    init_logging(args.loglevel, args.logdir)
    basedir = os.getcwd()
    if args.builddir is not None:
        basedir = args.builddir
    res = run_simulation_test(basedir, args)
    sys.exit(0 if res else 1)