[dosep] Run tests in a more parallel fashion
Summary:
Due to in-tree builds, we were parallelizing the tests at the directory level. Now that the tests are built out of tree, we can remove this limitation and parallelize at the file level instead. This decreases test suite time by about 10% for me, which is not world-shattering, but it makes the code slightly simpler and will also allow us to merge tests which were artificially spread over multiple folders (TestConcurrentEvents...) to work around this limitation.

To make this work, I've also needed to include the test file name in the build directory name, as the test method name alone is not unique enough (plenty of tests have a test method called "test" or similar).

While doing this, I've found a couple of tests that are taking waaay longer than they ought to (TestBreakpointCaseSensitivity -- 90 seconds), which I plan to look into in the future.

Reviewers: aprantl

Subscribers: lldb-commits

Differential Revision: https://reviews.llvm.org/D43335

llvm-svn: 325322
parent 82443d365f
commit 42a1619262
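Before the full diff, here is a minimal, runnable sketch of the new shape of the runner: discovery yields individual Test*.py files, and each file becomes its own work item handed to a worker pool. The names find_test_files_in_dir_tree and process_file and the (test_file, dotest_argv, None) work-item tuple mirror the change below; the trivial process_file body and the run_all driver are illustrative stand-ins, not the actual dosep.py code.

import os
import multiprocessing.pool


def find_test_files_in_dir_tree(dir_root):
    # Yield every Test*.py file under dir_root -- one work item per file,
    # instead of one work item per directory.
    for root, _, files in os.walk(dir_root, topdown=False):
        for f in files:
            if f.startswith("Test") and f.endswith(".py"):
                yield os.path.join(root, f)


def process_file(test_file, dotest_argv, inferior_pid_events):
    # dosep.py launches a dotest.py inferior for this one file; returning
    # the base name keeps this sketch self-contained and runnable.
    return os.path.basename(test_file)


def run_all(test_subdir, dotest_argv, num_threads=4):
    # Build one (test_file, dotest_argv, None) work item per discovered file
    # and map the per-file runner over them with a thread pool.
    work_items = [(f, dotest_argv, None)
                  for f in find_test_files_in_dir_tree(test_subdir)]
    pool = multiprocessing.pool.ThreadPool(num_threads)
    return pool.map(lambda args: process_file(*args), work_items)

Because the pool now sees one item per file rather than one per directory, a single slow directory no longer serializes all of its tests behind one worker.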
packages/Python/lldbsuite/test/dosep.py

@@ -470,20 +470,21 @@ def call_with_timeout(
     return process_driver.results


-def process_dir(root, files, dotest_argv, inferior_pid_events):
-    """Examine a directory for tests, and invoke any found within it."""
+def process_file(test_file, dotest_argv, inferior_pid_events):
+    """Run tests in the specified file in a subprocess and gather the results."""
     results = []
-    for (base_name, full_test_path) in files:
+    base_name = os.path.basename(test_file)

     import __main__ as main
     global dotest_options
     if dotest_options.p and not re.search(dotest_options.p, base_name):
-        continue
+        return

     script_file = main.__file__
     command = ([sys.executable, script_file] +
                dotest_argv +
                ["-S", dotest_options.session_file_format] +
-               ["--inferior", "-p", base_name, root])
+               ["--inferior", "-p", base_name, os.path.dirname(test_file)])

     timeout_name = os.path.basename(os.path.splitext(base_name)[0]).upper()

@@ -491,7 +492,7 @@ def process_dir(root, files, dotest_argv, inferior_pid_events):
         getDefaultTimeout(dotest_options.lldb_platform_name))

     results.append(call_with_timeout(
-        command, timeout, base_name, inferior_pid_events, full_test_path))
+        command, timeout, base_name, inferior_pid_events, test_file))

     # result = (name, status, passes, failures, unexpected_successes)
     timed_out = [name for name, status, _, _, _ in results

@@ -535,7 +536,7 @@ def process_dir_worker_multiprocessing(
     while not job_queue.empty():
         try:
             job = job_queue.get(block=False)
-            result = process_dir(job[0], job[1], job[2],
+            result = process_file(job[0], job[1], job[2],
                                  inferior_pid_events)
             result_queue.put(result)
         except queue.Empty:

@@ -543,11 +544,11 @@
             pass


-def process_dir_worker_multiprocessing_pool(args):
-    return process_dir(*args)
+def process_file_worker_multiprocessing_pool(args):
+    return process_file(*args)


-def process_dir_worker_threading(job_queue, result_queue, inferior_pid_events):
+def process_file_worker_threading(job_queue, result_queue, inferior_pid_events):
     """Worker thread main loop when in threading mode.

     This one supports the hand-rolled pooling support.

@@ -558,25 +559,24 @@ def process_dir_worker_threading(job_queue, result_queue, inferior_pid_events):
     while not job_queue.empty():
         try:
             job = job_queue.get(block=False)
-            result = process_dir(job[0], job[1], job[2],
-                                 inferior_pid_events)
+            result = process_file(job[0], job[1], inferior_pid_events)
             result_queue.put(result)
         except queue.Empty:
             # Fine, we're done.
             pass


-def process_dir_worker_threading_pool(args):
-    return process_dir(*args)
+def process_file_worker_threading_pool(args):
+    return process_file(*args)


-def process_dir_mapper_inprocess(args):
+def process_file_mapper_inprocess(args):
     """Map adapter for running the subprocess-based, non-threaded test runner.

     @param args the process work item tuple
     @return the test result tuple
     """
-    return process_dir(*args)
+    return process_file(*args)


 def collect_active_pids_from_pid_events(event_queue):

@@ -646,16 +646,12 @@ def kill_all_worker_threads(workers, inferior_pid_events):
         worker.join()


-def find_test_files_in_dir_tree(dir_root, found_func):
-    """Calls found_func for all the test files in the given dir hierarchy.
+def find_test_files_in_dir_tree(dir_root):
+    """Returns all the test files in the given dir hierarchy.

     @param dir_root the path to the directory to start scanning
     for test files. All files in this directory and all its children
     directory trees will be searched.
-
-    @param found_func a callable object that will be passed
-    the parent directory (relative to dir_root) and the list of
-    test files from within that directory.
     """
     for root, _, files in os.walk(dir_root, topdown=False):
         def is_test_filename(test_dir, base_filename):

@@ -676,12 +672,9 @@ def find_test_files_in_dir_tree(dir_root, found_func):
             return (base_filename.startswith("Test") and
                     base_filename.endswith(".py"))

-        tests = [
-            (filename, os.path.join(root, filename))
-            for filename in files
-            if is_test_filename(root, filename)]
-        if tests:
-            found_func(root, tests)
+        for f in files:
+            if is_test_filename(root, f):
+                yield os.path.join(root, f)


 def initialize_global_vars_common(num_threads, test_work_items, session_dir,

@@ -689,7 +682,7 @@
     global g_session_dir, g_runner_context, total_tests, test_counter
     global test_name_len

-    total_tests = sum([len(item[1]) for item in test_work_items])
+    total_tests = len(test_work_items)
     test_counter = multiprocessing.Value('i', 0)
     test_name_len = multiprocessing.Value('i', 0)
     g_session_dir = session_dir

@@ -949,7 +942,7 @@ def multiprocessing_test_runner(num_threads, test_work_items, session_dir,
     workers = []
     for _ in range(num_threads):
         worker = multiprocessing.Process(
-            target=process_dir_worker_multiprocessing,
+            target=process_file_worker_multiprocessing,
             args=(output_lock,
                   test_counter,
                   total_tests,

@@ -1045,7 +1038,7 @@ def multiprocessing_test_runner_pool(num_threads, test_work_items, session_dir,

     # Start the map operation (async mode).
     map_future = pool.map_async(
-        process_dir_worker_multiprocessing_pool, test_work_items)
+        process_file_worker_multiprocessing_pool, test_work_items)
     return map_async_run_loop(
         map_future, RUNNER_PROCESS_ASYNC_MAP, RESULTS_LISTENER_CHANNEL)

@@ -1092,7 +1085,7 @@ def threading_test_runner(num_threads, test_work_items, session_dir,
     workers = []
     for _ in range(num_threads):
         worker = threading.Thread(
-            target=process_dir_worker_threading,
+            target=process_file_worker_threading,
             args=(job_queue,
                   result_queue,
                   inferior_pid_events))

@@ -1129,7 +1122,7 @@ def threading_test_runner_pool(num_threads, test_work_items, session_dir,

     pool = multiprocessing.pool.ThreadPool(num_threads)
     map_future = pool.map_async(
-        process_dir_worker_threading_pool, test_work_items)
+        process_file_worker_threading_pool, test_work_items)

     return map_async_run_loop(
         map_future, RUNNER_PROCESS_ASYNC_MAP, RESULTS_LISTENER_CHANNEL)

@@ -1166,7 +1159,7 @@ def inprocess_exec_test_runner(test_work_items, session_dir, runner_context):
     socket_thread.start()

     # Do the work.
-    test_results = list(map(process_dir_mapper_inprocess, test_work_items))
+    test_results = list(map(process_file_mapper_inprocess, test_work_items))

     # If we have a listener channel, shut it down here.
     if RESULTS_LISTENER_CHANNEL is not None:

@@ -1183,7 +1176,7 @@ def inprocess_exec_test_runner(test_work_items, session_dir, runner_context):
 def walk_and_invoke(test_files, dotest_argv, num_workers, test_runner_func):
     """Invokes the test runner on each test file specified by test_files.

-    @param test_files a list of (test_subdir, list_of_test_files_in_dir)
+    @param test_files a list of (test_file, full_path_to_test_file)
     @param num_workers the number of worker queues working on these test files
     @param test_runner_func the test runner configured to run the tests

@@ -1218,8 +1211,8 @@ def walk_and_invoke(test_files, dotest_argv, num_workers, test_runner_func):

     # Build the test work items out of the (dir, file_list) entries passed in.
     test_work_items = []
-    for entry in test_files:
-        test_work_items.append((entry[0], entry[1], dotest_argv, None))
+    for test_file in test_files:
+        test_work_items.append((test_file, dotest_argv, None))

     # Convert test work items into test results using whatever
     # was provided as the test run function.

@@ -1675,15 +1668,9 @@ def main(num_threads, test_subdir, test_runner_name, results_formatter):
             list(runner_strategies_by_name.keys())))
     test_runner_func = runner_strategies_by_name[test_runner_name]

-    # Collect the files on which we'll run the first test run phase.
-    test_files = []
-    find_test_files_in_dir_tree(
-        test_subdir, lambda tdir, tfiles: test_files.append(
-            (test_subdir, tfiles)))
-
     # Do the first test run phase.
     summary_results = walk_and_invoke(
-        test_files,
+        find_test_files_in_dir_tree(test_subdir),
         dotest_argv,
         num_threads,
         test_runner_func)
packages/Python/lldbsuite/test/lldbtest.py

@@ -705,13 +705,16 @@ class Base(unittest2.TestCase):
         """Return the full path to the current test."""
         return os.path.join(os.environ["LLDB_TEST"], self.mydir)

+    def getBuildDirBasename(self):
+        return self.__class__.__module__ + "." + self.testMethodName
+
     def getBuildDir(self):
         """Return the full path to the current test."""
         variant = self.getDebugInfo()
         if variant is None:
             variant = 'default'
         return os.path.join(os.environ["LLDB_BUILD"], self.mydir,
-                            self.testMethodName)
+                            self.getBuildDirBasename())


     def makeBuildDir(self):

@@ -1504,7 +1507,7 @@ class Base(unittest2.TestCase):
              clean=True):
         """Platform specific way to build the default binaries."""
         testdir = self.mydir
-        testname = self.testMethodName
+        testname = self.getBuildDirBasename()
         if self.getDebugInfo():
             raise Exception("buildDefault tests must set NO_DEBUG_INFO_TESTCASE")
         module = builder_module()

@@ -1522,7 +1525,7 @@ class Base(unittest2.TestCase):
              clean=True):
         """Platform specific way to build binaries with dsym info."""
         testdir = self.mydir
-        testname = self.testMethodName
+        testname = self.getBuildDirBasename()
         if self.getDebugInfo() != "dsym":
             raise Exception("NO_DEBUG_INFO_TESTCASE must build with buildDefault")

@@ -1540,7 +1543,7 @@ class Base(unittest2.TestCase):
              clean=True):
         """Platform specific way to build binaries with dwarf maps."""
         testdir = self.mydir
-        testname = self.testMethodName
+        testname = self.getBuildDirBasename()
         if self.getDebugInfo() != "dwarf":
             raise Exception("NO_DEBUG_INFO_TESTCASE must build with buildDefault")

@@ -1558,7 +1561,7 @@ class Base(unittest2.TestCase):
              clean=True):
         """Platform specific way to build binaries with dwarf maps."""
         testdir = self.mydir
-        testname = self.testMethodName
+        testname = self.getBuildDirBasename()
         if self.getDebugInfo() != "dwo":
             raise Exception("NO_DEBUG_INFO_TESTCASE must build with buildDefault")

@@ -1576,7 +1579,7 @@ class Base(unittest2.TestCase):
              clean=True):
         """Platform specific way to build binaries with gmodules info."""
         testdir = self.mydir
-        testname = self.testMethodName
+        testname = self.getBuildDirBasename()
         if self.getDebugInfo() != "gmodules":
             raise Exception("NO_DEBUG_INFO_TESTCASE must build with buildDefault")
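To see why the build directory basename now includes the module (file) name, here is a small hypothetical stand-in for lldbtest.Base that isolates the naming logic from the diff above. The real method uses self.__class__.__module__; the FakeTest class and the TestSomeFeature/TestOtherFeature names are invented for illustration only.

class FakeTest:
    # Hypothetical stand-in for lldbtest.Base; only the naming logic is shown.
    def __init__(self, module, method):
        self.module = module          # stands in for self.__class__.__module__
        self.testMethodName = method

    def getBuildDirBasename(self):
        # Module (file) name plus method name is unique across the suite;
        # the method name alone is not, since many tests name it just "test".
        return self.module + "." + self.testMethodName


a = FakeTest("TestSomeFeature", "test")
b = FakeTest("TestOtherFeature", "test")
print(a.getBuildDirBasename())   # TestSomeFeature.test
print(b.getBuildDirBasename())   # TestOtherFeature.test
assert a.getBuildDirBasename() != b.getBuildDirBasename()

Keyed on the method name alone, these two tests would have shared one build directory and clobbered each other's artifacts once tests run in parallel at file granularity.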