linux-kselftest-kunit-5.16-rc1

This KUnit update for Linux 5.16-rc1 consists of several enhancements and fixes:

- ability to run each test suite and test separately
- support for timing test run
- several fixes and improvements

-----BEGIN PGP SIGNATURE-----

iQIzBAABCgAdFiEEPZKym/RZuOCGeA/kCwJExA0NQxwFAmGBhpoACgkQCwJExA0N
Qxx2xQ//bCql+nCJdowo2er7MgYV1Jw9bkjapriV7vaakHVBOQhMItMAD3lw2WNI
SXuX4n4x4Ap4FMBd3gBQ3Flc1e7MdY/FHQIIcIE5+xDoU/ehl9ypbUMd7NrGOTI7
KMN18TJawHHyMHjKz/fFFV5Bfi97YptQMy3VB/ujdgRCIF2/bPhra5F/rFYUwdSk
GLql9CBPbomgaginzAuvnfPKoxWGjiiRZjNTlsXDyRihras0ezS7kfSYPLEV8f/i
L/ydZK+Lc3QCrWCYadBeAtq/3hkb1pV7FGKfjypvEGhGcvqEQZW7irKxE4KoLB2D
VFeVVZmK0WCfCFHtOQookTKPVwkgTzItwpXxl57ILMWo6FaB8O8tQGHqEvcF7Xfm
NIgT/V0laiUWABpWWpQjf1jnY+X1qI+s+f8eZ2eI889AduBdlrNIfCRotYz2tB0i
/GfnmVLp5pDIYZX0SxCERpu91297sDXSV+uw4mf9hjDs7189LmWppEpPENfjWebM
geFlconrtrmnWx3E3Wvqxk8pEmJwChDYVAGUUhKFZXLxYO0YvWoBM4REb9v8sv3c
0x3A1ZhpdeN5f7S6BHEAVKa3VrDHXLG9nZCK6wlaxYIiM2rOJDQN9h4BF5/RO9/2
SebYQuMA95aQgAPXjJHYKQ9fdxrR3BOgFdn0Xfp/EShEWTl/usA=
=UpDf
-----END PGP SIGNATURE-----

Merge tag 'linux-kselftest-kunit-5.16-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/shuah/linux-kselftest

Pull KUnit updates from Shuah Khan:
 "Several enhancements and fixes:

  - ability to run each test suite and test separately

  - support for timing test run

  - several fixes and improvements"

* tag 'linux-kselftest-kunit-5.16-rc1' of git://git.kernel.org/pub/scm/linux/kernel/git/shuah/linux-kselftest:
  kunit: tool: fix typecheck errors about loading qemu configs
  kunit: tool: continue past invalid utf-8 output
  kunit: Reset suite count after running tests
  kunit: tool: improve compatibility of kunit_parser with KTAP specification
  kunit: tool: yield output from run_kernel in real time
  kunit: tool: support running each suite/test separately
  kunit: tool: actually track how long it took to run tests
  kunit: tool: factor exec + parse steps into a function
  kunit: add 'kunit.action' param to allow listing out tests
  kunit: tool: show list of valid --arch options when invalid
  kunit: tool: misc fixes (unused vars, imports, leaked files)
  kunit: fix too small allocation when using suite-only kunit.filter_glob
  kunit: tool: allow filtering test cases via glob
  kunit: drop assumption in kunit-log-test about current suite
commit 313b6ffc8e
@@ -25,8 +25,8 @@ It can be handy to create a bash function like:

Running a subset of tests
-------------------------

``kunit.py run`` accepts an optional glob argument to filter tests. Currently
this only matches against suite names, but this may change in the future.
``kunit.py run`` accepts an optional glob argument to filter tests. The format
is ``"<suite_glob>[.test_glob]"``.

Say that we wanted to run the sysctl tests, we could do so via:

@@ -35,6 +35,13 @@ Say that we wanted to run the sysctl tests, we could do so via:

	$ echo -e 'CONFIG_KUNIT=y\nCONFIG_KUNIT_ALL_TESTS=y' > .kunit/.kunitconfig
	$ ./tools/testing/kunit/kunit.py run 'sysctl*'

We can filter down to just the "write" tests via:

.. code-block:: bash

	$ echo -e 'CONFIG_KUNIT=y\nCONFIG_KUNIT_ALL_TESTS=y' > .kunit/.kunitconfig
	$ ./tools/testing/kunit/kunit.py run 'sysctl*.*write*'

We're paying the cost of building more tests than we need this way, but it's
easier than fiddling with ``.kunitconfig`` files or commenting out
``kunit_suite``'s.
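To make the ``"<suite_glob>[.test_glob]"`` format concrete, here is a small illustrative Python sketch (not part of the kernel tree; the test names are invented, and ``fnmatch`` only approximates the kernel's ``glob_match()``):

.. code-block:: python

	from fnmatch import fnmatchcase

	def matches(filter_glob: str, suite: str, test: str) -> bool:
		"""Mimic "<suite_glob>[.test_glob]": without a '.', only the suite
		name is constrained; otherwise both halves must match."""
		if '.' not in filter_glob:
			return fnmatchcase(suite, filter_glob)
		suite_glob, test_glob = filter_glob.split('.', 1)
		return fnmatchcase(suite, suite_glob) and fnmatchcase(test, test_glob)

	# Hypothetical test names, purely for illustration.
	tests = [('sysctl_test', 'sysctl_test_dointvec_table_len_is_zero'),
		 ('sysctl_test', 'sysctl_test_dointvec_write_single_positive'),
		 ('example', 'example_simple_test')]

	print([t for t in tests if matches('sysctl*', *t)])          # every sysctl test
	print([t for t in tests if matches('sysctl*.*write*', *t)])  # just the "write" tests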
|
@@ -15,23 +15,89 @@ extern struct kunit_suite * const * const __kunit_suites_end[];

#if IS_BUILTIN(CONFIG_KUNIT)

static char *filter_glob_param;
static char *action_param;

module_param_named(filter_glob, filter_glob_param, charp, 0);
MODULE_PARM_DESC(filter_glob,
		"Filter which KUnit test suites run at boot-time, e.g. list*");
		"Filter which KUnit test suites/tests run at boot-time, e.g. list* or list*.*del_test");
module_param_named(action, action_param, charp, 0);
MODULE_PARM_DESC(action,
		"Changes KUnit executor behavior, valid values are:\n"
		"<none>: run the tests like normal\n"
		"'list' to list test names instead of running them.\n");

/* glob_match() needs NULL terminated strings, so we need a copy of filter_glob_param. */
struct kunit_test_filter {
	char *suite_glob;
	char *test_glob;
};

/* Split "suite_glob.test_glob" into two. Assumes filter_glob is not empty. */
static void kunit_parse_filter_glob(struct kunit_test_filter *parsed,
				    const char *filter_glob)
{
	const int len = strlen(filter_glob);
	const char *period = strchr(filter_glob, '.');

	if (!period) {
		parsed->suite_glob = kzalloc(len + 1, GFP_KERNEL);
		parsed->test_glob = NULL;
		strcpy(parsed->suite_glob, filter_glob);
		return;
	}

	parsed->suite_glob = kzalloc(period - filter_glob + 1, GFP_KERNEL);
	parsed->test_glob = kzalloc(len - (period - filter_glob) + 1, GFP_KERNEL);

	strncpy(parsed->suite_glob, filter_glob, period - filter_glob);
	strncpy(parsed->test_glob, period + 1, len - (period - filter_glob));
}

/* Create a copy of suite with only tests that match test_glob. */
static struct kunit_suite *
kunit_filter_tests(struct kunit_suite *const suite, const char *test_glob)
{
	int n = 0;
	struct kunit_case *filtered, *test_case;
	struct kunit_suite *copy;

	kunit_suite_for_each_test_case(suite, test_case) {
		if (!test_glob || glob_match(test_glob, test_case->name))
			++n;
	}

	if (n == 0)
		return NULL;

	/* Use memcpy to workaround copy->name being const. */
	copy = kmalloc(sizeof(*copy), GFP_KERNEL);
	memcpy(copy, suite, sizeof(*copy));

	filtered = kcalloc(n + 1, sizeof(*filtered), GFP_KERNEL);

	n = 0;
	kunit_suite_for_each_test_case(suite, test_case) {
		if (!test_glob || glob_match(test_glob, test_case->name))
			filtered[n++] = *test_case;
	}

	copy->test_cases = filtered;
	return copy;
}

static char *kunit_shutdown;
core_param(kunit_shutdown, kunit_shutdown, charp, 0644);

static struct kunit_suite * const *
kunit_filter_subsuite(struct kunit_suite * const * const subsuite,
		      const char *filter_glob)
		      struct kunit_test_filter *filter)
{
	int i, n = 0;
	struct kunit_suite **filtered;
	struct kunit_suite **filtered, *filtered_suite;

	n = 0;
	for (i = 0; subsuite[i] != NULL; ++i) {
		if (glob_match(filter_glob, subsuite[i]->name))
	for (i = 0; subsuite[i]; ++i) {
		if (glob_match(filter->suite_glob, subsuite[i]->name))
			++n;
	}

@@ -44,8 +110,11 @@ kunit_filter_subsuite(struct kunit_suite * const * const subsuite,

	n = 0;
	for (i = 0; subsuite[i] != NULL; ++i) {
		if (glob_match(filter_glob, subsuite[i]->name))
			filtered[n++] = subsuite[i];
		if (!glob_match(filter->suite_glob, subsuite[i]->name))
			continue;
		filtered_suite = kunit_filter_tests(subsuite[i], filter->test_glob);
		if (filtered_suite)
			filtered[n++] = filtered_suite;
	}
	filtered[n] = NULL;

@@ -57,12 +126,32 @@ struct suite_set {
	struct kunit_suite * const * const *end;
};

static void kunit_free_subsuite(struct kunit_suite * const *subsuite)
{
	unsigned int i;

	for (i = 0; subsuite[i]; i++)
		kfree(subsuite[i]);

	kfree(subsuite);
}

static void kunit_free_suite_set(struct suite_set suite_set)
{
	struct kunit_suite * const * const *suites;

	for (suites = suite_set.start; suites < suite_set.end; suites++)
		kunit_free_subsuite(*suites);
	kfree(suite_set.start);
}

static struct suite_set kunit_filter_suites(const struct suite_set *suite_set,
					    const char *filter_glob)
{
	int i;
	struct kunit_suite * const **copy, * const *filtered_subsuite;
	struct suite_set filtered;
	struct kunit_test_filter filter;

	const size_t max = suite_set->end - suite_set->start;

@@ -73,12 +162,17 @@ static struct suite_set kunit_filter_suites(const struct suite_set *suite_set,
		return filtered;
	}

	kunit_parse_filter_glob(&filter, filter_glob);

	for (i = 0; i < max; ++i) {
		filtered_subsuite = kunit_filter_subsuite(suite_set->start[i], filter_glob);
		filtered_subsuite = kunit_filter_subsuite(suite_set->start[i], &filter);
		if (filtered_subsuite)
			*copy++ = filtered_subsuite;
	}
	filtered.end = copy;

	kfree(filter.suite_glob);
	kfree(filter.test_glob);
	return filtered;
}

@@ -109,9 +203,35 @@ static void kunit_print_tap_header(struct suite_set *suite_set)
	pr_info("1..%d\n", num_of_suites);
}

int kunit_run_all_tests(void)
static void kunit_exec_run_tests(struct suite_set *suite_set)
{
	struct kunit_suite * const * const *suites;

	kunit_print_tap_header(suite_set);

	for (suites = suite_set->start; suites < suite_set->end; suites++)
		__kunit_test_suites_init(*suites);
}

static void kunit_exec_list_tests(struct suite_set *suite_set)
{
	unsigned int i;
	struct kunit_suite * const * const *suites;
	struct kunit_case *test_case;

	/* Hack: print a tap header so kunit.py can find the start of KUnit output. */
	pr_info("TAP version 14\n");

	for (suites = suite_set->start; suites < suite_set->end; suites++)
		for (i = 0; (*suites)[i] != NULL; i++) {
			kunit_suite_for_each_test_case((*suites)[i], test_case) {
				pr_info("%s.%s\n", (*suites)[i]->name, test_case->name);
			}
		}
}

int kunit_run_all_tests(void)
{
	struct suite_set suite_set = {
		.start = __kunit_suites_start,
		.end = __kunit_suites_end,

@@ -120,15 +240,15 @@ int kunit_run_all_tests(void)
	if (filter_glob_param)
		suite_set = kunit_filter_suites(&suite_set, filter_glob_param);

	kunit_print_tap_header(&suite_set);

	for (suites = suite_set.start; suites < suite_set.end; suites++)
		__kunit_test_suites_init(*suites);
	if (!action_param)
		kunit_exec_run_tests(&suite_set);
	else if (strcmp(action_param, "list") == 0)
		kunit_exec_list_tests(&suite_set);
	else
		pr_err("kunit executor: unknown action '%s'\n", action_param);

	if (filter_glob_param) { /* a copy was made of each array */
		for (suites = suite_set.start; suites < suite_set.end; suites++)
			kfree(*suites);
		kfree(suite_set.start);
		kunit_free_suite_set(suite_set);
	}

	kunit_handle_shutdown();
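As a rough picture of what the new ``kunit.action=list`` mode produces, here is a small Python model (illustrative only, not kernel code; the suite data is invented): the executor prints a dummy TAP header so kunit.py can find the start of KUnit output, then one ``suite.test`` line per test case.

.. code-block:: python

	# Invented suite data, just to show the output shape of the "list" action.
	suite_set = {
		'example': ['example_simple_test', 'example_skip_test'],
		'list-kunit-test': ['list_test_list_init', 'list_test_list_del'],
	}

	def list_tests(suite_set):
		print('TAP version 14')        # dummy header, as in kunit_exec_list_tests()
		for suite, cases in suite_set.items():
			for case in cases:
				print(f'{suite}.{case}')

	list_tests(suite_set)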
|
|
@ -9,38 +9,103 @@
|
|||
#include <kunit/test.h>
|
||||
|
||||
static void kfree_at_end(struct kunit *test, const void *to_free);
|
||||
static void free_subsuite_at_end(struct kunit *test,
|
||||
struct kunit_suite *const *to_free);
|
||||
static struct kunit_suite *alloc_fake_suite(struct kunit *test,
|
||||
const char *suite_name);
|
||||
const char *suite_name,
|
||||
struct kunit_case *test_cases);
|
||||
|
||||
static void dummy_test(struct kunit *test) {}
|
||||
|
||||
static struct kunit_case dummy_test_cases[] = {
|
||||
/* .run_case is not important, just needs to be non-NULL */
|
||||
{ .name = "test1", .run_case = dummy_test },
|
||||
{ .name = "test2", .run_case = dummy_test },
|
||||
{},
|
||||
};
|
||||
|
||||
static void parse_filter_test(struct kunit *test)
|
||||
{
|
||||
struct kunit_test_filter filter = {NULL, NULL};
|
||||
|
||||
kunit_parse_filter_glob(&filter, "suite");
|
||||
KUNIT_EXPECT_STREQ(test, filter.suite_glob, "suite");
|
||||
KUNIT_EXPECT_FALSE(test, filter.test_glob);
|
||||
kfree(filter.suite_glob);
|
||||
kfree(filter.test_glob);
|
||||
|
||||
kunit_parse_filter_glob(&filter, "suite.test");
|
||||
KUNIT_EXPECT_STREQ(test, filter.suite_glob, "suite");
|
||||
KUNIT_EXPECT_STREQ(test, filter.test_glob, "test");
|
||||
kfree(filter.suite_glob);
|
||||
kfree(filter.test_glob);
|
||||
}
|
||||
|
||||
static void filter_subsuite_test(struct kunit *test)
|
||||
{
|
||||
struct kunit_suite *subsuite[3] = {NULL, NULL, NULL};
|
||||
struct kunit_suite * const *filtered;
|
||||
struct kunit_test_filter filter = {
|
||||
.suite_glob = "suite2",
|
||||
.test_glob = NULL,
|
||||
};
|
||||
|
||||
subsuite[0] = alloc_fake_suite(test, "suite1");
|
||||
subsuite[1] = alloc_fake_suite(test, "suite2");
|
||||
subsuite[0] = alloc_fake_suite(test, "suite1", dummy_test_cases);
|
||||
subsuite[1] = alloc_fake_suite(test, "suite2", dummy_test_cases);
|
||||
|
||||
/* Want: suite1, suite2, NULL -> suite2, NULL */
|
||||
filtered = kunit_filter_subsuite(subsuite, "suite2*");
|
||||
filtered = kunit_filter_subsuite(subsuite, &filter);
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered);
|
||||
kfree_at_end(test, filtered);
|
||||
free_subsuite_at_end(test, filtered);
|
||||
|
||||
/* Validate we just have suite2 */
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered[0]);
|
||||
KUNIT_EXPECT_STREQ(test, (const char *)filtered[0]->name, "suite2");
|
||||
|
||||
KUNIT_EXPECT_FALSE(test, filtered[1]);
|
||||
}
|
||||
|
||||
static void filter_subsuite_test_glob_test(struct kunit *test)
|
||||
{
|
||||
struct kunit_suite *subsuite[3] = {NULL, NULL, NULL};
|
||||
struct kunit_suite * const *filtered;
|
||||
struct kunit_test_filter filter = {
|
||||
.suite_glob = "suite2",
|
||||
.test_glob = "test2",
|
||||
};
|
||||
|
||||
subsuite[0] = alloc_fake_suite(test, "suite1", dummy_test_cases);
|
||||
subsuite[1] = alloc_fake_suite(test, "suite2", dummy_test_cases);
|
||||
|
||||
/* Want: suite1, suite2, NULL -> suite2 (just test1), NULL */
|
||||
filtered = kunit_filter_subsuite(subsuite, &filter);
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered);
|
||||
free_subsuite_at_end(test, filtered);
|
||||
|
||||
/* Validate we just have suite2 */
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered[0]);
|
||||
KUNIT_EXPECT_STREQ(test, (const char *)filtered[0]->name, "suite2");
|
||||
KUNIT_EXPECT_FALSE(test, filtered[1]);
|
||||
|
||||
/* Now validate we just have test2 */
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered[0]->test_cases);
|
||||
KUNIT_EXPECT_STREQ(test, (const char *)filtered[0]->test_cases[0].name, "test2");
|
||||
KUNIT_EXPECT_FALSE(test, filtered[0]->test_cases[1].name);
|
||||
}
|
||||
|
||||
static void filter_subsuite_to_empty_test(struct kunit *test)
|
||||
{
|
||||
struct kunit_suite *subsuite[3] = {NULL, NULL, NULL};
|
||||
struct kunit_suite * const *filtered;
|
||||
struct kunit_test_filter filter = {
|
||||
.suite_glob = "not_found",
|
||||
.test_glob = NULL,
|
||||
};
|
||||
|
||||
subsuite[0] = alloc_fake_suite(test, "suite1");
|
||||
subsuite[1] = alloc_fake_suite(test, "suite2");
|
||||
subsuite[0] = alloc_fake_suite(test, "suite1", dummy_test_cases);
|
||||
subsuite[1] = alloc_fake_suite(test, "suite2", dummy_test_cases);
|
||||
|
||||
filtered = kunit_filter_subsuite(subsuite, "not_found");
|
||||
kfree_at_end(test, filtered); /* just in case */
|
||||
filtered = kunit_filter_subsuite(subsuite, &filter);
|
||||
free_subsuite_at_end(test, filtered); /* just in case */
|
||||
|
||||
KUNIT_EXPECT_FALSE_MSG(test, filtered,
|
||||
"should be NULL to indicate no match");
|
||||
|
@ -52,7 +117,7 @@ static void kfree_subsuites_at_end(struct kunit *test, struct suite_set *suite_s
|
|||
|
||||
kfree_at_end(test, suite_set->start);
|
||||
for (suites = suite_set->start; suites < suite_set->end; suites++)
|
||||
kfree_at_end(test, *suites);
|
||||
free_subsuite_at_end(test, *suites);
|
||||
}
|
||||
|
||||
static void filter_suites_test(struct kunit *test)
|
||||
|
@ -74,8 +139,8 @@ static void filter_suites_test(struct kunit *test)
|
|||
struct suite_set filtered = {.start = NULL, .end = NULL};
|
||||
|
||||
/* Emulate two files, each having one suite */
|
||||
subsuites[0][0] = alloc_fake_suite(test, "suite0");
|
||||
subsuites[1][0] = alloc_fake_suite(test, "suite1");
|
||||
subsuites[0][0] = alloc_fake_suite(test, "suite0", dummy_test_cases);
|
||||
subsuites[1][0] = alloc_fake_suite(test, "suite1", dummy_test_cases);
|
||||
|
||||
/* Filter out suite1 */
|
||||
filtered = kunit_filter_suites(&suite_set, "suite0");
|
||||
|
@ -84,11 +149,14 @@ static void filter_suites_test(struct kunit *test)
|
|||
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered.start);
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered.start[0]);
|
||||
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, filtered.start[0][0]);
|
||||
KUNIT_EXPECT_STREQ(test, (const char *)filtered.start[0][0]->name, "suite0");
|
||||
}
|
||||
|
||||
static struct kunit_case executor_test_cases[] = {
|
||||
KUNIT_CASE(parse_filter_test),
|
||||
KUNIT_CASE(filter_subsuite_test),
|
||||
KUNIT_CASE(filter_subsuite_test_glob_test),
|
||||
KUNIT_CASE(filter_subsuite_to_empty_test),
|
||||
KUNIT_CASE(filter_suites_test),
|
||||
{}
|
||||
|
@ -120,14 +188,30 @@ static void kfree_at_end(struct kunit *test, const void *to_free)
|
|||
(void *)to_free);
|
||||
}
|
||||
|
||||
static void free_subsuite_res_free(struct kunit_resource *res)
|
||||
{
|
||||
kunit_free_subsuite(res->data);
|
||||
}
|
||||
|
||||
static void free_subsuite_at_end(struct kunit *test,
|
||||
struct kunit_suite *const *to_free)
|
||||
{
|
||||
if (IS_ERR_OR_NULL(to_free))
|
||||
return;
|
||||
kunit_alloc_resource(test, NULL, free_subsuite_res_free,
|
||||
GFP_KERNEL, (void *)to_free);
|
||||
}
|
||||
|
||||
static struct kunit_suite *alloc_fake_suite(struct kunit *test,
|
||||
const char *suite_name)
|
||||
const char *suite_name,
|
||||
struct kunit_case *test_cases)
|
||||
{
|
||||
struct kunit_suite *suite;
|
||||
|
||||
/* We normally never expect to allocate suites, hence the non-const cast. */
|
||||
suite = kunit_kzalloc(test, sizeof(*suite), GFP_KERNEL);
|
||||
strncpy((char *)suite->name, suite_name, sizeof(suite->name) - 1);
|
||||
suite->test_cases = test_cases;
|
||||
|
||||
return suite;
|
||||
}
|
||||
|
|
|
@@ -415,12 +415,15 @@ static struct kunit_suite kunit_log_test_suite = {

static void kunit_log_test(struct kunit *test)
{
	struct kunit_suite *suite = &kunit_log_test_suite;
	struct kunit_suite suite;

	suite.log = kunit_kzalloc(test, KUNIT_LOG_SIZE, GFP_KERNEL);
	KUNIT_ASSERT_NOT_ERR_OR_NULL(test, suite.log);

	kunit_log(KERN_INFO, test, "put this in log.");
	kunit_log(KERN_INFO, test, "this too.");
	kunit_log(KERN_INFO, suite, "add to suite log.");
	kunit_log(KERN_INFO, suite, "along with this.");
	kunit_log(KERN_INFO, &suite, "add to suite log.");
	kunit_log(KERN_INFO, &suite, "along with this.");

#ifdef CONFIG_KUNIT_DEBUGFS
	KUNIT_EXPECT_NOT_ERR_OR_NULL(test,

@@ -428,12 +431,11 @@ static void kunit_log_test(struct kunit *test)
	KUNIT_EXPECT_NOT_ERR_OR_NULL(test,
				     strstr(test->log, "this too."));
	KUNIT_EXPECT_NOT_ERR_OR_NULL(test,
				     strstr(suite->log, "add to suite log."));
				     strstr(suite.log, "add to suite log."));
	KUNIT_EXPECT_NOT_ERR_OR_NULL(test,
				     strstr(suite->log, "along with this."));
				     strstr(suite.log, "along with this."));
#else
	KUNIT_EXPECT_PTR_EQ(test, test->log, (char *)NULL);
	KUNIT_EXPECT_PTR_EQ(test, suite->log, (char *)NULL);
#endif
}
|
|
|
@@ -190,10 +190,10 @@ enum kunit_status kunit_suite_has_succeeded(struct kunit_suite *suite)
}
EXPORT_SYMBOL_GPL(kunit_suite_has_succeeded);

static size_t kunit_suite_counter = 1;

static void kunit_print_subtest_end(struct kunit_suite *suite)
{
	static size_t kunit_suite_counter = 1;

	kunit_print_ok_not_ok((void *)suite, false,
			      kunit_suite_has_succeeded(suite),
			      kunit_suite_counter++,

@@ -583,6 +583,8 @@ void __kunit_test_suites_exit(struct kunit_suite **suites)

	for (i = 0; suites[i] != NULL; i++)
		kunit_exit_suite(suites[i]);

	kunit_suite_counter = 1;
}
EXPORT_SYMBOL_GPL(__kunit_test_suites_exit);
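A toy model of the ``kunit_suite_counter`` change above (a sketch, not kernel code): the counter that numbers the ``ok N - <suite>`` lines now lives at file scope, and ``__kunit_test_suites_exit()`` resets it, so the next batch of suites is numbered from 1 again.

.. code-block:: python

	suite_counter = 1

	def print_suite_result(name, ok=True):
		global suite_counter
		print(f"{'ok' if ok else 'not ok'} {suite_counter} - {name}")
		suite_counter += 1

	def suites_exit():
		global suite_counter
		suite_counter = 1      # mirrors the reset added to __kunit_test_suites_exit()

	print_suite_result('suite_a')   # ok 1 - suite_a
	print_suite_result('suite_b')   # ok 2 - suite_b
	suites_exit()
	print_suite_result('suite_a')   # ok 1 - suite_a, numbering restarts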
|
|
@@ -8,17 +8,17 @@
# Author: Brendan Higgins <brendanhiggins@google.com>

import argparse
import sys
import os
import re
import sys
import time

assert sys.version_info >= (3, 7), "Python version is too old"

from collections import namedtuple
from enum import Enum, auto
from typing import Iterable, Sequence
from typing import Iterable, Sequence, List

import kunit_config
import kunit_json
import kunit_kernel
import kunit_parser

@@ -31,13 +31,13 @@ KunitBuildRequest = namedtuple('KunitBuildRequest',
				['jobs', 'build_dir', 'alltests',
				 'make_options'])
KunitExecRequest = namedtuple('KunitExecRequest',
				['timeout', 'build_dir', 'alltests',
				 'filter_glob', 'kernel_args'])
				['timeout', 'build_dir', 'alltests',
				 'filter_glob', 'kernel_args', 'run_isolated'])
KunitParseRequest = namedtuple('KunitParseRequest',
				['raw_output', 'input_data', 'build_dir', 'json'])
				['raw_output', 'build_dir', 'json'])
KunitRequest = namedtuple('KunitRequest', ['raw_output','timeout', 'jobs',
					   'build_dir', 'alltests', 'filter_glob',
					   'kernel_args', 'json', 'make_options'])
					   'kernel_args', 'run_isolated', 'json', 'make_options'])

KernelDirectoryPath = sys.argv[0].split('tools/testing/kunit/')[0]

@@ -91,31 +91,93 @@ def build_tests(linux: kunit_kernel.LinuxSourceTree,
			   'built kernel successfully',
			   build_end - build_start)

def exec_tests(linux: kunit_kernel.LinuxSourceTree,
	       request: KunitExecRequest) -> KunitResult:
	kunit_parser.print_with_timestamp('Starting KUnit Kernel ...')
	test_start = time.time()
	result = linux.run_kernel(
		args=request.kernel_args,
		timeout=None if request.alltests else request.timeout,
		filter_glob=request.filter_glob,
		build_dir=request.build_dir)
def _list_tests(linux: kunit_kernel.LinuxSourceTree, request: KunitExecRequest) -> List[str]:
	args = ['kunit.action=list']
	if request.kernel_args:
		args.extend(request.kernel_args)

	test_end = time.time()
	output = linux.run_kernel(args=args,
			   timeout=None if request.alltests else request.timeout,
			   filter_glob=request.filter_glob,
			   build_dir=request.build_dir)
	lines = kunit_parser.extract_tap_lines(output)
	# Hack! Drop the dummy TAP version header that the executor prints out.
	lines.pop()

	return KunitResult(KunitStatus.SUCCESS,
			   result,
			   test_end - test_start)
	# Filter out any extraneous non-test output that might have gotten mixed in.
	return [l for l in lines if re.match('^[^\s.]+\.[^\s.]+$', l)]

def parse_tests(request: KunitParseRequest) -> KunitResult:
def _suites_from_test_list(tests: List[str]) -> List[str]:
	"""Extracts all the suites from an ordered list of tests."""
	suites = [] # type: List[str]
	for t in tests:
		parts = t.split('.', maxsplit=2)
		if len(parts) != 2:
			raise ValueError(f'internal KUnit error, test name should be of the form "<suite>.<test>", got "{t}"')
		suite, case = parts
		if not suites or suites[-1] != suite:
			suites.append(suite)
	return suites


def exec_tests(linux: kunit_kernel.LinuxSourceTree, request: KunitExecRequest,
	       parse_request: KunitParseRequest) -> KunitResult:
	filter_globs = [request.filter_glob]
	if request.run_isolated:
		tests = _list_tests(linux, request)
		if request.run_isolated == 'test':
			filter_globs = tests
		if request.run_isolated == 'suite':
			filter_globs = _suites_from_test_list(tests)
			# Apply the test-part of the user's glob, if present.
			if '.' in request.filter_glob:
				test_glob = request.filter_glob.split('.', maxsplit=2)[1]
				filter_globs = [g + '.'+ test_glob for g in filter_globs]

	test_counts = kunit_parser.TestCounts()
	exec_time = 0.0
	for i, filter_glob in enumerate(filter_globs):
		kunit_parser.print_with_timestamp('Starting KUnit Kernel ({}/{})...'.format(i+1, len(filter_globs)))

		test_start = time.time()
		run_result = linux.run_kernel(
			args=request.kernel_args,
			timeout=None if request.alltests else request.timeout,
			filter_glob=filter_glob,
			build_dir=request.build_dir)

		result = parse_tests(parse_request, run_result)
		# run_kernel() doesn't block on the kernel exiting.
		# That only happens after we get the last line of output from `run_result`.
		# So exec_time here actually contains parsing + execution time, which is fine.
		test_end = time.time()
		exec_time += test_end - test_start

		test_counts.add_subtest_counts(result.result.test.counts)

	kunit_status = _map_to_overall_status(test_counts.get_status())
	return KunitResult(status=kunit_status, result=result.result, elapsed_time=exec_time)

def _map_to_overall_status(test_status: kunit_parser.TestStatus) -> KunitStatus:
	if test_status in (kunit_parser.TestStatus.SUCCESS, kunit_parser.TestStatus.SKIPPED):
		return KunitStatus.SUCCESS
	else:
		return KunitStatus.TEST_FAILURE

def parse_tests(request: KunitParseRequest, input_data: Iterable[str]) -> KunitResult:
	parse_start = time.time()

	test_result = kunit_parser.TestResult(kunit_parser.TestStatus.SUCCESS,
					      [],
					      kunit_parser.Test(),
					      'Tests not Parsed.')

	if request.raw_output:
		output: Iterable[str] = request.input_data
		# Treat unparsed results as one passing test.
		test_result.test.status = kunit_parser.TestStatus.SUCCESS
		test_result.test.counts.passed = 1

		output: Iterable[str] = input_data
		if request.raw_output == 'all':
			pass
		elif request.raw_output == 'kunit':
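The ``--run_isolated`` flow in ``exec_tests()`` above can be condensed into a standalone sketch (simplified; the helper name and inputs are illustrative): the tool first asks the kernel for the full ``suite.test`` list, then derives one filter glob per test or per suite, re-applying any test part of the user's original glob.

.. code-block:: python

	from typing import List

	def expand_filter_globs(tests: List[str], user_glob: str, run_isolated: str) -> List[str]:
		# `tests` is what kunit.action=list reported, e.g. ['suite.test1', 'suite2.test1'].
		if run_isolated == 'test':
			return tests
		# 'suite': keep each suite once, preserving order.
		globs = []
		for t in tests:
			suite = t.split('.', 1)[0]
			if not globs or globs[-1] != suite:
				globs.append(suite)
		# Re-apply the test part of the user's glob, if present.
		if '.' in user_glob:
			test_glob = user_glob.split('.', 1)[1]
			globs = [g + '.' + test_glob for g in globs]
		return globs

	print(expand_filter_globs(['suite.test1', 'suite.test2', 'suite2.test1'],
				  'suite*.test*', 'suite'))
	# -> ['suite.test*', 'suite2.test*'], i.e. one kernel boot per suite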
|
@ -126,7 +188,7 @@ def parse_tests(request: KunitParseRequest) -> KunitResult:
|
|||
print(line.rstrip())
|
||||
|
||||
else:
|
||||
test_result = kunit_parser.parse_run_tests(request.input_data)
|
||||
test_result = kunit_parser.parse_run_tests(input_data)
|
||||
parse_end = time.time()
|
||||
|
||||
if request.json:
|
||||
|
@ -164,16 +226,12 @@ def run_tests(linux: kunit_kernel.LinuxSourceTree,
|
|||
|
||||
exec_request = KunitExecRequest(request.timeout, request.build_dir,
|
||||
request.alltests, request.filter_glob,
|
||||
request.kernel_args)
|
||||
exec_result = exec_tests(linux, exec_request)
|
||||
if exec_result.status != KunitStatus.SUCCESS:
|
||||
return exec_result
|
||||
|
||||
request.kernel_args, request.run_isolated)
|
||||
parse_request = KunitParseRequest(request.raw_output,
|
||||
exec_result.result,
|
||||
request.build_dir,
|
||||
request.json)
|
||||
parse_result = parse_tests(parse_request)
|
||||
|
||||
exec_result = exec_tests(linux, exec_request, parse_request)
|
||||
|
||||
run_end = time.time()
|
||||
|
||||
|
@ -184,7 +242,7 @@ def run_tests(linux: kunit_kernel.LinuxSourceTree,
|
|||
config_result.elapsed_time,
|
||||
build_result.elapsed_time,
|
||||
exec_result.elapsed_time))
|
||||
return parse_result
|
||||
return exec_result
|
||||
|
||||
# Problem:
|
||||
# $ kunit.py run --json
|
||||
|
@ -263,9 +321,8 @@ def add_exec_opts(parser) -> None:
|
|||
default=300,
|
||||
metavar='timeout')
|
||||
parser.add_argument('filter_glob',
|
||||
help='maximum number of seconds to allow for all tests '
|
||||
'to run. This does not include time taken to build the '
|
||||
'tests.',
|
||||
help='Filter which KUnit test suites/tests run at '
|
||||
'boot-time, e.g. list* or list*.*del_test',
|
||||
type=str,
|
||||
nargs='?',
|
||||
default='',
|
||||
|
@@ -273,6 +330,12 @@
	parser.add_argument('--kernel_args',
			    help='Kernel command-line parameters. Maybe be repeated',
			    action='append')
	parser.add_argument('--run_isolated', help='If set, boot the kernel for each '
			    'individual suite/test. This is can be useful for debugging '
			    'a non-hermetic test, one that might pass/fail based on '
			    'what ran before it.',
			    type=str,
			    choices=['suite', 'test']),

def add_parse_opts(parser) -> None:
	parser.add_argument('--raw_output', help='If set don\'t format output from kernel. '
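Since ``--kernel_args`` uses ``action='append'`` and ``--run_isolated`` is restricted via ``choices``, a tiny standalone argparse sketch (not the tool itself) shows how the two options behave:

.. code-block:: python

	import argparse

	parser = argparse.ArgumentParser()
	parser.add_argument('--kernel_args', action='append')   # may be repeated
	parser.add_argument('--run_isolated', type=str, choices=['suite', 'test'])

	args = parser.parse_args(['--kernel_args=a=1', '--kernel_args=b=2',
				  '--run_isolated=suite'])
	print(args.kernel_args)   # ['a=1', 'b=2']
	print(args.run_isolated)  # suite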
||||
|
@ -346,6 +409,7 @@ def main(argv, linux=None):
|
|||
cli_args.alltests,
|
||||
cli_args.filter_glob,
|
||||
cli_args.kernel_args,
|
||||
cli_args.run_isolated,
|
||||
cli_args.json,
|
||||
cli_args.make_options)
|
||||
result = run_tests(linux, request)
|
||||
|
@@ -401,29 +465,27 @@
					cli_args.build_dir,
					cli_args.alltests,
					cli_args.filter_glob,
					cli_args.kernel_args)
		exec_result = exec_tests(linux, exec_request)
					cli_args.kernel_args,
					cli_args.run_isolated)
		parse_request = KunitParseRequest(cli_args.raw_output,
						  exec_result.result,
						  cli_args.build_dir,
						  cli_args.json)
		result = parse_tests(parse_request)
		result = exec_tests(linux, exec_request, parse_request)
		kunit_parser.print_with_timestamp((
			'Elapsed time: %.3fs\n') % (
			exec_result.elapsed_time))
			'Elapsed time: %.3fs\n') % (result.elapsed_time))
		if result.status != KunitStatus.SUCCESS:
			sys.exit(1)
	elif cli_args.subcommand == 'parse':
		if cli_args.file == None:
			sys.stdin.reconfigure(errors='backslashreplace') # pytype: disable=attribute-error
			kunit_output = sys.stdin
		else:
			with open(cli_args.file, 'r') as f:
			with open(cli_args.file, 'r', errors='backslashreplace') as f:
				kunit_output = f.read().splitlines()
		request = KunitParseRequest(cli_args.raw_output,
					    kunit_output,
					    None,
					    cli_args.json)
		result = parse_tests(request)
		result = parse_tests(request, kunit_output)
		if result.status != KunitStatus.SUCCESS:
			sys.exit(1)
	else:
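The ``errors='backslashreplace'`` additions above are what let the tool continue past invalid UTF-8 in kernel output. A minimal standalone sketch of the same decoding behaviour:

.. code-block:: python

	# Bytes that are not valid UTF-8 no longer raise; they become visible escapes.
	raw = b'ok 1 - example \xfe\xff trailing garbage\n'
	line = raw.decode('utf-8', errors='backslashreplace')
	print(line)   # ok 1 - example \xfe\xff trailing garbage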
|
|
@ -11,47 +11,47 @@ import os
|
|||
|
||||
import kunit_parser
|
||||
|
||||
from kunit_parser import TestStatus
|
||||
from kunit_parser import Test, TestResult, TestStatus
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
def get_json_result(test_result, def_config, build_dir, json_path) -> str:
|
||||
sub_groups = []
|
||||
JsonObj = Dict[str, Any]
|
||||
|
||||
# Each test suite is mapped to a KernelCI sub_group
|
||||
for test_suite in test_result.suites:
|
||||
sub_group = {
|
||||
"name": test_suite.name,
|
||||
"arch": "UM",
|
||||
"defconfig": def_config,
|
||||
"build_environment": build_dir,
|
||||
"test_cases": [],
|
||||
"lab_name": None,
|
||||
"kernel": None,
|
||||
"job": None,
|
||||
"git_branch": "kselftest",
|
||||
}
|
||||
test_cases = []
|
||||
# TODO: Add attachments attribute in test_case with detailed
|
||||
# failure message, see https://api.kernelci.org/schema-test-case.html#get
|
||||
for case in test_suite.cases:
|
||||
test_case = {"name": case.name, "status": "FAIL"}
|
||||
if case.status == TestStatus.SUCCESS:
|
||||
def _get_group_json(test: Test, def_config: str,
|
||||
build_dir: Optional[str]) -> JsonObj:
|
||||
sub_groups = [] # List[JsonObj]
|
||||
test_cases = [] # List[JsonObj]
|
||||
|
||||
for subtest in test.subtests:
|
||||
if len(subtest.subtests):
|
||||
sub_group = _get_group_json(subtest, def_config,
|
||||
build_dir)
|
||||
sub_groups.append(sub_group)
|
||||
else:
|
||||
test_case = {"name": subtest.name, "status": "FAIL"}
|
||||
if subtest.status == TestStatus.SUCCESS:
|
||||
test_case["status"] = "PASS"
|
||||
elif case.status == TestStatus.TEST_CRASHED:
|
||||
elif subtest.status == TestStatus.TEST_CRASHED:
|
||||
test_case["status"] = "ERROR"
|
||||
test_cases.append(test_case)
|
||||
sub_group["test_cases"] = test_cases
|
||||
sub_groups.append(sub_group)
|
||||
|
||||
test_group = {
|
||||
"name": "KUnit Test Group",
|
||||
"name": test.name,
|
||||
"arch": "UM",
|
||||
"defconfig": def_config,
|
||||
"build_environment": build_dir,
|
||||
"sub_groups": sub_groups,
|
||||
"test_cases": test_cases,
|
||||
"lab_name": None,
|
||||
"kernel": None,
|
||||
"job": None,
|
||||
"git_branch": "kselftest",
|
||||
}
|
||||
return test_group
|
||||
|
||||
def get_json_result(test_result: TestResult, def_config: str,
|
||||
build_dir: Optional[str], json_path: str) -> str:
|
||||
test_group = _get_group_json(test_result.test, def_config, build_dir)
|
||||
test_group["name"] = "KUnit Test Group"
|
||||
json_obj = json.dumps(test_group, indent=4)
|
||||
if json_path != 'stdout':
|
||||
with open(json_path, 'w') as result_path:
|
||||
|
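The rewritten kunit_json.py above maps the parsed test tree onto nested KernelCI-style groups. Roughly, the output has this shape, sketched as a Python literal (the concrete values here are placeholders):

.. code-block:: python

	example_json = {
		'name': 'KUnit Test Group',
		'arch': 'UM',
		'defconfig': 'kunit_defconfig',   # placeholder
		'build_environment': '.kunit',    # placeholder
		'sub_groups': [                   # one group per suite (or nested subtest group)
			{
				'name': 'example',
				'sub_groups': [],
				'test_cases': [
					{'name': 'example_simple_test', 'status': 'PASS'},
				],
				# ...plus the same arch/defconfig/lab_name/kernel/job/git_branch fields
			},
		],
		'test_cases': [],
		'lab_name': None,
		'kernel': None,
		'job': None,
		'git_branch': 'kselftest',
	}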
|
|
@ -12,11 +12,8 @@ import subprocess
|
|||
import os
|
||||
import shutil
|
||||
import signal
|
||||
from typing import Iterator, Optional, Tuple
|
||||
|
||||
from contextlib import ExitStack
|
||||
|
||||
from collections import namedtuple
|
||||
import threading
|
||||
from typing import Iterator, List, Optional, Tuple
|
||||
|
||||
import kunit_config
|
||||
import kunit_parser
|
||||
|
@ -103,8 +100,8 @@ class LinuxSourceTreeOperations(object):
|
|||
if stderr: # likely only due to build warnings
|
||||
print(stderr.decode())
|
||||
|
||||
def run(self, params, timeout, build_dir, outfile) -> None:
|
||||
pass
|
||||
def start(self, params: List[str], build_dir: str) -> subprocess.Popen:
|
||||
raise RuntimeError('not implemented!')
|
||||
|
||||
|
||||
class LinuxSourceTreeOperationsQemu(LinuxSourceTreeOperations):
|
||||
|
@ -123,7 +120,7 @@ class LinuxSourceTreeOperationsQemu(LinuxSourceTreeOperations):
|
|||
kconfig.parse_from_string(self._kconfig)
|
||||
base_kunitconfig.merge_in_entries(kconfig)
|
||||
|
||||
def run(self, params, timeout, build_dir, outfile):
|
||||
def start(self, params: List[str], build_dir: str) -> subprocess.Popen:
|
||||
kernel_path = os.path.join(build_dir, self._kernel_path)
|
||||
qemu_command = ['qemu-system-' + self._qemu_arch,
|
||||
'-nodefaults',
|
||||
|
@ -134,18 +131,11 @@ class LinuxSourceTreeOperationsQemu(LinuxSourceTreeOperations):
|
|||
'-nographic',
|
||||
'-serial stdio'] + self._extra_qemu_params
|
||||
print('Running tests with:\n$', ' '.join(qemu_command))
|
||||
with open(outfile, 'w') as output:
|
||||
process = subprocess.Popen(' '.join(qemu_command),
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=output,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True, shell=True)
|
||||
try:
|
||||
process.wait(timeout=timeout)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
process.terminate()
|
||||
return process
|
||||
return subprocess.Popen(' '.join(qemu_command),
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True, shell=True, errors='backslashreplace')
|
||||
|
||||
class LinuxSourceTreeOperationsUml(LinuxSourceTreeOperations):
|
||||
"""An abstraction over command line operations performed on a source tree."""
|
||||
|
@ -168,24 +158,21 @@ class LinuxSourceTreeOperationsUml(LinuxSourceTreeOperations):
|
|||
process.wait()
|
||||
kunit_parser.print_with_timestamp(
|
||||
'Disabling broken configs to run KUnit tests...')
|
||||
with ExitStack() as es:
|
||||
config = open(get_kconfig_path(build_dir), 'a')
|
||||
disable = open(BROKEN_ALLCONFIG_PATH, 'r').read()
|
||||
config.write(disable)
|
||||
|
||||
with open(get_kconfig_path(build_dir), 'a') as config:
|
||||
with open(BROKEN_ALLCONFIG_PATH, 'r') as disable:
|
||||
config.write(disable.read())
|
||||
kunit_parser.print_with_timestamp(
|
||||
'Starting Kernel with all configs takes a few minutes...')
|
||||
|
||||
def run(self, params, timeout, build_dir, outfile):
|
||||
def start(self, params: List[str], build_dir: str) -> subprocess.Popen:
|
||||
"""Runs the Linux UML binary. Must be named 'linux'."""
|
||||
linux_bin = get_file_path(build_dir, 'linux')
|
||||
outfile = get_outfile_path(build_dir)
|
||||
with open(outfile, 'w') as output:
|
||||
process = subprocess.Popen([linux_bin] + params,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=output,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True)
|
||||
process.wait(timeout)
|
||||
return subprocess.Popen([linux_bin] + params,
|
||||
stdin=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True, errors='backslashreplace')
|
||||
|
||||
def get_kconfig_path(build_dir) -> str:
|
||||
return get_file_path(build_dir, KCONFIG_PATH)
|
||||
|
@ -202,8 +189,9 @@ def get_source_tree_ops(arch: str, cross_compile: Optional[str]) -> LinuxSourceT
|
|||
return LinuxSourceTreeOperationsUml(cross_compile=cross_compile)
|
||||
elif os.path.isfile(config_path):
|
||||
return get_source_tree_ops_from_qemu_config(config_path, cross_compile)[1]
|
||||
else:
|
||||
raise ConfigError(arch + ' is not a valid arch')
|
||||
|
||||
options = [f[:-3] for f in os.listdir(QEMU_CONFIGS_DIR) if f.endswith('.py')]
|
||||
raise ConfigError(arch + ' is not a valid arch, options are ' + str(sorted(options)))
|
||||
|
||||
def get_source_tree_ops_from_qemu_config(config_path: str,
|
||||
cross_compile: Optional[str]) -> Tuple[
|
||||
|
@ -219,12 +207,15 @@ def get_source_tree_ops_from_qemu_config(config_path: str,
|
|||
module_path = '.' + os.path.join(os.path.basename(QEMU_CONFIGS_DIR), os.path.basename(config_path))
|
||||
spec = importlib.util.spec_from_file_location(module_path, config_path)
|
||||
config = importlib.util.module_from_spec(spec)
|
||||
# TODO(brendanhiggins@google.com): I looked this up and apparently other
|
||||
# Python projects have noted that pytype complains that "No attribute
|
||||
# 'exec_module' on _importlib_modulespec._Loader". Disabling for now.
|
||||
spec.loader.exec_module(config) # pytype: disable=attribute-error
|
||||
return config.QEMU_ARCH.linux_arch, LinuxSourceTreeOperationsQemu(
|
||||
config.QEMU_ARCH, cross_compile=cross_compile)
|
||||
# See https://github.com/python/typeshed/pull/2626 for context.
|
||||
assert isinstance(spec.loader, importlib.abc.Loader)
|
||||
spec.loader.exec_module(config)
|
||||
|
||||
if not hasattr(config, 'QEMU_ARCH'):
|
||||
raise ValueError('qemu_config module missing "QEMU_ARCH": ' + config_path)
|
||||
params: qemu_config.QemuArchParams = config.QEMU_ARCH # type: ignore
|
||||
return params.linux_arch, LinuxSourceTreeOperationsQemu(
|
||||
params, cross_compile=cross_compile)
|
||||
|
||||
class LinuxSourceTree(object):
|
||||
"""Represents a Linux kernel source tree with KUnit tests."""
|
||||
|
@@ -330,12 +321,36 @@ class LinuxSourceTree(object):
		args.extend(['mem=1G', 'console=tty', 'kunit_shutdown=halt'])
		if filter_glob:
			args.append('kunit.filter_glob='+filter_glob)
		outfile = get_outfile_path(build_dir)
		self._ops.run(args, timeout, build_dir, outfile)
		subprocess.call(['stty', 'sane'])
		with open(outfile, 'r') as file:
			for line in file:

		process = self._ops.start(args, build_dir)
		assert process.stdout is not None # tell mypy it's set

		# Enforce the timeout in a background thread.
		def _wait_proc():
			try:
				process.wait(timeout=timeout)
			except Exception as e:
				print(e)
				process.terminate()
				process.wait()
		waiter = threading.Thread(target=_wait_proc)
		waiter.start()

		output = open(get_outfile_path(build_dir), 'w')
		try:
			# Tee the output to the file and to our caller in real time.
			for line in process.stdout:
				output.write(line)
				yield line
		# This runs even if our caller doesn't consume every line.
		finally:
			# Flush any leftover output to the file
			output.write(process.stdout.read())
			output.close()
			process.stdout.close()

			waiter.join()
			subprocess.call(['stty', 'sane'])

	def signal_handler(self, sig, frame) -> None:
		logging.error('Build interruption occurred. Cleaning console.')
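The new ``run_kernel()`` body above streams kernel output as it is produced instead of waiting for the process to exit. A condensed standalone sketch of the same pattern (generic command and path, simplified error handling):

.. code-block:: python

	import subprocess
	import threading

	def stream_command(argv, outfile_path, timeout=None):
		"""Yield a command's output line by line while teeing it to a file."""
		process = subprocess.Popen(argv, stdout=subprocess.PIPE,
					   stderr=subprocess.STDOUT, text=True,
					   errors='backslashreplace')

		def _watchdog():
			# Terminate the process if it outlives the timeout.
			try:
				process.wait(timeout=timeout)
			except subprocess.TimeoutExpired:
				process.terminate()
				process.wait()

		waiter = threading.Thread(target=_watchdog)
		waiter.start()

		with open(outfile_path, 'w') as output:
			try:
				for line in process.stdout:
					output.write(line)   # tee to the log file
					yield line           # and to the caller, in real time
			finally:
				output.write(process.stdout.read())  # flush anything left over
				process.stdout.close()
				waiter.join()

	# Hypothetical usage:
	# for line in stream_command(['dmesg'], '/tmp/kunit.log', timeout=30):
	#	print(line, end='')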
|
|
(File diff suppressed because it is too large.)
|
@ -14,6 +14,7 @@ import tempfile, shutil # Handling test_tmpdir
|
|||
import itertools
|
||||
import json
|
||||
import signal
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
import kunit_config
|
||||
|
@ -106,10 +107,10 @@ class KUnitParserTest(unittest.TestCase):
|
|||
with open(log_path) as file:
|
||||
result = kunit_parser.extract_tap_lines(file.readlines())
|
||||
self.assertContains('TAP version 14', result)
|
||||
self.assertContains(' # Subtest: example', result)
|
||||
self.assertContains(' 1..2', result)
|
||||
self.assertContains(' ok 1 - example_simple_test', result)
|
||||
self.assertContains(' ok 2 - example_mock_test', result)
|
||||
self.assertContains('# Subtest: example', result)
|
||||
self.assertContains('1..2', result)
|
||||
self.assertContains('ok 1 - example_simple_test', result)
|
||||
self.assertContains('ok 2 - example_mock_test', result)
|
||||
self.assertContains('ok 1 - example', result)
|
||||
|
||||
def test_output_with_prefix_isolated_correctly(self):
|
||||
|
@ -117,28 +118,28 @@ class KUnitParserTest(unittest.TestCase):
|
|||
with open(log_path) as file:
|
||||
result = kunit_parser.extract_tap_lines(file.readlines())
|
||||
self.assertContains('TAP version 14', result)
|
||||
self.assertContains(' # Subtest: kunit-resource-test', result)
|
||||
self.assertContains(' 1..5', result)
|
||||
self.assertContains(' ok 1 - kunit_resource_test_init_resources', result)
|
||||
self.assertContains(' ok 2 - kunit_resource_test_alloc_resource', result)
|
||||
self.assertContains(' ok 3 - kunit_resource_test_destroy_resource', result)
|
||||
self.assertContains(' foo bar #', result)
|
||||
self.assertContains(' ok 4 - kunit_resource_test_cleanup_resources', result)
|
||||
self.assertContains(' ok 5 - kunit_resource_test_proper_free_ordering', result)
|
||||
self.assertContains('# Subtest: kunit-resource-test', result)
|
||||
self.assertContains('1..5', result)
|
||||
self.assertContains('ok 1 - kunit_resource_test_init_resources', result)
|
||||
self.assertContains('ok 2 - kunit_resource_test_alloc_resource', result)
|
||||
self.assertContains('ok 3 - kunit_resource_test_destroy_resource', result)
|
||||
self.assertContains('foo bar #', result)
|
||||
self.assertContains('ok 4 - kunit_resource_test_cleanup_resources', result)
|
||||
self.assertContains('ok 5 - kunit_resource_test_proper_free_ordering', result)
|
||||
self.assertContains('ok 1 - kunit-resource-test', result)
|
||||
self.assertContains(' foo bar # non-kunit output', result)
|
||||
self.assertContains(' # Subtest: kunit-try-catch-test', result)
|
||||
self.assertContains(' 1..2', result)
|
||||
self.assertContains(' ok 1 - kunit_test_try_catch_successful_try_no_catch',
|
||||
self.assertContains('foo bar # non-kunit output', result)
|
||||
self.assertContains('# Subtest: kunit-try-catch-test', result)
|
||||
self.assertContains('1..2', result)
|
||||
self.assertContains('ok 1 - kunit_test_try_catch_successful_try_no_catch',
|
||||
result)
|
||||
self.assertContains(' ok 2 - kunit_test_try_catch_unsuccessful_try_does_catch',
|
||||
self.assertContains('ok 2 - kunit_test_try_catch_unsuccessful_try_does_catch',
|
||||
result)
|
||||
self.assertContains('ok 2 - kunit-try-catch-test', result)
|
||||
self.assertContains(' # Subtest: string-stream-test', result)
|
||||
self.assertContains(' 1..3', result)
|
||||
self.assertContains(' ok 1 - string_stream_test_empty_on_creation', result)
|
||||
self.assertContains(' ok 2 - string_stream_test_not_empty_after_add', result)
|
||||
self.assertContains(' ok 3 - string_stream_test_get_string', result)
|
||||
self.assertContains('# Subtest: string-stream-test', result)
|
||||
self.assertContains('1..3', result)
|
||||
self.assertContains('ok 1 - string_stream_test_empty_on_creation', result)
|
||||
self.assertContains('ok 2 - string_stream_test_not_empty_after_add', result)
|
||||
self.assertContains('ok 3 - string_stream_test_get_string', result)
|
||||
self.assertContains('ok 3 - string-stream-test', result)
|
||||
|
||||
def test_parse_successful_test_log(self):
|
||||
|
@ -149,6 +150,22 @@ class KUnitParserTest(unittest.TestCase):
|
|||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
|
||||
def test_parse_successful_nested_tests_log(self):
|
||||
all_passed_log = test_data_path('test_is_test_passed-all_passed_nested.log')
|
||||
with open(all_passed_log) as file:
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
|
||||
def test_kselftest_nested(self):
|
||||
kselftest_log = test_data_path('test_is_test_passed-kselftest.log')
|
||||
with open(kselftest_log) as file:
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
|
||||
def test_parse_failed_test_log(self):
|
||||
failed_log = test_data_path('test_is_test_passed-failure.log')
|
||||
with open(failed_log) as file:
|
||||
|
@ -162,17 +179,29 @@ class KUnitParserTest(unittest.TestCase):
|
|||
with open(empty_log) as file:
|
||||
result = kunit_parser.parse_run_tests(
|
||||
kunit_parser.extract_tap_lines(file.readlines()))
|
||||
self.assertEqual(0, len(result.suites))
|
||||
self.assertEqual(0, len(result.test.subtests))
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.FAILURE_TO_PARSE_TESTS,
|
||||
result.status)
|
||||
|
||||
def test_missing_test_plan(self):
|
||||
missing_plan_log = test_data_path('test_is_test_passed-'
|
||||
'missing_plan.log')
|
||||
with open(missing_plan_log) as file:
|
||||
result = kunit_parser.parse_run_tests(
|
||||
kunit_parser.extract_tap_lines(
|
||||
file.readlines()))
|
||||
self.assertEqual(2, result.test.counts.errors)
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
|
||||
def test_no_tests(self):
|
||||
empty_log = test_data_path('test_is_test_passed-no_tests_run_with_header.log')
|
||||
with open(empty_log) as file:
|
||||
header_log = test_data_path('test_is_test_passed-no_tests_run_with_header.log')
|
||||
with open(header_log) as file:
|
||||
result = kunit_parser.parse_run_tests(
|
||||
kunit_parser.extract_tap_lines(file.readlines()))
|
||||
self.assertEqual(0, len(result.suites))
|
||||
self.assertEqual(0, len(result.test.subtests))
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.NO_TESTS,
|
||||
result.status)
|
||||
|
@ -183,37 +212,53 @@ class KUnitParserTest(unittest.TestCase):
|
|||
with open(crash_log) as file:
|
||||
result = kunit_parser.parse_run_tests(
|
||||
kunit_parser.extract_tap_lines(file.readlines()))
|
||||
print_mock.assert_any_call(StrContains('could not parse test results!'))
|
||||
print_mock.assert_any_call(StrContains('invalid KTAP input!'))
|
||||
print_mock.stop()
|
||||
file.close()
|
||||
self.assertEqual(0, len(result.test.subtests))
|
||||
|
||||
def test_crashed_test(self):
|
||||
crashed_log = test_data_path('test_is_test_passed-crash.log')
|
||||
with open(crashed_log) as file:
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
result = kunit_parser.parse_run_tests(
|
||||
file.readlines())
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.TEST_CRASHED,
|
||||
result.status)
|
||||
|
||||
def test_skipped_test(self):
|
||||
skipped_log = test_data_path('test_skip_tests.log')
|
||||
file = open(skipped_log)
|
||||
with open(skipped_log) as file:
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
|
||||
# A skipped test does not fail the whole suite.
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
|
||||
def test_skipped_all_tests(self):
|
||||
skipped_log = test_data_path('test_skip_all_tests.log')
|
||||
with open(skipped_log) as file:
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SKIPPED,
|
||||
result.status)
|
||||
|
||||
def test_ignores_hyphen(self):
|
||||
hyphen_log = test_data_path('test_strip_hyphen.log')
|
||||
file = open(hyphen_log)
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
|
||||
# A skipped test does not fail the whole suite.
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
file.close()
|
||||
|
||||
def test_skipped_all_tests(self):
|
||||
skipped_log = test_data_path('test_skip_all_tests.log')
|
||||
file = open(skipped_log)
|
||||
result = kunit_parser.parse_run_tests(file.readlines())
|
||||
|
||||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SKIPPED,
|
||||
result.status)
|
||||
"sysctl_test",
|
||||
result.test.subtests[0].name)
|
||||
self.assertEqual(
|
||||
"example",
|
||||
result.test.subtests[1].name)
|
||||
file.close()
|
||||
|
||||
|
||||
|
@ -224,7 +269,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
def test_ignores_multiple_prefixes(self):
|
||||
prefix_log = test_data_path('test_multiple_prefixes.log')
|
||||
|
@ -233,7 +278,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
def test_prefix_mixed_kernel_output(self):
|
||||
mixed_prefix_log = test_data_path('test_interrupted_tap_output.log')
|
||||
|
@ -242,7 +287,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
def test_prefix_poundsign(self):
|
||||
pound_log = test_data_path('test_pound_sign.log')
|
||||
|
@ -251,7 +296,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
def test_kernel_panic_end(self):
|
||||
panic_log = test_data_path('test_kernel_panic_interrupt.log')
|
||||
|
@ -260,7 +305,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.TEST_CRASHED,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
def test_pound_no_prefix(self):
|
||||
pound_log = test_data_path('test_pound_no_prefix.log')
|
||||
|
@ -269,7 +314,7 @@ class KUnitParserTest(unittest.TestCase):
|
|||
self.assertEqual(
|
||||
kunit_parser.TestStatus.SUCCESS,
|
||||
result.status)
|
||||
self.assertEqual('kunit-resource-test', result.suites[0].name)
|
||||
self.assertEqual('kunit-resource-test', result.test.subtests[0].name)
|
||||
|
||||
class LinuxSourceTreeTest(unittest.TestCase):
|
||||
|
||||
|
@ -283,13 +328,33 @@ class LinuxSourceTreeTest(unittest.TestCase):
|
|||
|
||||
def test_valid_kunitconfig(self):
|
||||
with tempfile.NamedTemporaryFile('wt') as kunitconfig:
|
||||
tree = kunit_kernel.LinuxSourceTree('', kunitconfig_path=kunitconfig.name)
|
||||
kunit_kernel.LinuxSourceTree('', kunitconfig_path=kunitconfig.name)
|
||||
|
||||
def test_dir_kunitconfig(self):
|
||||
with tempfile.TemporaryDirectory('') as dir:
|
||||
with open(os.path.join(dir, '.kunitconfig'), 'w') as f:
|
||||
with open(os.path.join(dir, '.kunitconfig'), 'w'):
|
||||
pass
|
||||
tree = kunit_kernel.LinuxSourceTree('', kunitconfig_path=dir)
|
||||
kunit_kernel.LinuxSourceTree('', kunitconfig_path=dir)
|
||||
|
||||
def test_invalid_arch(self):
|
||||
with self.assertRaisesRegex(kunit_kernel.ConfigError, 'not a valid arch, options are.*x86_64'):
|
||||
kunit_kernel.LinuxSourceTree('', arch='invalid')
|
||||
|
||||
def test_run_kernel_hits_exception(self):
|
||||
def fake_start(unused_args, unused_build_dir):
|
||||
return subprocess.Popen(['echo "hi\nbye"'], shell=True, text=True, stdout=subprocess.PIPE)
|
||||
|
||||
with tempfile.TemporaryDirectory('') as build_dir:
|
||||
tree = kunit_kernel.LinuxSourceTree(build_dir, load_config=False)
|
||||
mock.patch.object(tree._ops, 'start', side_effect=fake_start).start()
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
for line in tree.run_kernel(build_dir=build_dir):
|
||||
self.assertEqual(line, 'hi\n')
|
||||
raise ValueError('uh oh, did not read all output')
|
||||
|
||||
with open(kunit_kernel.get_outfile_path(build_dir), 'rt') as outfile:
|
||||
self.assertEqual(outfile.read(), 'hi\nbye\n', msg='Missing some output')
|
||||
|
||||
# TODO: add more test cases.
|
||||
|
||||
|
@ -322,6 +387,12 @@ class KUnitJsonTest(unittest.TestCase):
|
|||
result = self._json_for('test_is_test_passed-no_tests_run_with_header.log')
|
||||
self.assertEqual(0, len(result['sub_groups']))
|
||||
|
||||
def test_nested_json(self):
|
||||
result = self._json_for('test_is_test_passed-all_passed_nested.log')
|
||||
self.assertEqual(
|
||||
{'name': 'example_simple_test', 'status': 'PASS'},
|
||||
result["sub_groups"][0]["sub_groups"][0]["test_cases"][0])
|
||||
|
||||
class StrContains(str):
|
||||
def __eq__(self, other):
|
||||
return self in other
|
||||
|
@ -380,7 +451,15 @@ class KUnitMainTest(unittest.TestCase):
|
|||
self.assertEqual(e.exception.code, 1)
|
||||
self.assertEqual(self.linux_source_mock.build_reconfig.call_count, 1)
|
||||
self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1)
|
||||
self.print_mock.assert_any_call(StrContains(' 0 tests run'))
|
||||
self.print_mock.assert_any_call(StrContains('invalid KTAP input!'))
|
||||
|
||||
def test_exec_no_tests(self):
|
||||
self.linux_source_mock.run_kernel = mock.Mock(return_value=['TAP version 14', '1..0'])
|
||||
with self.assertRaises(SystemExit) as e:
|
||||
kunit.main(['run'], self.linux_source_mock)
|
||||
self.linux_source_mock.run_kernel.assert_called_once_with(
|
||||
args=None, build_dir='.kunit', filter_glob='', timeout=300)
|
||||
self.print_mock.assert_any_call(StrContains(' 0 tests run!'))
|
||||
|
||||
def test_exec_raw_output(self):
|
||||
self.linux_source_mock.run_kernel = mock.Mock(return_value=[])
|
||||
|
@ -388,7 +467,7 @@ class KUnitMainTest(unittest.TestCase):
|
|||
self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1)
|
||||
for call in self.print_mock.call_args_list:
|
||||
self.assertNotEqual(call, mock.call(StrContains('Testing complete.')))
|
||||
self.assertNotEqual(call, mock.call(StrContains(' 0 tests run')))
|
||||
self.assertNotEqual(call, mock.call(StrContains(' 0 tests run!')))
|
||||
|
||||
def test_run_raw_output(self):
|
||||
self.linux_source_mock.run_kernel = mock.Mock(return_value=[])
|
||||
|
@ -397,7 +476,7 @@ class KUnitMainTest(unittest.TestCase):
|
|||
self.assertEqual(self.linux_source_mock.run_kernel.call_count, 1)
|
||||
for call in self.print_mock.call_args_list:
|
||||
self.assertNotEqual(call, mock.call(StrContains('Testing complete.')))
|
||||
self.assertNotEqual(call, mock.call(StrContains(' 0 tests run')))
|
||||
self.assertNotEqual(call, mock.call(StrContains(' 0 tests run!')))
|
||||
|
||||
def test_run_raw_output_kunit(self):
|
||||
self.linux_source_mock.run_kernel = mock.Mock(return_value=[])
|
||||
|
@ -485,6 +564,46 @@ class KUnitMainTest(unittest.TestCase):
|
|||
args=['a=1','b=2'], build_dir='.kunit', filter_glob='', timeout=300)
|
||||
self.print_mock.assert_any_call(StrContains('Testing complete.'))
|
||||
|
||||
def test_list_tests(self):
|
||||
want = ['suite.test1', 'suite.test2', 'suite2.test1']
|
||||
self.linux_source_mock.run_kernel.return_value = ['TAP version 14', 'init: random output'] + want
|
||||
|
||||
got = kunit._list_tests(self.linux_source_mock,
|
||||
kunit.KunitExecRequest(300, '.kunit', False, 'suite*', None, 'suite'))
|
||||
|
||||
self.assertEqual(got, want)
|
||||
# Should respect the user's filter glob when listing tests.
|
||||
self.linux_source_mock.run_kernel.assert_called_once_with(
|
||||
args=['kunit.action=list'], build_dir='.kunit', filter_glob='suite*', timeout=300)
|
||||
|
||||
|
||||
@mock.patch.object(kunit, '_list_tests')
|
||||
def test_run_isolated_by_suite(self, mock_tests):
|
||||
mock_tests.return_value = ['suite.test1', 'suite.test2', 'suite2.test1']
|
||||
kunit.main(['exec', '--run_isolated=suite', 'suite*.test*'], self.linux_source_mock)
|
||||
|
||||
# Should respect the user's filter glob when listing tests.
|
||||
mock_tests.assert_called_once_with(mock.ANY,
|
||||
kunit.KunitExecRequest(300, '.kunit', False, 'suite*.test*', None, 'suite'))
|
||||
self.linux_source_mock.run_kernel.assert_has_calls([
|
||||
mock.call(args=None, build_dir='.kunit', filter_glob='suite.test*', timeout=300),
|
||||
mock.call(args=None, build_dir='.kunit', filter_glob='suite2.test*', timeout=300),
|
||||
])
|
||||
|
||||
@mock.patch.object(kunit, '_list_tests')
|
||||
def test_run_isolated_by_test(self, mock_tests):
|
||||
mock_tests.return_value = ['suite.test1', 'suite.test2', 'suite2.test1']
|
||||
kunit.main(['exec', '--run_isolated=test', 'suite*'], self.linux_source_mock)
|
||||
|
||||
# Should respect the user's filter glob when listing tests.
|
||||
mock_tests.assert_called_once_with(mock.ANY,
|
||||
kunit.KunitExecRequest(300, '.kunit', False, 'suite*', None, 'test'))
|
||||
self.linux_source_mock.run_kernel.assert_has_calls([
|
||||
mock.call(args=None, build_dir='.kunit', filter_glob='suite.test1', timeout=300),
|
||||
mock.call(args=None, build_dir='.kunit', filter_glob='suite.test2', timeout=300),
|
||||
mock.call(args=None, build_dir='.kunit', filter_glob='suite2.test1', timeout=300),
|
||||
])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
@ -0,0 +1,34 @@
|
|||
TAP version 14
|
||||
1..2
|
||||
# Subtest: sysctl_test
|
||||
1..4
|
||||
# sysctl_test_dointvec_null_tbl_data: sysctl_test_dointvec_null_tbl_data passed
|
||||
ok 1 - sysctl_test_dointvec_null_tbl_data
|
||||
# Subtest: example
|
||||
1..2
|
||||
init_suite
|
||||
# example_simple_test: initializing
|
||||
# example_simple_test: example_simple_test passed
|
||||
ok 1 - example_simple_test
|
||||
# example_mock_test: initializing
|
||||
# example_mock_test: example_mock_test passed
|
||||
ok 2 - example_mock_test
|
||||
kunit example: all tests passed
|
||||
ok 2 - example
|
||||
# sysctl_test_dointvec_table_len_is_zero: sysctl_test_dointvec_table_len_is_zero passed
|
||||
ok 3 - sysctl_test_dointvec_table_len_is_zero
|
||||
# sysctl_test_dointvec_table_read_but_position_set: sysctl_test_dointvec_table_read_but_position_set passed
|
||||
ok 4 - sysctl_test_dointvec_table_read_but_position_set
|
||||
kunit sysctl_test: all tests passed
|
||||
ok 1 - sysctl_test
|
||||
# Subtest: example
|
||||
1..2
|
||||
init_suite
|
||||
# example_simple_test: initializing
|
||||
# example_simple_test: example_simple_test passed
|
||||
ok 1 - example_simple_test
|
||||
# example_mock_test: initializing
|
||||
# example_mock_test: example_mock_test passed
|
||||
ok 2 - example_mock_test
|
||||
kunit example: all tests passed
|
||||
ok 2 - example
|
|
@ -0,0 +1,14 @@
|
|||
TAP version 13
|
||||
1..2
|
||||
# selftests: membarrier: membarrier_test_single_thread
|
||||
# TAP version 13
|
||||
# 1..2
|
||||
# ok 1 sys_membarrier available
|
||||
# ok 2 sys membarrier invalid command test: command = -1, flags = 0, errno = 22. Failed as expected
|
||||
ok 1 selftests: membarrier: membarrier_test_single_thread
|
||||
# selftests: membarrier: membarrier_test_multi_thread
|
||||
# TAP version 13
|
||||
# 1..2
|
||||
# ok 1 sys_membarrier available
|
||||
# ok 2 sys membarrier invalid command test: command = -1, flags = 0, errno = 22. Failed as expected
|
||||
ok 2 selftests: membarrier: membarrier_test_multi_thread
|
|
@ -0,0 +1,31 @@
|
|||
KTAP version 1
|
||||
# Subtest: sysctl_test
|
||||
# sysctl_test_dointvec_null_tbl_data: sysctl_test_dointvec_null_tbl_data passed
|
||||
ok 1 - sysctl_test_dointvec_null_tbl_data
|
||||
# sysctl_test_dointvec_table_maxlen_unset: sysctl_test_dointvec_table_maxlen_unset passed
|
||||
ok 2 - sysctl_test_dointvec_table_maxlen_unset
|
||||
# sysctl_test_dointvec_table_len_is_zero: sysctl_test_dointvec_table_len_is_zero passed
|
||||
ok 3 - sysctl_test_dointvec_table_len_is_zero
|
||||
# sysctl_test_dointvec_table_read_but_position_set: sysctl_test_dointvec_table_read_but_position_set passed
|
||||
ok 4 - sysctl_test_dointvec_table_read_but_position_set
|
||||
# sysctl_test_dointvec_happy_single_positive: sysctl_test_dointvec_happy_single_positive passed
|
||||
ok 5 - sysctl_test_dointvec_happy_single_positive
|
||||
# sysctl_test_dointvec_happy_single_negative: sysctl_test_dointvec_happy_single_negative passed
|
||||
ok 6 - sysctl_test_dointvec_happy_single_negative
|
||||
# sysctl_test_dointvec_single_less_int_min: sysctl_test_dointvec_single_less_int_min passed
|
||||
ok 7 - sysctl_test_dointvec_single_less_int_min
|
||||
# sysctl_test_dointvec_single_greater_int_max: sysctl_test_dointvec_single_greater_int_max passed
|
||||
ok 8 - sysctl_test_dointvec_single_greater_int_max
|
||||
kunit sysctl_test: all tests passed
|
||||
ok 1 - sysctl_test
|
||||
# Subtest: example
|
||||
1..2
|
||||
init_suite
|
||||
# example_simple_test: initializing
|
||||
# example_simple_test: example_simple_test passed
|
||||
ok 1 - example_simple_test
|
||||
# example_mock_test: initializing
|
||||
# example_mock_test: example_mock_test passed
|
||||
ok 2 - example_mock_test
|
||||
kunit example: all tests passed
|
||||
ok 2 - example
|
|
@ -0,0 +1,16 @@
|
|||
KTAP version 1
|
||||
1..2
|
||||
# Subtest: sysctl_test
|
||||
1..1
|
||||
# sysctl_test_dointvec_null_tbl_data: sysctl_test_dointvec_null_tbl_data passed
|
||||
ok 1 - sysctl_test_dointvec_null_tbl_data
|
||||
kunit sysctl_test: all tests passed
|
||||
ok 1 - sysctl_test
|
||||
# Subtest: example
|
||||
1..1
|
||||
init_suite
|
||||
# example_simple_test: initializing
|
||||
# example_simple_test: example_simple_test passed
|
||||
ok 1 example_simple_test
|
||||
kunit example: all tests passed
|
||||
ok 2 example
|