#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <time.h>
#include <assert.h>

#include <linux/slab.h>
#include <linux/radix-tree.h>

#include "test.h"
#include "regression.h"

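/*
 * Populate a contiguous block of indices around 'middle' (pinned to 1 << 30
 * below), check that the items just outside the block are absent and that
 * everything inside is present, then exercise gang lookups and a full scan
 * over the block before tearing the tree down.
 */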
void __gang_check(unsigned long middle, long down, long up, int chunk, int hop)
{
	long idx;
	RADIX_TREE(tree, GFP_KERNEL);

	middle = 1 << 30;

	for (idx = -down; idx < up; idx++)
		item_insert(&tree, middle + idx);

	item_check_absent(&tree, middle - down - 1);
	for (idx = -down; idx < up; idx++)
		item_check_present(&tree, middle + idx);
	item_check_absent(&tree, middle + up);

	item_gang_check_present(&tree, middle - down,
			up + down, chunk, hop);
	item_full_scan(&tree, middle - down, down + up, chunk);
	item_kill_tree(&tree);
}

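/* Run __gang_check() over a selection of range sizes and chunk/hop values. */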
void gang_check(void)
{
	__gang_check(1 << 30, 128, 128, 35, 2);
	__gang_check(1 << 31, 128, 128, 32, 32);
	__gang_check(1 << 31, 128, 128, 32, 100);
	__gang_check(1 << 31, 128, 128, 17, 7);
	__gang_check(0xffff0000, 0, 65536, 17, 7);
	__gang_check(0xfffffffe, 1, 1, 17, 7);
}

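/*
 * Repeatedly run __gang_check() with pseudo-random down/up/chunk/hop
 * parameters, advancing 'start' in random steps until it wraps modulo 2^33.
 */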
void __big_gang_check(void)
{
	unsigned long start;
	int wrapped = 0;

	start = 0;
	do {
		unsigned long old_start;

//		printf("0x%08lx\n", start);
		__gang_check(start, rand() % 113 + 1, rand() % 71,
				rand() % 157, rand() % 91 + 1);
		old_start = start;
		start += rand() % 1000000;
		start %= 1ULL << 33;
		if (start < old_start)
			wrapped = 1;
	} while (!wrapped);
}

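/* Repeat __big_gang_check(): 1000 iterations on a long run, 3 otherwise. */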
void big_gang_check(bool long_run)
{
	int i;

	for (i = 0; i < (long_run ? 1000 : 3); i++) {
		__big_gang_check();
		printf("%d ", i);
		fflush(stdout);
	}
}

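/* Minimal smoke test: insert index 44, check 44 is present and 43 is not. */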
void add_and_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 44);
	item_check_present(&tree, 44);
	item_check_absent(&tree, 43);
	item_kill_tree(&tree);
}

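/*
 * Insert and delete items at small and large indices, verifying after each
 * step that the tree keeps only the height needed for the largest index
 * still present.
 */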
void dynamic_height_check(void)
{
	int i;
	RADIX_TREE(tree, GFP_KERNEL);
	tree_verify_min_height(&tree, 0);

	item_insert(&tree, 42);
	tree_verify_min_height(&tree, 42);

	item_insert(&tree, 1000000);
	tree_verify_min_height(&tree, 1000000);

	assert(item_delete(&tree, 1000000));
	tree_verify_min_height(&tree, 42);

	assert(item_delete(&tree, 42));
	tree_verify_min_height(&tree, 0);

	for (i = 0; i < 1000; i++) {
		item_insert(&tree, i);
		tree_verify_min_height(&tree, i);
	}

	i--;
	for (;;) {
		assert(item_delete(&tree, i));
		if (i == 0) {
			tree_verify_min_height(&tree, 0);
			break;
		}
		i--;
		tree_verify_min_height(&tree, i);
	}

	item_kill_tree(&tree);
}

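/*
 * For each recorded index, verify that 'totag' is clear outside the range
 * [start, end] and that inside the range 'totag' matches 'fromtag'.
 */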
void check_copied_tags(struct radix_tree_root *tree, unsigned long start, unsigned long end, unsigned long *idx, int count, int fromtag, int totag)
{
	int i;

	for (i = 0; i < count; i++) {
/*		if (i % 1000 == 0)
			putchar('.'); */
		if (idx[i] < start || idx[i] > end) {
			if (item_tag_get(tree, idx[i], totag)) {
				printf("%lu-%lu: %lu, tags %d-%d\n", start, end, idx[i], item_tag_get(tree, idx[i], fromtag), item_tag_get(tree, idx[i], totag));
			}
			assert(!item_tag_get(tree, idx[i], totag));
			continue;
		}
		if (item_tag_get(tree, idx[i], fromtag) ^
			item_tag_get(tree, idx[i], totag)) {
			printf("%lu-%lu: %lu, tags %d-%d\n", start, end, idx[i], item_tag_get(tree, idx[i], fromtag), item_tag_get(tree, idx[i], totag));
		}
		assert(!(item_tag_get(tree, idx[i], fromtag) ^
			 item_tag_get(tree, idx[i], totag)));
	}
}

#define ITEMS 50000

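/*
 * Insert ITEMS items at random indices, set tag 0 on roughly half of them,
 * then use radix_tree_range_tag_if_tagged() to copy tag 0 to tag 1 over a
 * random [start, end] range in a single pass, and to tag 2 in bounded
 * rounds, verifying the results with check_copied_tags() and
 * verify_tag_consistency().
 */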
void copy_tag_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long idx[ITEMS];
	unsigned long start, end, count = 0, tagged, cur, tmp;
	int i;

//	printf("generating radix tree indices...\n");
	start = rand();
	end = rand();
	if (start > end && (rand() % 10)) {
		cur = start;
		start = end;
		end = cur;
	}
	/* Specifically create items around the start and the end of the range
	 * with high probability to check for off by one errors */
	cur = rand();
	if (cur & 1) {
		item_insert(&tree, start);
		if (cur & 2) {
			if (start <= end)
				count++;
			item_tag_set(&tree, start, 0);
		}
	}
	if (cur & 4) {
		item_insert(&tree, start-1);
		if (cur & 8)
			item_tag_set(&tree, start-1, 0);
	}
	if (cur & 16) {
		item_insert(&tree, end);
		if (cur & 32) {
			if (start <= end)
				count++;
			item_tag_set(&tree, end, 0);
		}
	}
	if (cur & 64) {
		item_insert(&tree, end+1);
		if (cur & 128)
			item_tag_set(&tree, end+1, 0);
	}

	for (i = 0; i < ITEMS; i++) {
		do {
			idx[i] = rand();
		} while (item_lookup(&tree, idx[i]));

		item_insert(&tree, idx[i]);
		if (rand() & 1) {
			item_tag_set(&tree, idx[i], 0);
			if (idx[i] >= start && idx[i] <= end)
				count++;
		}
/*		if (i % 1000 == 0)
			putchar('.'); */
	}

//	printf("\ncopying tags...\n");
	cur = start;
	tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, ITEMS, 0, 1);

//	printf("checking copied tags\n");
	assert(tagged == count);
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 1);

	/* Copy tags in several rounds */
//	printf("\ncopying tags...\n");
	cur = start;
	do {
		tmp = rand() % (count/10+2);
		tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, tmp, 0, 2);
	} while (tmp == tagged);

//	printf("%lu %lu %lu\n", tagged, tmp, count);
//	printf("checking copied tags\n");
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 2);
	assert(tagged < tmp);
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	verify_tag_consistency(&tree, 2);
//	printf("\n");
	item_kill_tree(&tree);
}

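/*
 * Insert one item of the given order, look it up, and check that find_item()
 * reports the index it was inserted at.
 */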
static void __locate_check(struct radix_tree_root *tree, unsigned long index,
			unsigned order)
{
	struct item *item;
	unsigned long index2;

	item_insert_order(tree, index, order);
	item = item_lookup(tree, index);
	index2 = find_item(tree, item);
	if (index != index2) {
		printf("index %ld order %d inserted; found %ld\n",
			index, order, index2);
		abort();
	}
}

static void __order_0_locate_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int i;

	for (i = 0; i < 50; i++)
		__locate_check(&tree, rand() % INT_MAX, 0);

	item_kill_tree(&tree);
}

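/*
 * Exercise find_item() for orders 0-19 over shifted index ranges, checking
 * after each pass that a pointer which is not in the tree is reported as
 * not found (-1).
 */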
static void locate_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned order;
	unsigned long offset, index;

	__order_0_locate_check();

	for (order = 0; order < 20; order++) {
		for (offset = 0; offset < (1 << (order + 3));
		     offset += (1UL << order)) {
			for (index = 0; index < (1UL << (order + 5));
			     index += (1UL << order)) {
				__locate_check(&tree, index + offset, order);
			}
			if (find_item(&tree, &tree) != -1)
				abort();

			item_kill_tree(&tree);
		}
	}

	if (find_item(&tree, &tree) != -1)
		abort();
	__locate_check(&tree, -1, 0);
	if (find_item(&tree, &tree) != -1)
		abort();
	item_kill_tree(&tree);
}

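/*
 * Run each single-threaded check in turn, with an rcu_barrier() and an
 * allocation/preempt-count report after every stage.
 */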
static void single_thread_tests(bool long_run)
{
	int i;

	printf("starting single_thread_tests: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	multiorder_checks();
	rcu_barrier();
	printf("after multiorder_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	locate_check();
	rcu_barrier();
	printf("after locate_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	tag_check();
	rcu_barrier();
	printf("after tag_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	gang_check();
	rcu_barrier();
	printf("after gang_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	add_and_check();
	rcu_barrier();
	printf("after add_and_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	dynamic_height_check();
	rcu_barrier();
	printf("after dynamic_height_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	big_gang_check(long_run);
	rcu_barrier();
	printf("after big_gang_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	for (i = 0; i < (long_run ? 2000 : 3); i++) {
		copy_tag_check();
		printf("%d ", i);
		fflush(stdout);
	}
	rcu_barrier();
	printf("after copy_tag_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
}

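/*
 * Parse -l (long run) and -s <seed>, seed the RNG, register this thread
 * with RCU, then run the regression, iteration and single-threaded tests.
 */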
int main(int argc, char **argv)
{
	bool long_run = false;
	int opt;
	unsigned int seed = time(NULL);

	while ((opt = getopt(argc, argv, "ls:")) != -1) {
		if (opt == 'l')
			long_run = true;
		else if (opt == 's')
			seed = strtoul(optarg, NULL, 0);
	}

	printf("random seed %u\n", seed);
	srand(seed);

	rcu_register_thread();
	radix_tree_init();

	regression1_test();
	regression2_test();
	regression3_test();
	iteration_test();
	single_thread_tests(long_run);

	/* Free any remaining preallocated nodes */
	radix_tree_cpu_dead(0);

	benchmark();

	rcu_barrier();
	printf("after rcu_barrier: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	rcu_unregister_thread();

	exit(0);
}