radix tree test suite: Convert regression1 to XArray

Now that the page cache lookup is using the XArray, let's convert this
regression test from the radix tree API to the XArray so it's testing
roughly the same thing it was testing before.

Signed-off-by: Matthew Wilcox <willy@infradead.org>
Author: Matthew Wilcox 2018-05-17 00:13:27 -04:00
parent 070e807c69
commit a332125fc3
1 changed file with 19 additions and 39 deletions


@@ -53,12 +53,12 @@ struct page {
 	unsigned long index;
 };
 
-static struct page *page_alloc(void)
+static struct page *page_alloc(int index)
 {
 	struct page *p;
 	p = malloc(sizeof(struct page));
 	p->count = 1;
-	p->index = 1;
+	p->index = index;
 	pthread_mutex_init(&p->lock, NULL);
 
 	return p;
@@ -80,53 +80,33 @@ static void page_free(struct page *p)
 static unsigned find_get_pages(unsigned long start,
 			    unsigned int nr_pages, struct page **pages)
 {
-	unsigned int i;
-	unsigned int ret;
-	unsigned int nr_found;
+	XA_STATE(xas, &mt_tree, start);
+	struct page *page;
+	unsigned int ret = 0;
 
 	rcu_read_lock();
-restart:
-	nr_found = radix_tree_gang_lookup_slot(&mt_tree,
-				(void ***)pages, NULL, start, nr_pages);
-	ret = 0;
-	for (i = 0; i < nr_found; i++) {
-		struct page *page;
-repeat:
-		page = radix_tree_deref_slot((void **)pages[i]);
-		if (unlikely(!page))
+	xas_for_each(&xas, page, ULONG_MAX) {
+		if (xas_retry(&xas, page))
 			continue;
 
-		if (radix_tree_exception(page)) {
-			if (radix_tree_deref_retry(page)) {
-				/*
-				 * Transient condition which can only trigger
-				 * when entry at index 0 moves out of or back
-				 * to root: none yet gotten, safe to restart.
-				 */
-				assert((start | i) == 0);
-				goto restart;
-			}
-			/*
-			 * No exceptional entries are inserted in this test.
-			 */
-			assert(0);
-		}
-
 		pthread_mutex_lock(&page->lock);
-		if (!page->count) {
-			pthread_mutex_unlock(&page->lock);
-			goto repeat;
-		}
+		if (!page->count)
+			goto unlock;
+
 		/* don't actually update page refcount */
 		pthread_mutex_unlock(&page->lock);
 
 		/* Has the page moved? */
-		if (unlikely(page != *((void **)pages[i]))) {
-			goto repeat;
-		}
+		if (unlikely(page != xas_reload(&xas)))
+			goto put_page;
 
 		pages[ret] = page;
 		ret++;
+		continue;
+unlock:
+		pthread_mutex_unlock(&page->lock);
+put_page:
+		xas_reset(&xas);
 	}
 	rcu_read_unlock();
 	return ret;
@@ -145,12 +125,12 @@ static void *regression1_fn(void *arg)
 		for (j = 0; j < 1000000; j++) {
 			struct page *p;
 
-			p = page_alloc();
+			p = page_alloc(0);
 			pthread_mutex_lock(&mt_lock);
 			radix_tree_insert(&mt_tree, 0, p);
 			pthread_mutex_unlock(&mt_lock);
 
-			p = page_alloc();
+			p = page_alloc(1);
 			pthread_mutex_lock(&mt_lock);
 			radix_tree_insert(&mt_tree, 1, p);
 			pthread_mutex_unlock(&mt_lock);
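
The new find_get_pages() above is an instance of the general XArray lookup
pattern: declare an XA_STATE cursor, walk entries under rcu_read_lock() with
xas_for_each(), skip transient internal entries with xas_retry(), revalidate
the entry with xas_reload() after taking the per-object lock, and restart at
the same index with xas_reset() when validation fails. The sketch below shows
that skeleton on its own; it assumes the tools/testing/radix-tree harness
(its <linux/xarray.h> and RCU shims), and lookup_pages() and page_busy() are
hypothetical names used for illustration, not part of this commit.

/*
 * Minimal sketch of the XArray lookup pattern used by the new
 * find_get_pages().  Assumes the radix-tree test-suite shims for
 * <linux/xarray.h> and <linux/rcupdate.h>; lookup_pages() and
 * page_busy() are hypothetical, not from the commit.
 */
#include <linux/xarray.h>
#include <linux/rcupdate.h>
#include <limits.h>
#include <stdbool.h>

struct page;				/* as defined in regression1.c */
bool page_busy(struct page *page);	/* hypothetical validity check */

static unsigned int lookup_pages(struct xarray *xa, unsigned long start,
				 unsigned int nr_pages, struct page **pages)
{
	XA_STATE(xas, xa, start);	/* cursor positioned at 'start' */
	struct page *page;
	unsigned int ret = 0;

	rcu_read_lock();
	xas_for_each(&xas, page, ULONG_MAX) {
		/* Skip retry entries left behind by a concurrent modification. */
		if (xas_retry(&xas, page))
			continue;

		/* Revalidate: if the entry changed under us, walk this index again. */
		if (page_busy(page) || page != xas_reload(&xas)) {
			xas_reset(&xas);
			continue;
		}

		pages[ret++] = page;
		if (ret == nr_pages)
			break;
	}
	rcu_read_unlock();

	return ret;
}

Compared with the radix tree version, there is no gang-lookup slot buffer to
manage and no special-casing of exceptional entries; the xa_state cursor
carries the restart state that the old restart:/repeat: labels handled by hand.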