radix tree test suite: Convert regression1 to XArray

Now the page cache lookup is using the XArray, let's convert this
regression test from the radix tree API to the XArray so it's testing
roughly the same thing it was testing before.

Signed-off-by: Matthew Wilcox <willy@infradead.org>
Author: Matthew Wilcox <willy@infradead.org>
Date:   2018-05-17 00:13:27 -04:00
Parent: 070e807c69
Commit: a332125fc3

File: tools/testing/radix-tree/regression1.c

@@ -53,12 +53,12 @@ struct page {
 	unsigned long index;
 };

-static struct page *page_alloc(void)
+static struct page *page_alloc(int index)
 {
 	struct page *p;
 	p = malloc(sizeof(struct page));
 	p->count = 1;
-	p->index = 1;
+	p->index = index;
 	pthread_mutex_init(&p->lock, NULL);
 	return p;
@@ -80,53 +80,33 @@ static void page_free(struct page *p)
 static unsigned find_get_pages(unsigned long start,
 			    unsigned int nr_pages, struct page **pages)
 {
-	unsigned int i;
-	unsigned int ret;
-	unsigned int nr_found;
+	XA_STATE(xas, &mt_tree, start);
+	struct page *page;
+	unsigned int ret = 0;

 	rcu_read_lock();
-restart:
-	nr_found = radix_tree_gang_lookup_slot(&mt_tree,
-				(void ***)pages, NULL, start, nr_pages);
-	ret = 0;
-	for (i = 0; i < nr_found; i++) {
-		struct page *page;
-repeat:
-		page = radix_tree_deref_slot((void **)pages[i]);
-		if (unlikely(!page))
+	xas_for_each(&xas, page, ULONG_MAX) {
+		if (xas_retry(&xas, page))
 			continue;
-
-		if (radix_tree_exception(page)) {
-			if (radix_tree_deref_retry(page)) {
-				/*
-				 * Transient condition which can only trigger
-				 * when entry at index 0 moves out of or back
-				 * to root: none yet gotten, safe to restart.
-				 */
-				assert((start | i) == 0);
-				goto restart;
-			}
-			/*
-			 * No exceptional entries are inserted in this test.
-			 */
-			assert(0);
-		}

 		pthread_mutex_lock(&page->lock);
-		if (!page->count) {
-			pthread_mutex_unlock(&page->lock);
-			goto repeat;
-		}
+		if (!page->count)
+			goto unlock;
+
 		/* don't actually update page refcount */
 		pthread_mutex_unlock(&page->lock);

 		/* Has the page moved? */
-		if (unlikely(page != *((void **)pages[i]))) {
-			goto repeat;
-		}
+		if (unlikely(page != xas_reload(&xas)))
+			goto put_page;

 		pages[ret] = page;
 		ret++;
+		continue;
+unlock:
+		pthread_mutex_unlock(&page->lock);
+put_page:
+		xas_reset(&xas);
 	}
 	rcu_read_unlock();
 	return ret;
@@ -145,12 +125,12 @@ static void *regression1_fn(void *arg)
 		for (j = 0; j < 1000000; j++) {
 			struct page *p;

-			p = page_alloc();
+			p = page_alloc(0);
 			pthread_mutex_lock(&mt_lock);
 			radix_tree_insert(&mt_tree, 0, p);
 			pthread_mutex_unlock(&mt_lock);

-			p = page_alloc();
+			p = page_alloc(1);
 			pthread_mutex_lock(&mt_lock);
 			radix_tree_insert(&mt_tree, 1, p);
 			pthread_mutex_unlock(&mt_lock);