Diffstat (limited to 'tools/testing/radix-tree/iteration_check.c')
-rw-r--r--	tools/testing/radix-tree/iteration_check.c	| 103
1 file changed, 51 insertions, 52 deletions
diff --git a/tools/testing/radix-tree/iteration_check.c b/tools/testing/radix-tree/iteration_check.c
index d047327bb9ef..238db187aa15 100644
--- a/tools/testing/radix-tree/iteration_check.c
+++ b/tools/testing/radix-tree/iteration_check.c
@@ -1,5 +1,5 @@
 /*
- * iteration_check.c: test races having to do with radix tree iteration
+ * iteration_check.c: test races having to do with xarray iteration
  * Copyright (c) 2016 Intel Corporation
  * Author: Ross Zwisler <ross.zwisler@linux.intel.com>
  *
@@ -12,7 +12,6 @@
  * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
  * more details.
  */
-#include <linux/radix-tree.h>
 #include <pthread.h>
 #include "test.h"

@@ -23,29 +22,44 @@

 static pthread_t threads[NUM_THREADS];
 static unsigned int seeds[3];
-static RADIX_TREE(tree, GFP_KERNEL);
+static DEFINE_XARRAY(array);
 static bool test_complete;
 static int max_order;

-/* relentlessly fill the tree with tagged entries */
+void my_item_insert(struct xarray *xa, unsigned long index)
+{
+	XA_STATE(xas, xa, index);
+	struct item *item = item_create(index, 0);
+	int order;
+
+retry:
+	xas_lock(&xas);
+	for (order = max_order; order >= 0; order--) {
+		xas_set_order(&xas, index, order);
+		item->order = order;
+		if (xas_find_conflict(&xas))
+			continue;
+		xas_store(&xas, item);
+		xas_set_mark(&xas, TAG);
+		break;
+	}
+	xas_unlock(&xas);
+	if (xas_nomem(&xas, GFP_KERNEL))
+		goto retry;
+	if (order < 0)
+		free(item);
+}
+
+/* relentlessly fill the array with tagged entries */
 static void *add_entries_fn(void *arg)
 {
 	rcu_register_thread();

 	while (!test_complete) {
 		unsigned long pgoff;
-		int order;

 		for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
-			xa_lock(&tree);
-			for (order = max_order; order >= 0; order--) {
-				if (item_insert_order(&tree, pgoff, order)
-						== 0) {
-					item_tag_set(&tree, pgoff, TAG);
-					break;
-				}
-			}
-			xa_unlock(&tree);
+			my_item_insert(&array, pgoff);
 		}
 	}

@@ -55,33 +69,25 @@ static void *add_entries_fn(void *arg)
 }

 /*
- * Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
- * things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over tagged entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *tagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;

 	rcu_register_thread();

 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_tagged(slot, &tree, &iter, 0, TAG) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each_marked(&xas, entry, ULONG_MAX, TAG) {
+			if (xas_retry(&xas, entry))
 				continue;

-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
 			if (rand_r(&seeds[0]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
@@ -96,33 +102,25 @@ static void *tagged_iteration_fn(void *arg)
 }

 /*
- * Iterate over the entries, doing a radix_tree_iter_retry() as we find things
- * that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
- * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
- * NULL 'slot' variable.
+ * Iterate over the entries, retrying when we find ourselves in a deleted
+ * node and randomly pausing the iteration.
  */
 static void *untagged_iteration_fn(void *arg)
 {
-	struct radix_tree_iter iter;
-	void **slot;
+	XA_STATE(xas, &array, 0);
+	void *entry;

 	rcu_register_thread();

 	while (!test_complete) {
+		xas_set(&xas, 0);
 		rcu_read_lock();
-		radix_tree_for_each_slot(slot, &tree, &iter, 0) {
-			void *entry = radix_tree_deref_slot(slot);
-			if (unlikely(!entry))
+		xas_for_each(&xas, entry, ULONG_MAX) {
+			if (xas_retry(&xas, entry))
 				continue;

-			if (radix_tree_deref_retry(entry)) {
-				slot = radix_tree_iter_retry(&iter);
-				continue;
-			}
-
 			if (rand_r(&seeds[1]) % 50 == 0) {
-				slot = radix_tree_iter_resume(slot, &iter);
+				xas_pause(&xas);
 				rcu_read_unlock();
 				rcu_barrier();
 				rcu_read_lock();
@@ -137,7 +135,7 @@ static void *untagged_iteration_fn(void *arg)
 }

 /*
- * Randomly remove entries to help induce radix_tree_iter_retry() calls in the
+ * Randomly remove entries to help induce retries in the
  * two iteration functions.
  */
 static void *remove_entries_fn(void *arg)
@@ -146,12 +144,13 @@ static void *remove_entries_fn(void *arg)

 	while (!test_complete) {
 		int pgoff;
+		struct item *item;

 		pgoff = rand_r(&seeds[2]) % MAX_IDX;
-		xa_lock(&tree);
-		item_delete(&tree, pgoff);
-		xa_unlock(&tree);
+		item = xa_erase(&array, pgoff);
+		if (item)
+			item_free(item, pgoff);
 	}

 	rcu_unregister_thread();

@@ -164,7 +163,7 @@ static void *tag_entries_fn(void *arg)
 	rcu_register_thread();

 	while (!test_complete) {
-		tag_tagged_items(&tree, 0, MAX_IDX, 10, TAG, NEW_TAG);
+		tag_tagged_items(&array, 0, MAX_IDX, 10, TAG, NEW_TAG);
 	}
 	rcu_unregister_thread();
 	return NULL;
@@ -215,5 +214,5 @@ void iteration_test(unsigned order, unsigned test_duration)
 		}
 	}

-	item_kill_tree(&tree);
+	item_kill_tree(&array);
 }
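For readers more used to the old radix tree calls, the conversion above leans on two standard XArray idioms: storing under xas_lock() with an xas_nomem() retry loop, and walking marked entries under RCU with xas_for_each_marked(), xas_retry() and xas_pause(). The sketch below is illustrative only and not part of the patch; it assumes the in-kernel <linux/xarray.h> API (or the userspace shim this test suite is built against), and the names sketch_xa, sketch_store_tagged() and sketch_count_tagged() are invented for the example.

/* Illustrative sketch, not from the patch: the two XArray idioms used above. */
#include <linux/xarray.h>
#include <linux/rcupdate.h>

static DEFINE_XARRAY(sketch_xa);

/* Store an entry and mark it, retrying if the XArray needs more memory. */
static void sketch_store_tagged(unsigned long index, void *entry)
{
	XA_STATE(xas, &sketch_xa, index);

	do {
		xas_lock(&xas);
		xas_store(&xas, entry);
		xas_set_mark(&xas, XA_MARK_0);
		xas_unlock(&xas);
		/* xas_nomem() allocates outside the lock and asks us to retry */
	} while (xas_nomem(&xas, GFP_KERNEL));
}

/* Walk the marked entries, occasionally pausing the walk at a safe point. */
static unsigned long sketch_count_tagged(void)
{
	XA_STATE(xas, &sketch_xa, 0);
	unsigned long count = 0;
	void *entry;

	rcu_read_lock();
	xas_for_each_marked(&xas, entry, ULONG_MAX, XA_MARK_0) {
		if (xas_retry(&xas, entry))	/* raced with a node deletion */
			continue;
		if (++count % 16 == 0) {
			xas_pause(&xas);	/* safe to drop the RCU lock here */
			rcu_read_unlock();
			rcu_read_lock();
		}
	}
	rcu_read_unlock();

	return count;
}

my_item_insert() in the patch follows the same store/xas_nomem() pattern, with the extra twist that it probes decreasing orders via xas_set_order() and xas_find_conflict() so a multi-order entry is only stored where it does not overlap an existing one; the two iteration threads are the xas_for_each()/xas_for_each_marked() side of the same picture.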