author     Linus Torvalds <torvalds@linux-foundation.org>  2016-12-14 17:25:18 -0800
committer  Linus Torvalds <torvalds@linux-foundation.org>  2016-12-14 17:25:18 -0800
commit     a57cb1c1d7974c62a5c80f7869e35b492ace12cd (patch)
tree       5a42ee9a668f171143464bc86013954c1bbe94ad /tools
parent     Merge branch 'for-linus' of git://git.kernel.dk/linux-block (diff)
parent     radix tree test suite: delete unused rcupdate.c (diff)
Merge branch 'akpm' (patches from Andrew)
Merge more updates from Andrew Morton:

 - a few misc things
 - kexec updates
 - DMA-mapping updates to better support networking DMA operations
 - IPC updates
 - various MM changes to improve DAX fault handling
 - lots of radix-tree changes, mainly to the test suite. All leading up
   to reimplementing the IDA/IDR code to be a wrapper layer over the
   radix-tree. However the final trigger-pulling patch is held off for
   4.11.

* emailed patches from Andrew Morton <akpm@linux-foundation.org>: (114 commits)
  radix tree test suite: delete unused rcupdate.c
  radix tree test suite: add new tag check
  radix-tree: ensure counts are initialised
  radix tree test suite: cache recently freed objects
  radix tree test suite: add some more functionality
  idr: reduce the number of bits per level from 8 to 6
  rxrpc: abstract away knowledge of IDR internals
  tpm: use idr_find(), not idr_find_slowpath()
  idr: add ida_is_empty
  radix tree test suite: check multiorder iteration
  radix-tree: fix replacement for multiorder entries
  radix-tree: add radix_tree_split_preload()
  radix-tree: add radix_tree_split
  radix-tree: add radix_tree_join
  radix-tree: delete radix_tree_range_tag_if_tagged()
  radix-tree: delete radix_tree_locate_item()
  radix-tree: improve multiorder iterators
  btrfs: fix race in btrfs_free_dummy_fs_info()
  radix-tree: improve dump output
  radix-tree: make radix_tree_find_next_bit more useful
  ...
Diffstat (limited to 'tools')
-rw-r--r--  tools/include/asm/bug.h | 11
-rw-r--r--  tools/include/linux/bitmap.h | 26
-rwxr-xr-x  tools/testing/ktest/ktest.pl | 8
-rw-r--r--  tools/testing/radix-tree/Makefile | 15
-rw-r--r--  tools/testing/radix-tree/benchmark.c | 98
-rw-r--r--  tools/testing/radix-tree/find_next_bit.c | 57
-rw-r--r--  tools/testing/radix-tree/iteration_check.c | 123
-rw-r--r--  tools/testing/radix-tree/linux.c | 67
-rw-r--r--  tools/testing/radix-tree/linux/bitops.h | 40
-rw-r--r--  tools/testing/radix-tree/linux/bitops/non-atomic.h | 13
-rw-r--r--  tools/testing/radix-tree/linux/bug.h | 2
-rw-r--r--  tools/testing/radix-tree/linux/gfp.h | 22
-rw-r--r--  tools/testing/radix-tree/linux/kernel.h | 18
-rw-r--r--  tools/testing/radix-tree/linux/preempt.h | 6
-rw-r--r--  tools/testing/radix-tree/linux/slab.h | 11
-rw-r--r--  tools/testing/radix-tree/linux/types.h | 2
-rw-r--r--  tools/testing/radix-tree/main.c | 77
-rw-r--r--  tools/testing/radix-tree/multiorder.c | 326
-rw-r--r--  tools/testing/radix-tree/rcupdate.c | 86
-rw-r--r--  tools/testing/radix-tree/regression2.c | 3
-rw-r--r--  tools/testing/radix-tree/regression3.c | 8
-rw-r--r--  tools/testing/radix-tree/tag_check.c | 12
-rw-r--r--  tools/testing/radix-tree/test.c | 92
-rw-r--r--  tools/testing/radix-tree/test.h | 21
24 files changed, 842 insertions, 302 deletions
diff --git a/tools/include/asm/bug.h b/tools/include/asm/bug.h
index 9e5f4846967f..beda1a884b50 100644
--- a/tools/include/asm/bug.h
+++ b/tools/include/asm/bug.h
@@ -12,6 +12,17 @@
unlikely(__ret_warn_on); \
})
+#define WARN_ON_ONCE(condition) ({ \
+ static int __warned; \
+ int __ret_warn_once = !!(condition); \
+ \
+ if (unlikely(__ret_warn_once && !__warned)) { \
+ __warned = true; \
+ WARN_ON(1); \
+ } \
+ unlikely(__ret_warn_once); \
+})
+
#define WARN_ONCE(condition, format...) ({ \
static int __warned; \
int __ret_warn_once = !!(condition); \
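A minimal sketch (not part of the patch) of how the userspace WARN_ON_ONCE() added above is meant to behave: the warning fires only on the first true evaluation, while the macro keeps returning the condition's value on every call. The WARN_ON() below is a simplified stand-in for the one in tools/include/asm/bug.h, and the program itself is illustrative only.

#include <stdio.h>
#include <stdbool.h>

#define unlikely(x) (x)

/* simplified stand-in for the WARN_ON() in tools/include/asm/bug.h */
#define WARN_ON(condition) ({ \
	int __ret_warn_on = !!(condition); \
	if (unlikely(__ret_warn_on)) \
		fprintf(stderr, "WARNING at %s:%d\n", __FILE__, __LINE__); \
	unlikely(__ret_warn_on); \
})

#define WARN_ON_ONCE(condition) ({ \
	static int __warned; \
	int __ret_warn_once = !!(condition); \
	\
	if (unlikely(__ret_warn_once && !__warned)) { \
		__warned = true; \
		WARN_ON(1); \
	} \
	unlikely(__ret_warn_once); \
})

int main(void)
{
	int i;

	for (i = 0; i < 3; i++) {
		/* the warning text is printed only on the first iteration */
		if (WARN_ON_ONCE(i >= 0))
			printf("condition true on iteration %d\n", i);
	}
	return 0;
}
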
diff --git a/tools/include/linux/bitmap.h b/tools/include/linux/bitmap.h
index 43c1c5021e4b..eef41d500e9e 100644
--- a/tools/include/linux/bitmap.h
+++ b/tools/include/linux/bitmap.h
@@ -35,6 +35,32 @@ static inline void bitmap_zero(unsigned long *dst, int nbits)
}
}
+static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
+{
+ unsigned int nlongs = BITS_TO_LONGS(nbits);
+ if (!small_const_nbits(nbits)) {
+ unsigned int len = (nlongs - 1) * sizeof(unsigned long);
+ memset(dst, 0xff, len);
+ }
+ dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits);
+}
+
+static inline int bitmap_empty(const unsigned long *src, unsigned nbits)
+{
+ if (small_const_nbits(nbits))
+ return ! (*src & BITMAP_LAST_WORD_MASK(nbits));
+
+ return find_first_bit(src, nbits) == nbits;
+}
+
+static inline int bitmap_full(const unsigned long *src, unsigned int nbits)
+{
+ if (small_const_nbits(nbits))
+ return ! (~(*src) & BITMAP_LAST_WORD_MASK(nbits));
+
+ return find_first_zero_bit(src, nbits) == nbits;
+}
+
static inline int bitmap_weight(const unsigned long *src, int nbits)
{
if (small_const_nbits(nbits))
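What the last-word handling above is doing, with concrete numbers (the mask macro below is my own equivalent formulation, not copied from the headers): for nbits = 100 on a 64-bit machine the map spans two longs and only the low 36 bits of the second long belong to the bitmap, so bitmap_fill() writes all-ones into the whole leading words and masks the final one, and the single-word fast paths of bitmap_empty()/bitmap_full() apply the same mask before testing.

#include <stdio.h>
#include <limits.h>

#define BITS_PER_LONG	(sizeof(long) * CHAR_BIT)
#define BITS_TO_LONGS(nr) (((nr) + BITS_PER_LONG - 1) / BITS_PER_LONG)
/* equivalent formulation of the last-word mask (assumption, not from the headers) */
#define BITMAP_LAST_WORD_MASK(nbits) \
	(((nbits) % BITS_PER_LONG) ? (1UL << ((nbits) % BITS_PER_LONG)) - 1 : ~0UL)

int main(void)
{
	unsigned int nbits = 100;

	printf("BITS_TO_LONGS(%u) = %zu\n", nbits, (size_t)BITS_TO_LONGS(nbits));
	printf("BITMAP_LAST_WORD_MASK(%u) = %#lx\n", nbits,
	       BITMAP_LAST_WORD_MASK(nbits));
	return 0;
}
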
diff --git a/tools/testing/ktest/ktest.pl b/tools/testing/ktest/ktest.pl
index d08e214ec6e7..be93ab02b490 100755
--- a/tools/testing/ktest/ktest.pl
+++ b/tools/testing/ktest/ktest.pl
@@ -719,14 +719,14 @@ sub set_value {
if ($buildonly && $lvalue =~ /^TEST_TYPE(\[.*\])?$/ && $prvalue ne "build") {
# Note if a test is something other than build, then we
- # will need other manditory options.
+ # will need other mandatory options.
if ($prvalue ne "install") {
# for bisect, we need to check BISECT_TYPE
if ($prvalue ne "bisect") {
$buildonly = 0;
}
} else {
- # install still limits some manditory options.
+ # install still limits some mandatory options.
$buildonly = 2;
}
}
@@ -735,7 +735,7 @@ sub set_value {
if ($prvalue ne "install") {
$buildonly = 0;
} else {
- # install still limits some manditory options.
+ # install still limits some mandatory options.
$buildonly = 2;
}
}
@@ -3989,7 +3989,7 @@ sub make_min_config {
}
}
- # Save off all the current mandidory configs
+ # Save off all the current mandatory configs
open (OUT, ">$temp_config")
or die "Can't write to $temp_config";
foreach my $config (keys %keep_configs) {
diff --git a/tools/testing/radix-tree/Makefile b/tools/testing/radix-tree/Makefile
index f2e07f2fd4b4..3635e4d3eca7 100644
--- a/tools/testing/radix-tree/Makefile
+++ b/tools/testing/radix-tree/Makefile
@@ -1,10 +1,14 @@
-CFLAGS += -I. -g -O2 -Wall -D_LGPL_SOURCE
+CFLAGS += -I. -I../../include -g -O2 -Wall -D_LGPL_SOURCE
LDFLAGS += -lpthread -lurcu
TARGETS = main
OFILES = main.o radix-tree.o linux.o test.o tag_check.o find_next_bit.o \
regression1.o regression2.o regression3.o multiorder.o \
- iteration_check.o
+ iteration_check.o benchmark.o
+
+ifdef BENCHMARK
+ CFLAGS += -DBENCHMARK=1
+endif
targets: $(TARGETS)
@@ -14,7 +18,12 @@ main: $(OFILES)
clean:
$(RM) -f $(TARGETS) *.o radix-tree.c
-$(OFILES): *.h */*.h ../../../include/linux/radix-tree.h ../../include/linux/*.h
+find_next_bit.o: ../../lib/find_bit.c
+ $(CC) $(CFLAGS) -c -o $@ $<
+
+$(OFILES): *.h */*.h \
+ ../../include/linux/*.h \
+ ../../../include/linux/radix-tree.h
radix-tree.c: ../../../lib/radix-tree.c
sed -e 's/^static //' -e 's/__always_inline //' -e 's/inline //' < $< > $@
diff --git a/tools/testing/radix-tree/benchmark.c b/tools/testing/radix-tree/benchmark.c
new file mode 100644
index 000000000000..215ca86c7605
--- /dev/null
+++ b/tools/testing/radix-tree/benchmark.c
@@ -0,0 +1,98 @@
+/*
+ * benchmark.c:
+ * Author: Konstantin Khlebnikov <koct9i@gmail.com>
+ *
+ * This program is free software; you can redistribute it and/or modify it
+ * under the terms and conditions of the GNU General Public License,
+ * version 2, as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+ * more details.
+ */
+#include <linux/radix-tree.h>
+#include <linux/slab.h>
+#include <linux/errno.h>
+#include <time.h>
+#include "test.h"
+
+#define NSEC_PER_SEC 1000000000L
+
+static long long benchmark_iter(struct radix_tree_root *root, bool tagged)
+{
+ volatile unsigned long sink = 0;
+ struct radix_tree_iter iter;
+ struct timespec start, finish;
+ long long nsec;
+ int l, loops = 1;
+ void **slot;
+
+#ifdef BENCHMARK
+again:
+#endif
+ clock_gettime(CLOCK_MONOTONIC, &start);
+ for (l = 0; l < loops; l++) {
+ if (tagged) {
+ radix_tree_for_each_tagged(slot, root, &iter, 0, 0)
+ sink ^= (unsigned long)slot;
+ } else {
+ radix_tree_for_each_slot(slot, root, &iter, 0)
+ sink ^= (unsigned long)slot;
+ }
+ }
+ clock_gettime(CLOCK_MONOTONIC, &finish);
+
+ nsec = (finish.tv_sec - start.tv_sec) * NSEC_PER_SEC +
+ (finish.tv_nsec - start.tv_nsec);
+
+#ifdef BENCHMARK
+ if (loops == 1 && nsec * 5 < NSEC_PER_SEC) {
+ loops = NSEC_PER_SEC / nsec / 4 + 1;
+ goto again;
+ }
+#endif
+
+ nsec /= loops;
+ return nsec;
+}
+
+static void benchmark_size(unsigned long size, unsigned long step, int order)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ long long normal, tagged;
+ unsigned long index;
+
+ for (index = 0 ; index < size ; index += step) {
+ item_insert_order(&tree, index, order);
+ radix_tree_tag_set(&tree, index, 0);
+ }
+
+ tagged = benchmark_iter(&tree, true);
+ normal = benchmark_iter(&tree, false);
+
+ printf("Size %ld, step %6ld, order %d tagged %10lld ns, normal %10lld ns\n",
+ size, step, order, tagged, normal);
+
+ item_kill_tree(&tree);
+ rcu_barrier();
+}
+
+void benchmark(void)
+{
+ unsigned long size[] = {1 << 10, 1 << 20, 0};
+ unsigned long step[] = {1, 2, 7, 15, 63, 64, 65,
+ 128, 256, 512, 12345, 0};
+ int c, s;
+
+ printf("starting benchmarks\n");
+ printf("RADIX_TREE_MAP_SHIFT = %d\n", RADIX_TREE_MAP_SHIFT);
+
+ for (c = 0; size[c]; c++)
+ for (s = 0; step[s]; s++)
+ benchmark_size(size[c], step[s], 0);
+
+ for (c = 0; size[c]; c++)
+ for (s = 0; step[s]; s++)
+ benchmark_size(size[c], step[s] << 9, 9);
+}
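A short worked example of the loop calibration in benchmark_iter() above (my own arithmetic, not from the patch; the 2 ms first-pass timing is hypothetical): in a BENCHMARK build, if a single pass over the tree takes less than a fifth of a second, the loop count is scaled so the repeated measurement covers roughly a quarter of a second, which keeps the reported per-pass time from being dominated by timer noise.

#include <stdio.h>

#define NSEC_PER_SEC 1000000000L

int main(void)
{
	/* hypothetical first pass: 2 ms for one iteration over the tree */
	long long nsec = 2 * 1000 * 1000;
	int loops = 1;

	if (loops == 1 && nsec * 5 < NSEC_PER_SEC) {
		/* same scaling as benchmark_iter(): about 0.25 s of work */
		loops = NSEC_PER_SEC / nsec / 4 + 1;
	}
	printf("loops = %d (~%lld ms total)\n", loops, loops * nsec / 1000000);
	return 0;
}
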
diff --git a/tools/testing/radix-tree/find_next_bit.c b/tools/testing/radix-tree/find_next_bit.c
deleted file mode 100644
index d1c2178bb2d4..000000000000
--- a/tools/testing/radix-tree/find_next_bit.c
+++ /dev/null
@@ -1,57 +0,0 @@
-/* find_next_bit.c: fallback find next bit implementation
- *
- * Copyright (C) 2004 Red Hat, Inc. All Rights Reserved.
- * Written by David Howells (dhowells@redhat.com)
- *
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of the GNU General Public License
- * as published by the Free Software Foundation; either version
- * 2 of the License, or (at your option) any later version.
- */
-
-#include <linux/types.h>
-#include <linux/bitops.h>
-
-#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
-
-/*
- * Find the next set bit in a memory region.
- */
-unsigned long find_next_bit(const unsigned long *addr, unsigned long size,
- unsigned long offset)
-{
- const unsigned long *p = addr + BITOP_WORD(offset);
- unsigned long result = offset & ~(BITS_PER_LONG-1);
- unsigned long tmp;
-
- if (offset >= size)
- return size;
- size -= result;
- offset %= BITS_PER_LONG;
- if (offset) {
- tmp = *(p++);
- tmp &= (~0UL << offset);
- if (size < BITS_PER_LONG)
- goto found_first;
- if (tmp)
- goto found_middle;
- size -= BITS_PER_LONG;
- result += BITS_PER_LONG;
- }
- while (size & ~(BITS_PER_LONG-1)) {
- if ((tmp = *(p++)))
- goto found_middle;
- result += BITS_PER_LONG;
- size -= BITS_PER_LONG;
- }
- if (!size)
- return result;
- tmp = *p;
-
-found_first:
- tmp &= (~0UL >> (BITS_PER_LONG - size));
- if (tmp == 0UL) /* Are any bits set? */
- return result + size; /* Nope. */
-found_middle:
- return result + __ffs(tmp);
-}
diff --git a/tools/testing/radix-tree/iteration_check.c b/tools/testing/radix-tree/iteration_check.c
index 9adb8e7415a6..7572b7ed930e 100644
--- a/tools/testing/radix-tree/iteration_check.c
+++ b/tools/testing/radix-tree/iteration_check.c
@@ -16,35 +16,50 @@
#include <pthread.h>
#include "test.h"
-#define NUM_THREADS 4
-#define TAG 0
+#define NUM_THREADS 5
+#define MAX_IDX 100
+#define TAG 0
+#define NEW_TAG 1
+
static pthread_mutex_t tree_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_t threads[NUM_THREADS];
-RADIX_TREE(tree, GFP_KERNEL);
-bool test_complete;
+static unsigned int seeds[3];
+static RADIX_TREE(tree, GFP_KERNEL);
+static bool test_complete;
+static int max_order;
/* relentlessly fill the tree with tagged entries */
static void *add_entries_fn(void *arg)
{
- int pgoff;
+ rcu_register_thread();
while (!test_complete) {
- for (pgoff = 0; pgoff < 100; pgoff++) {
+ unsigned long pgoff;
+ int order;
+
+ for (pgoff = 0; pgoff < MAX_IDX; pgoff++) {
pthread_mutex_lock(&tree_lock);
- if (item_insert(&tree, pgoff) == 0)
- item_tag_set(&tree, pgoff, TAG);
+ for (order = max_order; order >= 0; order--) {
+ if (item_insert_order(&tree, pgoff, order)
+ == 0) {
+ item_tag_set(&tree, pgoff, TAG);
+ break;
+ }
+ }
pthread_mutex_unlock(&tree_lock);
}
}
+ rcu_unregister_thread();
+
return NULL;
}
/*
* Iterate over the tagged entries, doing a radix_tree_iter_retry() as we find
* things that have been removed and randomly resetting our iteration to the
- * next chunk with radix_tree_iter_next(). Both radix_tree_iter_retry() and
- * radix_tree_iter_next() cause radix_tree_next_slot() to be called with a
+ * next chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
+ * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
* NULL 'slot' variable.
*/
static void *tagged_iteration_fn(void *arg)
@@ -52,17 +67,12 @@ static void *tagged_iteration_fn(void *arg)
struct radix_tree_iter iter;
void **slot;
+ rcu_register_thread();
+
while (!test_complete) {
rcu_read_lock();
radix_tree_for_each_tagged(slot, &tree, &iter, 0, TAG) {
- void *entry;
- int i;
-
- /* busy wait to let removals happen */
- for (i = 0; i < 1000000; i++)
- ;
-
- entry = radix_tree_deref_slot(slot);
+ void *entry = radix_tree_deref_slot(slot);
if (unlikely(!entry))
continue;
@@ -71,20 +81,26 @@ static void *tagged_iteration_fn(void *arg)
continue;
}
- if (rand() % 50 == 0)
- slot = radix_tree_iter_next(&iter);
+ if (rand_r(&seeds[0]) % 50 == 0) {
+ slot = radix_tree_iter_resume(slot, &iter);
+ rcu_read_unlock();
+ rcu_barrier();
+ rcu_read_lock();
+ }
}
rcu_read_unlock();
}
+ rcu_unregister_thread();
+
return NULL;
}
/*
* Iterate over the entries, doing a radix_tree_iter_retry() as we find things
* that have been removed and randomly resetting our iteration to the next
- * chunk with radix_tree_iter_next(). Both radix_tree_iter_retry() and
- * radix_tree_iter_next() cause radix_tree_next_slot() to be called with a
+ * chunk with radix_tree_iter_resume(). Both radix_tree_iter_retry() and
+ * radix_tree_iter_resume() cause radix_tree_next_slot() to be called with a
* NULL 'slot' variable.
*/
static void *untagged_iteration_fn(void *arg)
@@ -92,17 +108,12 @@ static void *untagged_iteration_fn(void *arg)
struct radix_tree_iter iter;
void **slot;
+ rcu_register_thread();
+
while (!test_complete) {
rcu_read_lock();
radix_tree_for_each_slot(slot, &tree, &iter, 0) {
- void *entry;
- int i;
-
- /* busy wait to let removals happen */
- for (i = 0; i < 1000000; i++)
- ;
-
- entry = radix_tree_deref_slot(slot);
+ void *entry = radix_tree_deref_slot(slot);
if (unlikely(!entry))
continue;
@@ -111,12 +122,18 @@ static void *untagged_iteration_fn(void *arg)
continue;
}
- if (rand() % 50 == 0)
- slot = radix_tree_iter_next(&iter);
+ if (rand_r(&seeds[1]) % 50 == 0) {
+ slot = radix_tree_iter_resume(slot, &iter);
+ rcu_read_unlock();
+ rcu_barrier();
+ rcu_read_lock();
+ }
}
rcu_read_unlock();
}
+ rcu_unregister_thread();
+
return NULL;
}
@@ -126,47 +143,71 @@ static void *untagged_iteration_fn(void *arg)
*/
static void *remove_entries_fn(void *arg)
{
+ rcu_register_thread();
+
while (!test_complete) {
int pgoff;
- pgoff = rand() % 100;
+ pgoff = rand_r(&seeds[2]) % MAX_IDX;
pthread_mutex_lock(&tree_lock);
item_delete(&tree, pgoff);
pthread_mutex_unlock(&tree_lock);
}
+ rcu_unregister_thread();
+
+ return NULL;
+}
+
+static void *tag_entries_fn(void *arg)
+{
+ rcu_register_thread();
+
+ while (!test_complete) {
+ tag_tagged_items(&tree, &tree_lock, 0, MAX_IDX, 10, TAG,
+ NEW_TAG);
+ }
+ rcu_unregister_thread();
return NULL;
}
/* This is a unit test for a bug found by the syzkaller tester */
-void iteration_test(void)
+void iteration_test(unsigned order, unsigned test_duration)
{
int i;
- printf("Running iteration tests for 10 seconds\n");
+ printf("Running %siteration tests for %d seconds\n",
+ order > 0 ? "multiorder " : "", test_duration);
- srand(time(0));
+ max_order = order;
test_complete = false;
+ for (i = 0; i < 3; i++)
+ seeds[i] = rand();
+
if (pthread_create(&threads[0], NULL, tagged_iteration_fn, NULL)) {
- perror("pthread_create");
+ perror("create tagged iteration thread");
exit(1);
}
if (pthread_create(&threads[1], NULL, untagged_iteration_fn, NULL)) {
- perror("pthread_create");
+ perror("create untagged iteration thread");
exit(1);
}
if (pthread_create(&threads[2], NULL, add_entries_fn, NULL)) {
- perror("pthread_create");
+ perror("create add entry thread");
exit(1);
}
if (pthread_create(&threads[3], NULL, remove_entries_fn, NULL)) {
- perror("pthread_create");
+ perror("create remove entry thread");
+ exit(1);
+ }
+ if (pthread_create(&threads[4], NULL, tag_entries_fn, NULL)) {
+ perror("create tag entry thread");
exit(1);
}
- sleep(10);
+ sleep(test_duration);
test_complete = true;
for (i = 0; i < NUM_THREADS; i++) {
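Distilled from the iteration threads above (a sketch only: walk_with_pauses() is a made-up name, and the snippet is meant to compile inside this test harness, since it relies on the harness's <linux/radix-tree.h> and liburcu): radix_tree_iter_resume() records the current position so the next radix_tree_next_slot() falls back to the slow path, which is what lets a walker drop the RCU read lock in the middle of a walk and continue afterwards.

#include <linux/radix-tree.h>

/*
 * Sketch: visit every slot, and every 16 entries pause the walk so the RCU
 * read lock can be dropped (letting concurrent deleters make progress),
 * then continue from the recorded position.
 */
void walk_with_pauses(struct radix_tree_root *root)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned long seen = 0;

	rcu_read_lock();
	radix_tree_for_each_slot(slot, root, &iter, 0) {
		void *entry = radix_tree_deref_slot(slot);

		if (!entry)
			continue;
		if (radix_tree_deref_retry(entry)) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++seen % 16 == 0) {
			slot = radix_tree_iter_resume(slot, &iter);
			rcu_read_unlock();
			/* no RCU-protected pointers are held across this gap */
			rcu_read_lock();
		}
	}
	rcu_read_unlock();
}
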
diff --git a/tools/testing/radix-tree/linux.c b/tools/testing/radix-tree/linux.c
index 154823737b20..d31ea7c9abec 100644
--- a/tools/testing/radix-tree/linux.c
+++ b/tools/testing/radix-tree/linux.c
@@ -1,14 +1,26 @@
#include <stdlib.h>
#include <string.h>
#include <malloc.h>
+#include <pthread.h>
#include <unistd.h>
#include <assert.h>
#include <linux/mempool.h>
+#include <linux/poison.h>
#include <linux/slab.h>
+#include <linux/radix-tree.h>
#include <urcu/uatomic.h>
int nr_allocated;
+int preempt_count;
+
+struct kmem_cache {
+ pthread_mutex_t lock;
+ int size;
+ int nr_objs;
+ void *objs;
+ void (*ctor)(void *);
+};
void *mempool_alloc(mempool_t *pool, int gfp_mask)
{
@@ -33,19 +45,59 @@ mempool_t *mempool_create(int min_nr, mempool_alloc_t *alloc_fn,
void *kmem_cache_alloc(struct kmem_cache *cachep, int flags)
{
- void *ret = malloc(cachep->size);
- if (cachep->ctor)
- cachep->ctor(ret);
+ struct radix_tree_node *node;
+
+ if (flags & __GFP_NOWARN)
+ return NULL;
+
+ pthread_mutex_lock(&cachep->lock);
+ if (cachep->nr_objs) {
+ cachep->nr_objs--;
+ node = cachep->objs;
+ cachep->objs = node->private_data;
+ pthread_mutex_unlock(&cachep->lock);
+ node->private_data = NULL;
+ } else {
+ pthread_mutex_unlock(&cachep->lock);
+ node = malloc(cachep->size);
+ if (cachep->ctor)
+ cachep->ctor(node);
+ }
+
uatomic_inc(&nr_allocated);
- return ret;
+ return node;
}
void kmem_cache_free(struct kmem_cache *cachep, void *objp)
{
assert(objp);
uatomic_dec(&nr_allocated);
- memset(objp, 0, cachep->size);
- free(objp);
+ pthread_mutex_lock(&cachep->lock);
+ if (cachep->nr_objs > 10) {
+ memset(objp, POISON_FREE, cachep->size);
+ free(objp);
+ } else {
+ struct radix_tree_node *node = objp;
+ cachep->nr_objs++;
+ node->private_data = cachep->objs;
+ cachep->objs = node;
+ }
+ pthread_mutex_unlock(&cachep->lock);
+}
+
+void *kmalloc(size_t size, gfp_t gfp)
+{
+ void *ret = malloc(size);
+ uatomic_inc(&nr_allocated);
+ return ret;
+}
+
+void kfree(void *p)
+{
+ if (!p)
+ return;
+ uatomic_dec(&nr_allocated);
+ free(p);
}
struct kmem_cache *
@@ -54,7 +106,10 @@ kmem_cache_create(const char *name, size_t size, size_t offset,
{
struct kmem_cache *ret = malloc(sizeof(*ret));
+ pthread_mutex_init(&ret->lock, NULL);
ret->size = size;
+ ret->nr_objs = 0;
+ ret->objs = NULL;
ret->ctor = ctor;
return ret;
}
diff --git a/tools/testing/radix-tree/linux/bitops.h b/tools/testing/radix-tree/linux/bitops.h
index 71d58427ab60..a13e9bc76eec 100644
--- a/tools/testing/radix-tree/linux/bitops.h
+++ b/tools/testing/radix-tree/linux/bitops.h
@@ -2,9 +2,14 @@
#define _ASM_GENERIC_BITOPS_NON_ATOMIC_H_
#include <linux/types.h>
+#include <linux/bitops/find.h>
+#include <linux/bitops/hweight.h>
+#include <linux/kernel.h>
-#define BITOP_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
-#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
+#define BIT_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
+#define BIT_WORD(nr) ((nr) / BITS_PER_LONG)
+#define BITS_PER_BYTE 8
+#define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
/**
* __set_bit - Set a bit in memory
@@ -17,16 +22,16 @@
*/
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p |= mask;
}
static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p &= ~mask;
}
@@ -42,8 +47,8 @@ static inline void __clear_bit(int nr, volatile unsigned long *addr)
*/
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
*p ^= mask;
}
@@ -59,8 +64,8 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
*/
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
unsigned long old = *p;
*p = old | mask;
@@ -78,8 +83,8 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
*/
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
unsigned long old = *p;
*p = old & ~mask;
@@ -90,8 +95,8 @@ static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
static inline int __test_and_change_bit(int nr,
volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
- unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
+ unsigned long mask = BIT_MASK(nr);
+ unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
unsigned long old = *p;
*p = old ^ mask;
@@ -105,7 +110,7 @@ static inline int __test_and_change_bit(int nr,
*/
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
- return 1UL & (addr[BITOP_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
+ return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}
/**
@@ -147,4 +152,9 @@ unsigned long find_next_bit(const unsigned long *addr,
unsigned long size,
unsigned long offset);
+static inline unsigned long hweight_long(unsigned long w)
+{
+ return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
+}
+
#endif /* _ASM_GENERIC_BITOPS_NON_ATOMIC_H_ */
diff --git a/tools/testing/radix-tree/linux/bitops/non-atomic.h b/tools/testing/radix-tree/linux/bitops/non-atomic.h
index 46a825cf2ae1..6a1bcb9d2c4a 100644
--- a/tools/testing/radix-tree/linux/bitops/non-atomic.h
+++ b/tools/testing/radix-tree/linux/bitops/non-atomic.h
@@ -3,7 +3,6 @@
#include <asm/types.h>
-#define BITOP_MASK(nr) (1UL << ((nr) % BITS_PER_LONG))
#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)
/**
@@ -17,7 +16,7 @@
*/
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
*p |= mask;
@@ -25,7 +24,7 @@ static inline void __set_bit(int nr, volatile unsigned long *addr)
static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
*p &= ~mask;
@@ -42,7 +41,7 @@ static inline void __clear_bit(int nr, volatile unsigned long *addr)
*/
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
*p ^= mask;
@@ -59,7 +58,7 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
*/
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
unsigned long old = *p;
@@ -78,7 +77,7 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
*/
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
unsigned long old = *p;
@@ -90,7 +89,7 @@ static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
static inline int __test_and_change_bit(int nr,
volatile unsigned long *addr)
{
- unsigned long mask = BITOP_MASK(nr);
+ unsigned long mask = BIT_MASK(nr);
unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);
unsigned long old = *p;
diff --git a/tools/testing/radix-tree/linux/bug.h b/tools/testing/radix-tree/linux/bug.h
index ccbe444977df..23b8ed52f8c8 100644
--- a/tools/testing/radix-tree/linux/bug.h
+++ b/tools/testing/radix-tree/linux/bug.h
@@ -1 +1 @@
-#define WARN_ON_ONCE(x) assert(x)
+#include "asm/bug.h"
diff --git a/tools/testing/radix-tree/linux/gfp.h b/tools/testing/radix-tree/linux/gfp.h
index 5201b915f631..5b09b2ce6c33 100644
--- a/tools/testing/radix-tree/linux/gfp.h
+++ b/tools/testing/radix-tree/linux/gfp.h
@@ -3,8 +3,24 @@
#define __GFP_BITS_SHIFT 26
#define __GFP_BITS_MASK ((gfp_t)((1 << __GFP_BITS_SHIFT) - 1))
-#define __GFP_WAIT 1
-#define __GFP_ACCOUNT 0
-#define __GFP_NOWARN 0
+
+#define __GFP_HIGH 0x20u
+#define __GFP_IO 0x40u
+#define __GFP_FS 0x80u
+#define __GFP_NOWARN 0x200u
+#define __GFP_ATOMIC 0x80000u
+#define __GFP_ACCOUNT 0x100000u
+#define __GFP_DIRECT_RECLAIM 0x400000u
+#define __GFP_KSWAPD_RECLAIM 0x2000000u
+
+#define __GFP_RECLAIM (__GFP_DIRECT_RECLAIM|__GFP_KSWAPD_RECLAIM)
+
+#define GFP_ATOMIC (__GFP_HIGH|__GFP_ATOMIC|__GFP_KSWAPD_RECLAIM)
+#define GFP_KERNEL (__GFP_RECLAIM | __GFP_IO | __GFP_FS)
+
+static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags)
+{
+ return !!(gfp_flags & __GFP_DIRECT_RECLAIM);
+}
#endif
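A tiny check (illustrative, not part of the patch; the flag values are copied from the header above and gfp_t is typedef'd locally for self-containment) of what the new definitions imply: GFP_KERNEL carries __GFP_DIRECT_RECLAIM via __GFP_RECLAIM, so gfpflags_allow_blocking() reports true for it, while GFP_ATOMIC only carries __GFP_KSWAPD_RECLAIM and reports false, mirroring the kernel's behaviour.

#include <stdio.h>
#include <stdbool.h>

typedef unsigned gfp_t;

/* values copied from the header above */
#define __GFP_HIGH		0x20u
#define __GFP_IO		0x40u
#define __GFP_FS		0x80u
#define __GFP_ATOMIC		0x80000u
#define __GFP_DIRECT_RECLAIM	0x400000u
#define __GFP_KSWAPD_RECLAIM	0x2000000u
#define __GFP_RECLAIM		(__GFP_DIRECT_RECLAIM|__GFP_KSWAPD_RECLAIM)

#define GFP_ATOMIC	(__GFP_HIGH|__GFP_ATOMIC|__GFP_KSWAPD_RECLAIM)
#define GFP_KERNEL	(__GFP_RECLAIM | __GFP_IO | __GFP_FS)

static bool gfpflags_allow_blocking(gfp_t gfp_flags)
{
	return !!(gfp_flags & __GFP_DIRECT_RECLAIM);
}

int main(void)
{
	printf("GFP_KERNEL may block: %d\n", gfpflags_allow_blocking(GFP_KERNEL));
	printf("GFP_ATOMIC may block: %d\n", gfpflags_allow_blocking(GFP_ATOMIC));
	return 0;
}
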
diff --git a/tools/testing/radix-tree/linux/kernel.h b/tools/testing/radix-tree/linux/kernel.h
index be98a47b4e1b..9b43b4975d83 100644
--- a/tools/testing/radix-tree/linux/kernel.h
+++ b/tools/testing/radix-tree/linux/kernel.h
@@ -8,9 +8,14 @@
#include <limits.h>
#include "../../include/linux/compiler.h"
+#include "../../include/linux/err.h"
#include "../../../include/linux/kconfig.h"
+#ifdef BENCHMARK
+#define RADIX_TREE_MAP_SHIFT 6
+#else
#define RADIX_TREE_MAP_SHIFT 3
+#endif
#ifndef NULL
#define NULL 0
@@ -43,4 +48,17 @@ static inline int in_interrupt(void)
{
return 0;
}
+
+/*
+ * This looks more complex than it should be. But we need to
+ * get the type for the ~ right in round_down (it needs to be
+ * as wide as the result!), and we want to evaluate the macro
+ * arguments just once each.
+ */
+#define __round_mask(x, y) ((__typeof__(x))((y)-1))
+#define round_up(x, y) ((((x)-1) | __round_mask(x, y))+1)
+#define round_down(x, y) ((x) & ~__round_mask(x, y))
+
+#define xchg(ptr, x) uatomic_xchg(ptr, x)
+
#endif /* _KERNEL_H */
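To make the comment above concrete (a worked example, not from the patch; the macros are copied verbatim from the hunk): for a power-of-two y, round_down(x, y) clears the low bits of x and round_up(x, y) bumps x to the next multiple, and the __typeof__(x) cast keeps the mask the same width as x so masking a 64-bit value never drops its high bits.

#include <stdio.h>

#define __round_mask(x, y) ((__typeof__(x))((y)-1))
#define round_up(x, y)   ((((x)-1) | __round_mask(x, y))+1)
#define round_down(x, y) ((x) & ~__round_mask(x, y))

int main(void)
{
	unsigned long x = 0x100000007UL;	/* wider than 32 bits on LP64 */

	printf("round_down(13, 8) = %d\n", round_down(13, 8));
	printf("round_up(13, 8)   = %d\n", round_up(13, 8));
	/* the cast makes the mask unsigned long, so bit 32 of x survives */
	printf("round_down(x, 8)  = %#lx\n", round_down(x, 8));
	return 0;
}
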
diff --git a/tools/testing/radix-tree/linux/preempt.h b/tools/testing/radix-tree/linux/preempt.h
index 6210672e3baa..65c04c226965 100644
--- a/tools/testing/radix-tree/linux/preempt.h
+++ b/tools/testing/radix-tree/linux/preempt.h
@@ -1,4 +1,4 @@
-/* */
+extern int preempt_count;
-#define preempt_disable() do { } while (0)
-#define preempt_enable() do { } while (0)
+#define preempt_disable() uatomic_inc(&preempt_count)
+#define preempt_enable() uatomic_dec(&preempt_count)
diff --git a/tools/testing/radix-tree/linux/slab.h b/tools/testing/radix-tree/linux/slab.h
index 6d5a34770fd4..e40337f41a38 100644
--- a/tools/testing/radix-tree/linux/slab.h
+++ b/tools/testing/radix-tree/linux/slab.h
@@ -7,15 +7,8 @@
#define SLAB_PANIC 2
#define SLAB_RECLAIM_ACCOUNT 0x00020000UL /* Objects are reclaimable */
-static inline int gfpflags_allow_blocking(gfp_t mask)
-{
- return 1;
-}
-
-struct kmem_cache {
- int size;
- void (*ctor)(void *);
-};
+void *kmalloc(size_t size, gfp_t);
+void kfree(void *);
void *kmem_cache_alloc(struct kmem_cache *cachep, int flags);
void kmem_cache_free(struct kmem_cache *cachep, void *objp);
diff --git a/tools/testing/radix-tree/linux/types.h b/tools/testing/radix-tree/linux/types.h
index faa0b6ff9ca8..8491d89873bb 100644
--- a/tools/testing/radix-tree/linux/types.h
+++ b/tools/testing/radix-tree/linux/types.h
@@ -6,8 +6,6 @@
#define __rcu
#define __read_mostly
-#define BITS_PER_LONG (sizeof(long) * 8)
-
static inline void INIT_LIST_HEAD(struct list_head *list)
{
list->next = list;
diff --git a/tools/testing/radix-tree/main.c b/tools/testing/radix-tree/main.c
index daa9010693e8..f7e9801a6754 100644
--- a/tools/testing/radix-tree/main.c
+++ b/tools/testing/radix-tree/main.c
@@ -67,7 +67,6 @@ void big_gang_check(bool long_run)
for (i = 0; i < (long_run ? 1000 : 3); i++) {
__big_gang_check();
- srand(time(0));
printf("%d ", i);
fflush(stdout);
}
@@ -206,8 +205,7 @@ void copy_tag_check(void)
}
// printf("\ncopying tags...\n");
- cur = start;
- tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, ITEMS, 0, 1);
+ tagged = tag_tagged_items(&tree, NULL, start, end, ITEMS, 0, 1);
// printf("checking copied tags\n");
assert(tagged == count);
@@ -215,16 +213,13 @@ void copy_tag_check(void)
/* Copy tags in several rounds */
// printf("\ncopying tags...\n");
- cur = start;
- do {
- tmp = rand() % (count/10+2);
- tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, tmp, 0, 2);
- } while (tmp == tagged);
+ tmp = rand() % (count / 10 + 2);
+ tagged = tag_tagged_items(&tree, NULL, start, end, tmp, 0, 2);
+ assert(tagged == count);
// printf("%lu %lu %lu\n", tagged, tmp, count);
// printf("checking copied tags\n");
check_copied_tags(&tree, start, end, idx, ITEMS, 0, 2);
- assert(tagged < tmp);
verify_tag_consistency(&tree, 0);
verify_tag_consistency(&tree, 1);
verify_tag_consistency(&tree, 2);
@@ -240,7 +235,7 @@ static void __locate_check(struct radix_tree_root *tree, unsigned long index,
item_insert_order(tree, index, order);
item = item_lookup(tree, index);
- index2 = radix_tree_locate_item(tree, item);
+ index2 = find_item(tree, item);
if (index != index2) {
printf("index %ld order %d inserted; found %ld\n",
index, order, index2);
@@ -274,17 +269,17 @@ static void locate_check(void)
index += (1UL << order)) {
__locate_check(&tree, index + offset, order);
}
- if (radix_tree_locate_item(&tree, &tree) != -1)
+ if (find_item(&tree, &tree) != -1)
abort();
item_kill_tree(&tree);
}
}
- if (radix_tree_locate_item(&tree, &tree) != -1)
+ if (find_item(&tree, &tree) != -1)
abort();
__locate_check(&tree, -1, 0);
- if (radix_tree_locate_item(&tree, &tree) != -1)
+ if (find_item(&tree, &tree) != -1)
abort();
item_kill_tree(&tree);
}
@@ -293,50 +288,80 @@ static void single_thread_tests(bool long_run)
{
int i;
- printf("starting single_thread_tests: %d allocated\n", nr_allocated);
+ printf("starting single_thread_tests: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
multiorder_checks();
- printf("after multiorder_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after multiorder_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
locate_check();
- printf("after locate_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after locate_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
tag_check();
- printf("after tag_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after tag_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
gang_check();
- printf("after gang_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after gang_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
add_and_check();
- printf("after add_and_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after add_and_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
dynamic_height_check();
- printf("after dynamic_height_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after dynamic_height_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
big_gang_check(long_run);
- printf("after big_gang_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after big_gang_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
for (i = 0; i < (long_run ? 2000 : 3); i++) {
copy_tag_check();
printf("%d ", i);
fflush(stdout);
}
- printf("after copy_tag_check: %d allocated\n", nr_allocated);
+ rcu_barrier();
+ printf("after copy_tag_check: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
}
int main(int argc, char **argv)
{
bool long_run = false;
int opt;
+ unsigned int seed = time(NULL);
- while ((opt = getopt(argc, argv, "l")) != -1) {
+ while ((opt = getopt(argc, argv, "ls:")) != -1) {
if (opt == 'l')
long_run = true;
+ else if (opt == 's')
+ seed = strtoul(optarg, NULL, 0);
}
+ printf("random seed %u\n", seed);
+ srand(seed);
+
rcu_register_thread();
radix_tree_init();
regression1_test();
regression2_test();
regression3_test();
- iteration_test();
+ iteration_test(0, 10);
+ iteration_test(7, 20);
single_thread_tests(long_run);
- sleep(1);
- printf("after sleep(1): %d allocated\n", nr_allocated);
+ /* Free any remaining preallocated nodes */
+ radix_tree_cpu_dead(0);
+
+ benchmark();
+
+ rcu_barrier();
+ printf("after rcu_barrier: %d allocated, preempt %d\n",
+ nr_allocated, preempt_count);
rcu_unregister_thread();
exit(0);
diff --git a/tools/testing/radix-tree/multiorder.c b/tools/testing/radix-tree/multiorder.c
index d1be94667a30..f79812a5e070 100644
--- a/tools/testing/radix-tree/multiorder.c
+++ b/tools/testing/radix-tree/multiorder.c
@@ -26,7 +26,6 @@ static void __multiorder_tag_test(int index, int order)
{
RADIX_TREE(tree, GFP_KERNEL);
int base, err, i;
- unsigned long first = 0;
/* our canonical entry */
base = index & ~((1 << order) - 1);
@@ -60,7 +59,7 @@ static void __multiorder_tag_test(int index, int order)
assert(!radix_tree_tag_get(&tree, i, 1));
}
- assert(radix_tree_range_tag_if_tagged(&tree, &first, ~0UL, 10, 0, 1) == 1);
+ assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 1);
assert(radix_tree_tag_clear(&tree, index, 0));
for_each_index(i, base, order) {
@@ -76,8 +75,27 @@ static void __multiorder_tag_test(int index, int order)
item_kill_tree(&tree);
}
+static void __multiorder_tag_test2(unsigned order, unsigned long index2)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ unsigned long index = (1 << order);
+ index2 += index;
+
+ assert(item_insert_order(&tree, 0, order) == 0);
+ assert(item_insert(&tree, index2) == 0);
+
+ assert(radix_tree_tag_set(&tree, 0, 0));
+ assert(radix_tree_tag_set(&tree, index2, 0));
+
+ assert(tag_tagged_items(&tree, NULL, 0, ~0UL, 10, 0, 1) == 2);
+
+ item_kill_tree(&tree);
+}
+
static void multiorder_tag_tests(void)
{
+ int i, j;
+
/* test multi-order entry for indices 0-7 with no sibling pointers */
__multiorder_tag_test(0, 3);
__multiorder_tag_test(5, 3);
@@ -117,6 +135,10 @@ static void multiorder_tag_tests(void)
__multiorder_tag_test(300, 8);
__multiorder_tag_test(0x12345678UL, 8);
+
+ for (i = 1; i < 10; i++)
+ for (j = 0; j < (10 << i); j++)
+ __multiorder_tag_test2(i, j);
}
static void multiorder_check(unsigned long index, int order)
@@ -125,7 +147,7 @@ static void multiorder_check(unsigned long index, int order)
unsigned long min = index & ~((1UL << order) - 1);
unsigned long max = min + (1UL << order);
void **slot;
- struct item *item2 = item_create(min);
+ struct item *item2 = item_create(min, order);
RADIX_TREE(tree, GFP_KERNEL);
printf("Multiorder index %ld, order %d\n", index, order);
@@ -231,11 +253,14 @@ void multiorder_iteration(void)
radix_tree_for_each_slot(slot, &tree, &iter, j) {
int height = order[i] / RADIX_TREE_MAP_SHIFT;
int shift = height * RADIX_TREE_MAP_SHIFT;
- int mask = (1 << order[i]) - 1;
+ unsigned long mask = (1UL << order[i]) - 1;
+ struct item *item = *slot;
- assert(iter.index >= (index[i] &~ mask));
- assert(iter.index <= (index[i] | mask));
+ assert((iter.index | mask) == (index[i] | mask));
assert(iter.shift == shift);
+ assert(!radix_tree_is_internal_node(item));
+ assert((item->index | mask) == (index[i] | mask));
+ assert(item->order == order[i]);
i++;
}
}
@@ -248,7 +273,6 @@ void multiorder_tagged_iteration(void)
RADIX_TREE(tree, GFP_KERNEL);
struct radix_tree_iter iter;
void **slot;
- unsigned long first = 0;
int i, j;
printf("Multiorder tagged iteration test\n");
@@ -269,7 +293,7 @@ void multiorder_tagged_iteration(void)
assert(radix_tree_tag_set(&tree, tag_index[i], 1));
for (j = 0; j < 256; j++) {
- int mask, k;
+ int k;
for (i = 0; i < TAG_ENTRIES; i++) {
for (k = i; index[k] < tag_index[i]; k++)
@@ -279,18 +303,22 @@ void multiorder_tagged_iteration(void)
}
radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) {
+ unsigned long mask;
+ struct item *item = *slot;
for (k = i; index[k] < tag_index[i]; k++)
;
- mask = (1 << order[k]) - 1;
+ mask = (1UL << order[k]) - 1;
- assert(iter.index >= (tag_index[i] &~ mask));
- assert(iter.index <= (tag_index[i] | mask));
+ assert((iter.index | mask) == (tag_index[i] | mask));
+ assert(!radix_tree_is_internal_node(item));
+ assert((item->index | mask) == (tag_index[i] | mask));
+ assert(item->order == order[k]);
i++;
}
}
- radix_tree_range_tag_if_tagged(&tree, &first, ~0UL,
- MT_NUM_ENTRIES, 1, 2);
+ assert(tag_tagged_items(&tree, NULL, 0, ~0UL, TAG_ENTRIES, 1, 2) ==
+ TAG_ENTRIES);
for (j = 0; j < 256; j++) {
int mask, k;
@@ -303,19 +331,21 @@ void multiorder_tagged_iteration(void)
}
radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) {
+ struct item *item = *slot;
for (k = i; index[k] < tag_index[i]; k++)
;
mask = (1 << order[k]) - 1;
- assert(iter.index >= (tag_index[i] &~ mask));
- assert(iter.index <= (tag_index[i] | mask));
+ assert((iter.index | mask) == (tag_index[i] | mask));
+ assert(!radix_tree_is_internal_node(item));
+ assert((item->index | mask) == (tag_index[i] | mask));
+ assert(item->order == order[k]);
i++;
}
}
- first = 1;
- radix_tree_range_tag_if_tagged(&tree, &first, ~0UL,
- MT_NUM_ENTRIES, 1, 0);
+ assert(tag_tagged_items(&tree, NULL, 1, ~0UL, MT_NUM_ENTRIES * 2, 1, 0)
+ == TAG_ENTRIES);
i = 0;
radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) {
assert(iter.index == tag_index[i]);
@@ -325,6 +355,261 @@ void multiorder_tagged_iteration(void)
item_kill_tree(&tree);
}
+static void multiorder_join1(unsigned long index,
+ unsigned order1, unsigned order2)
+{
+ unsigned long loc;
+ void *item, *item2 = item_create(index + 1, order1);
+ RADIX_TREE(tree, GFP_KERNEL);
+
+ item_insert_order(&tree, index, order2);
+ item = radix_tree_lookup(&tree, index);
+ radix_tree_join(&tree, index + 1, order1, item2);
+ loc = find_item(&tree, item);
+ if (loc == -1)
+ free(item);
+ item = radix_tree_lookup(&tree, index + 1);
+ assert(item == item2);
+ item_kill_tree(&tree);
+}
+
+static void multiorder_join2(unsigned order1, unsigned order2)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ struct radix_tree_node *node;
+ void *item1 = item_create(0, order1);
+ void *item2;
+
+ item_insert_order(&tree, 0, order2);
+ radix_tree_insert(&tree, 1 << order2, (void *)0x12UL);
+ item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
+ assert(item2 == (void *)0x12UL);
+ assert(node->exceptional == 1);
+
+ radix_tree_join(&tree, 0, order1, item1);
+ item2 = __radix_tree_lookup(&tree, 1 << order2, &node, NULL);
+ assert(item2 == item1);
+ assert(node->exceptional == 0);
+ item_kill_tree(&tree);
+}
+
+/*
+ * This test revealed an accounting bug for exceptional entries at one point.
+ * Nodes were being freed back into the pool with an elevated exception count
+ * by radix_tree_join() and then radix_tree_split() was failing to zero the
+ * count of exceptional entries.
+ */
+static void multiorder_join3(unsigned int order)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ struct radix_tree_node *node;
+ void **slot;
+ struct radix_tree_iter iter;
+ unsigned long i;
+
+ for (i = 0; i < (1 << order); i++) {
+ radix_tree_insert(&tree, i, (void *)0x12UL);
+ }
+
+ radix_tree_join(&tree, 0, order, (void *)0x16UL);
+ rcu_barrier();
+
+ radix_tree_split(&tree, 0, 0);
+
+ radix_tree_for_each_slot(slot, &tree, &iter, 0) {
+ radix_tree_iter_replace(&tree, &iter, slot, (void *)0x12UL);
+ }
+
+ __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(node->exceptional == node->count);
+
+ item_kill_tree(&tree);
+}
+
+static void multiorder_join(void)
+{
+ int i, j, idx;
+
+ for (idx = 0; idx < 1024; idx = idx * 2 + 3) {
+ for (i = 1; i < 15; i++) {
+ for (j = 0; j < i; j++) {
+ multiorder_join1(idx, i, j);
+ }
+ }
+ }
+
+ for (i = 1; i < 15; i++) {
+ for (j = 0; j < i; j++) {
+ multiorder_join2(i, j);
+ }
+ }
+
+ for (i = 3; i < 10; i++) {
+ multiorder_join3(i);
+ }
+}
+
+static void check_mem(unsigned old_order, unsigned new_order, unsigned alloc)
+{
+ struct radix_tree_preload *rtp = &radix_tree_preloads;
+ if (rtp->nr != 0)
+ printf("split(%u %u) remaining %u\n", old_order, new_order,
+ rtp->nr);
+ /*
+ * Can't check for equality here as some nodes may have been
+ * RCU-freed while we ran. But we should never finish with more
+ * nodes allocated since they should have all been preloaded.
+ */
+ if (nr_allocated > alloc)
+ printf("split(%u %u) allocated %u %u\n", old_order, new_order,
+ alloc, nr_allocated);
+}
+
+static void __multiorder_split(int old_order, int new_order)
+{
+ RADIX_TREE(tree, GFP_ATOMIC);
+ void **slot;
+ struct radix_tree_iter iter;
+ unsigned alloc;
+
+ radix_tree_preload(GFP_KERNEL);
+ assert(item_insert_order(&tree, 0, old_order) == 0);
+ radix_tree_preload_end();
+
+ /* Wipe out the preloaded cache or it'll confuse check_mem() */
+ radix_tree_cpu_dead(0);
+
+ radix_tree_tag_set(&tree, 0, 2);
+
+ radix_tree_split_preload(old_order, new_order, GFP_KERNEL);
+ alloc = nr_allocated;
+ radix_tree_split(&tree, 0, new_order);
+ check_mem(old_order, new_order, alloc);
+ radix_tree_for_each_slot(slot, &tree, &iter, 0) {
+ radix_tree_iter_replace(&tree, &iter, slot,
+ item_create(iter.index, new_order));
+ }
+ radix_tree_preload_end();
+
+ item_kill_tree(&tree);
+}
+
+static void __multiorder_split2(int old_order, int new_order)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ void **slot;
+ struct radix_tree_iter iter;
+ struct radix_tree_node *node;
+ void *item;
+
+ __radix_tree_insert(&tree, 0, old_order, (void *)0x12);
+
+ item = __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(item == (void *)0x12);
+ assert(node->exceptional > 0);
+
+ radix_tree_split(&tree, 0, new_order);
+ radix_tree_for_each_slot(slot, &tree, &iter, 0) {
+ radix_tree_iter_replace(&tree, &iter, slot,
+ item_create(iter.index, new_order));
+ }
+
+ item = __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(item != (void *)0x12);
+ assert(node->exceptional == 0);
+
+ item_kill_tree(&tree);
+}
+
+static void __multiorder_split3(int old_order, int new_order)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ void **slot;
+ struct radix_tree_iter iter;
+ struct radix_tree_node *node;
+ void *item;
+
+ __radix_tree_insert(&tree, 0, old_order, (void *)0x12);
+
+ item = __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(item == (void *)0x12);
+ assert(node->exceptional > 0);
+
+ radix_tree_split(&tree, 0, new_order);
+ radix_tree_for_each_slot(slot, &tree, &iter, 0) {
+ radix_tree_iter_replace(&tree, &iter, slot, (void *)0x16);
+ }
+
+ item = __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(item == (void *)0x16);
+ assert(node->exceptional > 0);
+
+ item_kill_tree(&tree);
+
+ __radix_tree_insert(&tree, 0, old_order, (void *)0x12);
+
+ item = __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(item == (void *)0x12);
+ assert(node->exceptional > 0);
+
+ radix_tree_split(&tree, 0, new_order);
+ radix_tree_for_each_slot(slot, &tree, &iter, 0) {
+ if (iter.index == (1 << new_order))
+ radix_tree_iter_replace(&tree, &iter, slot,
+ (void *)0x16);
+ else
+ radix_tree_iter_replace(&tree, &iter, slot, NULL);
+ }
+
+ item = __radix_tree_lookup(&tree, 1 << new_order, &node, NULL);
+ assert(item == (void *)0x16);
+ assert(node->count == node->exceptional);
+ do {
+ node = node->parent;
+ if (!node)
+ break;
+ assert(node->count == 1);
+ assert(node->exceptional == 0);
+ } while (1);
+
+ item_kill_tree(&tree);
+}
+
+static void multiorder_split(void)
+{
+ int i, j;
+
+ for (i = 3; i < 11; i++)
+ for (j = 0; j < i; j++) {
+ __multiorder_split(i, j);
+ __multiorder_split2(i, j);
+ __multiorder_split3(i, j);
+ }
+}
+
+static void multiorder_account(void)
+{
+ RADIX_TREE(tree, GFP_KERNEL);
+ struct radix_tree_node *node;
+ void **slot;
+
+ item_insert_order(&tree, 0, 5);
+
+ __radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
+ __radix_tree_lookup(&tree, 0, &node, NULL);
+ assert(node->count == node->exceptional * 2);
+ radix_tree_delete(&tree, 1 << 5);
+ assert(node->exceptional == 0);
+
+ __radix_tree_insert(&tree, 1 << 5, 5, (void *)0x12);
+ __radix_tree_lookup(&tree, 1 << 5, &node, &slot);
+ assert(node->count == node->exceptional * 2);
+ __radix_tree_replace(&tree, node, slot, NULL, NULL, NULL);
+ assert(node->exceptional == 0);
+
+ item_kill_tree(&tree);
+}
+
void multiorder_checks(void)
{
int i;
@@ -342,4 +627,9 @@ void multiorder_checks(void)
multiorder_tag_tests();
multiorder_iteration();
multiorder_tagged_iteration();
+ multiorder_join();
+ multiorder_split();
+ multiorder_account();
+
+ radix_tree_cpu_dead(0);
}
diff --git a/tools/testing/radix-tree/rcupdate.c b/tools/testing/radix-tree/rcupdate.c
deleted file mode 100644
index 31a2d14225d6..000000000000
--- a/tools/testing/radix-tree/rcupdate.c
+++ /dev/null
@@ -1,86 +0,0 @@
-#include <linux/rcupdate.h>
-#include <pthread.h>
-#include <stdio.h>
-#include <assert.h>
-
-static pthread_mutex_t rculock = PTHREAD_MUTEX_INITIALIZER;
-static struct rcu_head *rcuhead_global = NULL;
-static __thread int nr_rcuhead = 0;
-static __thread struct rcu_head *rcuhead = NULL;
-static __thread struct rcu_head *rcutail = NULL;
-
-static pthread_cond_t rcu_worker_cond = PTHREAD_COND_INITIALIZER;
-
-/* switch to urcu implementation when it is merged. */
-void call_rcu(struct rcu_head *head, void (*func)(struct rcu_head *head))
-{
- head->func = func;
- head->next = rcuhead;
- rcuhead = head;
- if (!rcutail)
- rcutail = head;
- nr_rcuhead++;
- if (nr_rcuhead >= 1000) {
- int signal = 0;
-
- pthread_mutex_lock(&rculock);
- if (!rcuhead_global)
- signal = 1;
- rcutail->next = rcuhead_global;
- rcuhead_global = head;
- pthread_mutex_unlock(&rculock);
-
- nr_rcuhead = 0;
- rcuhead = NULL;
- rcutail = NULL;
-
- if (signal) {
- pthread_cond_signal(&rcu_worker_cond);
- }
- }
-}
-
-static void *rcu_worker(void *arg)
-{
- struct rcu_head *r;
-
- rcupdate_thread_init();
-
- while (1) {
- pthread_mutex_lock(&rculock);
- while (!rcuhead_global) {
- pthread_cond_wait(&rcu_worker_cond, &rculock);
- }
- r = rcuhead_global;
- rcuhead_global = NULL;
-
- pthread_mutex_unlock(&rculock);
-
- synchronize_rcu();
-
- while (r) {
- struct rcu_head *tmp = r->next;
- r->func(r);
- r = tmp;
- }
- }
-
- rcupdate_thread_exit();
-
- return NULL;
-}
-
-static pthread_t worker_thread;
-void rcupdate_init(void)
-{
- pthread_create(&worker_thread, NULL, rcu_worker, NULL);
-}
-
-void rcupdate_thread_init(void)
-{
- rcu_register_thread();
-}
-void rcupdate_thread_exit(void)
-{
- rcu_unregister_thread();
-}
diff --git a/tools/testing/radix-tree/regression2.c b/tools/testing/radix-tree/regression2.c
index 63bf347aaf33..a41325d7a170 100644
--- a/tools/testing/radix-tree/regression2.c
+++ b/tools/testing/radix-tree/regression2.c
@@ -50,6 +50,7 @@
#include <stdio.h>
#include "regression.h"
+#include "test.h"
#define PAGECACHE_TAG_DIRTY 0
#define PAGECACHE_TAG_WRITEBACK 1
@@ -90,7 +91,7 @@ void regression2_test(void)
/* 1. */
start = 0;
end = max_slots - 2;
- radix_tree_range_tag_if_tagged(&mt_tree, &start, end, 1,
+ tag_tagged_items(&mt_tree, NULL, start, end, 1,
PAGECACHE_TAG_DIRTY, PAGECACHE_TAG_TOWRITE);
/* 2. */
diff --git a/tools/testing/radix-tree/regression3.c b/tools/testing/radix-tree/regression3.c
index 1f06ed73d0a8..b594841fae85 100644
--- a/tools/testing/radix-tree/regression3.c
+++ b/tools/testing/radix-tree/regression3.c
@@ -5,7 +5,7 @@
* In following radix_tree_next_slot current chunk size becomes zero.
* This isn't checked and it tries to dereference null pointer in slot.
*
- * Helper radix_tree_iter_next reset slot to NULL and next_index to index + 1,
+ * Helper radix_tree_iter_resume reset slot to NULL and next_index to index + 1,
* for tagger iteraction it also must reset cached tags in iterator to abort
* next radix_tree_next_slot and go to slow-path into radix_tree_next_chunk.
*
@@ -88,7 +88,7 @@ void regression3_test(void)
printf("slot %ld %p\n", iter.index, *slot);
if (!iter.index) {
printf("next at %ld\n", iter.index);
- slot = radix_tree_iter_next(&iter);
+ slot = radix_tree_iter_resume(slot, &iter);
}
}
@@ -96,7 +96,7 @@ void regression3_test(void)
printf("contig %ld %p\n", iter.index, *slot);
if (!iter.index) {
printf("next at %ld\n", iter.index);
- slot = radix_tree_iter_next(&iter);
+ slot = radix_tree_iter_resume(slot, &iter);
}
}
@@ -106,7 +106,7 @@ void regression3_test(void)
printf("tagged %ld %p\n", iter.index, *slot);
if (!iter.index) {
printf("next at %ld\n", iter.index);
- slot = radix_tree_iter_next(&iter);
+ slot = radix_tree_iter_resume(slot, &iter);
}
}
diff --git a/tools/testing/radix-tree/tag_check.c b/tools/testing/radix-tree/tag_check.c
index b0ac05741750..fd98c132207a 100644
--- a/tools/testing/radix-tree/tag_check.c
+++ b/tools/testing/radix-tree/tag_check.c
@@ -23,7 +23,7 @@ __simple_checks(struct radix_tree_root *tree, unsigned long index, int tag)
item_tag_set(tree, index, tag);
ret = item_tag_get(tree, index, tag);
assert(ret != 0);
- ret = radix_tree_range_tag_if_tagged(tree, &first, ~0UL, 10, tag, !tag);
+ ret = tag_tagged_items(tree, NULL, first, ~0UL, 10, tag, !tag);
assert(ret == 1);
ret = item_tag_get(tree, index, !tag);
assert(ret != 0);
@@ -51,6 +51,7 @@ void simple_checks(void)
verify_tag_consistency(&tree, 1);
printf("before item_kill_tree: %d allocated\n", nr_allocated);
item_kill_tree(&tree);
+ rcu_barrier();
printf("after item_kill_tree: %d allocated\n", nr_allocated);
}
@@ -319,10 +320,13 @@ static void single_check(void)
assert(ret == 0);
verify_tag_consistency(&tree, 0);
verify_tag_consistency(&tree, 1);
- ret = radix_tree_range_tag_if_tagged(&tree, &first, 10, 10, 0, 1);
+ ret = tag_tagged_items(&tree, NULL, first, 10, 10, 0, 1);
assert(ret == 1);
ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 1);
assert(ret == 1);
+ item_tag_clear(&tree, 0, 0);
+ ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0);
+ assert(ret == 0);
item_kill_tree(&tree);
}
@@ -331,12 +335,16 @@ void tag_check(void)
single_check();
extend_checks();
contract_checks();
+ rcu_barrier();
printf("after extend_checks: %d allocated\n", nr_allocated);
__leak_check();
leak_check();
+ rcu_barrier();
printf("after leak_check: %d allocated\n", nr_allocated);
simple_checks();
+ rcu_barrier();
printf("after simple_checks: %d allocated\n", nr_allocated);
thrash_tags();
+ rcu_barrier();
printf("after thrash_tags: %d allocated\n", nr_allocated);
}
diff --git a/tools/testing/radix-tree/test.c b/tools/testing/radix-tree/test.c
index a6e8099eaf4f..e5726e373646 100644
--- a/tools/testing/radix-tree/test.c
+++ b/tools/testing/radix-tree/test.c
@@ -24,21 +24,29 @@ int item_tag_get(struct radix_tree_root *root, unsigned long index, int tag)
return radix_tree_tag_get(root, index, tag);
}
-int __item_insert(struct radix_tree_root *root, struct item *item,
- unsigned order)
+int __item_insert(struct radix_tree_root *root, struct item *item)
{
- return __radix_tree_insert(root, item->index, order, item);
+ return __radix_tree_insert(root, item->index, item->order, item);
}
int item_insert(struct radix_tree_root *root, unsigned long index)
{
- return __item_insert(root, item_create(index), 0);
+ return __item_insert(root, item_create(index, 0));
}
int item_insert_order(struct radix_tree_root *root, unsigned long index,
unsigned order)
{
- return __item_insert(root, item_create(index), order);
+ return __item_insert(root, item_create(index, order));
+}
+
+void item_sanity(struct item *item, unsigned long index)
+{
+ unsigned long mask;
+ assert(!radix_tree_is_internal_node(item));
+ assert(item->order < BITS_PER_LONG);
+ mask = (1UL << item->order) - 1;
+ assert((item->index | mask) == (index | mask));
}
int item_delete(struct radix_tree_root *root, unsigned long index)
@@ -46,18 +54,19 @@ int item_delete(struct radix_tree_root *root, unsigned long index)
struct item *item = radix_tree_delete(root, index);
if (item) {
- assert(item->index == index);
+ item_sanity(item, index);
free(item);
return 1;
}
return 0;
}
-struct item *item_create(unsigned long index)
+struct item *item_create(unsigned long index, unsigned int order)
{
struct item *ret = malloc(sizeof(*ret));
ret->index = index;
+ ret->order = order;
return ret;
}
@@ -66,8 +75,8 @@ void item_check_present(struct radix_tree_root *root, unsigned long index)
struct item *item;
item = radix_tree_lookup(root, index);
- assert(item != 0);
- assert(item->index == index);
+ assert(item != NULL);
+ item_sanity(item, index);
}
struct item *item_lookup(struct radix_tree_root *root, unsigned long index)
@@ -80,7 +89,7 @@ void item_check_absent(struct radix_tree_root *root, unsigned long index)
struct item *item;
item = radix_tree_lookup(root, index);
- assert(item == 0);
+ assert(item == NULL);
}
/*
@@ -142,6 +151,62 @@ void item_full_scan(struct radix_tree_root *root, unsigned long start,
assert(nfound == 0);
}
+/* Use the same pattern as tag_pages_for_writeback() in mm/page-writeback.c */
+int tag_tagged_items(struct radix_tree_root *root, pthread_mutex_t *lock,
+ unsigned long start, unsigned long end, unsigned batch,
+ unsigned iftag, unsigned thentag)
+{
+ unsigned long tagged = 0;
+ struct radix_tree_iter iter;
+ void **slot;
+
+ if (batch == 0)
+ batch = 1;
+
+ if (lock)
+ pthread_mutex_lock(lock);
+ radix_tree_for_each_tagged(slot, root, &iter, start, iftag) {
+ if (iter.index > end)
+ break;
+ radix_tree_iter_tag_set(root, &iter, thentag);
+ tagged++;
+ if ((tagged % batch) != 0)
+ continue;
+ slot = radix_tree_iter_resume(slot, &iter);
+ if (lock) {
+ pthread_mutex_unlock(lock);
+ rcu_barrier();
+ pthread_mutex_lock(lock);
+ }
+ }
+ if (lock)
+ pthread_mutex_unlock(lock);
+
+ return tagged;
+}
+
+/* Use the same pattern as find_swap_entry() in mm/shmem.c */
+unsigned long find_item(struct radix_tree_root *root, void *item)
+{
+ struct radix_tree_iter iter;
+ void **slot;
+ unsigned long found = -1;
+ unsigned long checked = 0;
+
+ radix_tree_for_each_slot(slot, root, &iter, 0) {
+ if (*slot == item) {
+ found = iter.index;
+ break;
+ }
+ checked++;
+ if ((checked % 4) != 0)
+ continue;
+ slot = radix_tree_iter_resume(slot, &iter);
+ }
+
+ return found;
+}
+
static int verify_node(struct radix_tree_node *slot, unsigned int tag,
int tagged)
{
@@ -200,9 +265,16 @@ void verify_tag_consistency(struct radix_tree_root *root, unsigned int tag)
void item_kill_tree(struct radix_tree_root *root)
{
+ struct radix_tree_iter iter;
+ void **slot;
struct item *items[32];
int nfound;
+ radix_tree_for_each_slot(slot, root, &iter, 0) {
+ if (radix_tree_exceptional_entry(*slot))
+ radix_tree_delete(root, iter.index);
+ }
+
while ((nfound = radix_tree_gang_lookup(root, (void **)items, 0, 32))) {
int i;
diff --git a/tools/testing/radix-tree/test.h b/tools/testing/radix-tree/test.h
index 217fb2403f09..056a23b56467 100644
--- a/tools/testing/radix-tree/test.h
+++ b/tools/testing/radix-tree/test.h
@@ -5,11 +5,11 @@
struct item {
unsigned long index;
+ unsigned int order;
};
-struct item *item_create(unsigned long index);
-int __item_insert(struct radix_tree_root *root, struct item *item,
- unsigned order);
+struct item *item_create(unsigned long index, unsigned int order);
+int __item_insert(struct radix_tree_root *root, struct item *item);
int item_insert(struct radix_tree_root *root, unsigned long index);
int item_insert_order(struct radix_tree_root *root, unsigned long index,
unsigned order);
@@ -25,9 +25,15 @@ void item_full_scan(struct radix_tree_root *root, unsigned long start,
unsigned long nr, int chunk);
void item_kill_tree(struct radix_tree_root *root);
+int tag_tagged_items(struct radix_tree_root *, pthread_mutex_t *,
+ unsigned long start, unsigned long end, unsigned batch,
+ unsigned iftag, unsigned thentag);
+unsigned long find_item(struct radix_tree_root *, void *item);
+
void tag_check(void);
void multiorder_checks(void);
-void iteration_test(void);
+void iteration_test(unsigned order, unsigned duration);
+void benchmark(void);
struct item *
item_tag_set(struct radix_tree_root *root, unsigned long index, int tag);
@@ -40,7 +46,14 @@ void verify_tag_consistency(struct radix_tree_root *root, unsigned int tag);
extern int nr_allocated;
/* Normally private parts of lib/radix-tree.c */
+struct radix_tree_node *entry_to_node(void *ptr);
void radix_tree_dump(struct radix_tree_root *root);
int root_tag_get(struct radix_tree_root *root, unsigned int tag);
unsigned long node_maxindex(struct radix_tree_node *);
unsigned long shift_maxindex(unsigned int shift);
+int radix_tree_cpu_dead(unsigned int cpu);
+struct radix_tree_preload {
+ unsigned nr;
+ struct radix_tree_node *nodes;
+};
+extern struct radix_tree_preload radix_tree_preloads;