author     Linus Torvalds <torvalds@linux-foundation.org>   2022-10-10 22:49:34 +0300
committer  Linus Torvalds <torvalds@linux-foundation.org>   2022-10-10 22:49:34 +0300
commit     d4013bc4d49f6da8178a340348369bb9920225c9 (patch)
tree       3e7ad8a2b2d726179aca30d04d52c2fabef97e7c /lib
parent     cdf072acb5baa18e5b05bdf3f13d6481f62396fc (diff)
parent     585463f0d58aa4d29b744c7c53b222b8028de87f (diff)
download   linux-d4013bc4d49f6da8178a340348369bb9920225c9.tar.xz
Merge tag 'bitmap-6.1-rc1' of https://github.com/norov/linux
Pull bitmap updates from Yury Norov:
- Fix unsigned comparison to -1 in CPUMAP_FILE_MAX_BYTES (Phil Auld)
- cleanup nr_cpu_ids vs nr_cpumask_bits mess (me)
This series cleans up that mess and adds a new config option, FORCE_NR_CPUS,
that lets the cpumask subsystem be optimized when the number of CPUs is
known at compile time (a sketch of the idea follows this list).
- optimize find_bit() functions (me)
Reworks find_bit() functions based on new FIND_{FIRST,NEXT}_BIT()
macros.
- add find_nth_bit() (me)
Adds find_nth_bit(), which is ~70 times faster than bitcounting with a
for_each() loop:
	for_each_set_bit(bit, mask, size)
		if (n-- == 0)
			return bit;
Also adds bitmap_weight_and() to let people replace this pattern:
	tmp = bitmap_alloc(nbits);
	bitmap_and(tmp, map1, map2, nbits);
	weight = bitmap_weight(tmp, nbits);
	bitmap_free(tmp);
with a single bitmap_weight_and() call (see the usage sketch after this
list).
- repair cpumask_check() (me)
After switching cpumask to use nr_cpu_ids, cpumask_check() started
generating many false-positive warnings. This series fixes it.
- Add for_each_cpu_andnot() (Valentin Schneider)
Extends the API with one more function and applies it in sched/core.
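A quick illustration of the FORCE_NR_CPUS idea mentioned above (a minimal
sketch, not the kernel's verbatim code): when NR_CPUS is trusted to be the
real number of possible CPUs, nr_cpu_ids can collapse to a compile-time
constant, so loops bounded by it are easier for the compiler to optimize.
The exact #ifdef shape below is an assumption made for illustration.
	#ifdef CONFIG_FORCE_NR_CPUS
	/* NR_CPUS is the actual number of possible CPUs: use it directly */
	#define nr_cpu_ids ((unsigned int)NR_CPUS)
	#else
	/* otherwise the CPU count is only discovered at boot */
	extern unsigned int nr_cpu_ids;
	#endif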
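Likewise, a short usage sketch for the new APIs listed above; the argument
order follows the lib/ diff and kunit tests below, while the surrounding
variables (mask, map1, map2, n, nbits, cpu) and do_something() are
illustrative placeholders:
	/* n-th set bit in one call, instead of counting with for_each_set_bit() */
	bit = find_nth_bit(mask, size, n);
	/* weight of the intersection, no temporary bitmap needed */
	weight = bitmap_weight_and(map1, map2, nbits);
	/* walk the CPUs that are set in mask1 but clear in mask2 */
	for_each_cpu_andnot(cpu, mask1, mask2)
		do_something(cpu);
Like the other find_*_bit() helpers, find_nth_bit() returns the bitmap size
when fewer than n + 1 bits are set, as the test_bitmap cases below confirm.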
* tag 'bitmap-6.1-rc1' of https://github.com/norov/linux: (28 commits)
sched/core: Merge cpumask_andnot()+for_each_cpu() into for_each_cpu_andnot()
lib/test_cpumask: Add for_each_cpu_and(not) tests
cpumask: Introduce for_each_cpu_andnot()
lib/find_bit: Introduce find_next_andnot_bit()
cpumask: fix checking valid cpu range
lib/bitmap: add tests for for_each() loops
lib/find: optimize for_each() macros
lib/bitmap: introduce for_each_set_bit_wrap() macro
lib/find_bit: add find_next{,_and}_bit_wrap
cpumask: switch for_each_cpu{,_not} to use for_each_bit()
net: fix cpu_max_bits_warn() usage in netif_attrmask_next{,_and}
cpumask: add cpumask_nth_{,and,andnot}
lib/bitmap: remove bitmap_ord_to_pos
lib/bitmap: add tests for find_nth_bit()
lib: add find_nth{,_and,_andnot}_bit()
lib/bitmap: add bitmap_weight_and()
lib/bitmap: don't call __bitmap_weight() in kernel code
tools: sync find_bit() implementation
lib/find_bit: optimize find_next_bit() functions
lib/find_bit: create find_first_zero_bit_le()
...
Diffstat (limited to 'lib')
 -rw-r--r--  lib/Kconfig              |   9
 -rw-r--r--  lib/bitmap.c             |  68
 -rw-r--r--  lib/cpumask.c            |  40
 -rw-r--r--  lib/cpumask_kunit.c      |  19
 -rw-r--r--  lib/find_bit.c           | 233
 -rw-r--r--  lib/find_bit_benchmark.c |  18
 -rw-r--r--  lib/test_bitmap.c        | 291
 7 files changed, 536 insertions, 142 deletions
diff --git a/lib/Kconfig b/lib/Kconfig index 3ea8941ab18d..d628235f7934 100644 --- a/lib/Kconfig +++ b/lib/Kconfig @@ -531,6 +531,15 @@ config CPUMASK_OFFSTACK them on the stack. This is a bit more expensive, but avoids stack overflow. +config FORCE_NR_CPUS + bool "NR_CPUS is set to an actual number of CPUs" + depends on SMP + help + Say Yes if you have NR_CPUS set to an actual number of possible + CPUs in your system, not to a default value. This forces the core + code to rely on compile-time value and optimize kernel routines + better. + config CPU_RMAP bool depends on SMP diff --git a/lib/bitmap.c b/lib/bitmap.c index 488e6c3e5acc..1c81413c51f8 100644 --- a/lib/bitmap.c +++ b/lib/bitmap.c @@ -333,20 +333,32 @@ bool __bitmap_subset(const unsigned long *bitmap1, } EXPORT_SYMBOL(__bitmap_subset); +#define BITMAP_WEIGHT(FETCH, bits) \ +({ \ + unsigned int __bits = (bits), idx, w = 0; \ + \ + for (idx = 0; idx < __bits / BITS_PER_LONG; idx++) \ + w += hweight_long(FETCH); \ + \ + if (__bits % BITS_PER_LONG) \ + w += hweight_long((FETCH) & BITMAP_LAST_WORD_MASK(__bits)); \ + \ + w; \ +}) + unsigned int __bitmap_weight(const unsigned long *bitmap, unsigned int bits) { - unsigned int k, lim = bits/BITS_PER_LONG, w = 0; - - for (k = 0; k < lim; k++) - w += hweight_long(bitmap[k]); - - if (bits % BITS_PER_LONG) - w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits)); - - return w; + return BITMAP_WEIGHT(bitmap[idx], bits); } EXPORT_SYMBOL(__bitmap_weight); +unsigned int __bitmap_weight_and(const unsigned long *bitmap1, + const unsigned long *bitmap2, unsigned int bits) +{ + return BITMAP_WEIGHT(bitmap1[idx] & bitmap2[idx], bits); +} +EXPORT_SYMBOL(__bitmap_weight_and); + void __bitmap_set(unsigned long *map, unsigned int start, int len) { unsigned long *p = map + BIT_WORD(start); @@ -953,37 +965,7 @@ static int bitmap_pos_to_ord(const unsigned long *buf, unsigned int pos, unsigne if (pos >= nbits || !test_bit(pos, buf)) return -1; - return __bitmap_weight(buf, pos); -} - -/** - * bitmap_ord_to_pos - find position of n-th set bit in bitmap - * @buf: pointer to bitmap - * @ord: ordinal bit position (n-th set bit, n >= 0) - * @nbits: number of valid bit positions in @buf - * - * Map the ordinal offset of bit @ord in @buf to its position in @buf. - * Value of @ord should be in range 0 <= @ord < weight(buf). If @ord - * >= weight(buf), returns @nbits. - * - * If for example, just bits 4 through 7 are set in @buf, then @ord - * values 0 through 3 will get mapped to 4 through 7, respectively, - * and all other @ord values returns @nbits. When @ord value 3 - * gets mapped to (returns) @pos value 7 in this example, that means - * that the 3rd set bit (starting with 0th) is at position 7 in @buf. - * - * The bit positions 0 through @nbits-1 are valid positions in @buf. 
- */ -unsigned int bitmap_ord_to_pos(const unsigned long *buf, unsigned int ord, unsigned int nbits) -{ - unsigned int pos; - - for (pos = find_first_bit(buf, nbits); - pos < nbits && ord; - pos = find_next_bit(buf, nbits, pos + 1)) - ord--; - - return pos; + return bitmap_weight(buf, pos); } /** @@ -1035,7 +1017,7 @@ void bitmap_remap(unsigned long *dst, const unsigned long *src, if (n < 0 || w == 0) set_bit(oldbit, dst); /* identity map */ else - set_bit(bitmap_ord_to_pos(new, n % w, nbits), dst); + set_bit(find_nth_bit(new, nbits, n % w), dst); } } EXPORT_SYMBOL(bitmap_remap); @@ -1074,7 +1056,7 @@ int bitmap_bitremap(int oldbit, const unsigned long *old, if (n < 0 || w == 0) return oldbit; else - return bitmap_ord_to_pos(new, n % w, bits); + return find_nth_bit(new, bits, n % w); } EXPORT_SYMBOL(bitmap_bitremap); @@ -1198,7 +1180,7 @@ void bitmap_onto(unsigned long *dst, const unsigned long *orig, * The following code is a more efficient, but less * obvious, equivalent to the loop: * for (m = 0; m < bitmap_weight(relmap, bits); m++) { - * n = bitmap_ord_to_pos(orig, m, bits); + * n = find_nth_bit(orig, bits, m); * if (test_bit(m, orig)) * set_bit(n, dst); * } diff --git a/lib/cpumask.c b/lib/cpumask.c index f0ae119be8c4..c7c392514fd3 100644 --- a/lib/cpumask.c +++ b/lib/cpumask.c @@ -128,23 +128,21 @@ unsigned int cpumask_local_spread(unsigned int i, int node) i %= num_online_cpus(); if (node == NUMA_NO_NODE) { - for_each_cpu(cpu, cpu_online_mask) - if (i-- == 0) - return cpu; + cpu = cpumask_nth(i, cpu_online_mask); + if (cpu < nr_cpu_ids) + return cpu; } else { /* NUMA first. */ - for_each_cpu_and(cpu, cpumask_of_node(node), cpu_online_mask) - if (i-- == 0) - return cpu; - - for_each_cpu(cpu, cpu_online_mask) { - /* Skip NUMA nodes, done above. */ - if (cpumask_test_cpu(cpu, cpumask_of_node(node))) - continue; - - if (i-- == 0) - return cpu; - } + cpu = cpumask_nth_and(i, cpu_online_mask, cpumask_of_node(node)); + if (cpu < nr_cpu_ids) + return cpu; + + i -= cpumask_weight_and(cpu_online_mask, cpumask_of_node(node)); + + /* Skip NUMA nodes, done above. */ + cpu = cpumask_nth_andnot(i, cpu_online_mask, cpumask_of_node(node)); + if (cpu < nr_cpu_ids) + return cpu; } BUG(); } @@ -168,10 +166,8 @@ unsigned int cpumask_any_and_distribute(const struct cpumask *src1p, /* NOTE: our first selection will skip 0. */ prev = __this_cpu_read(distribute_cpu_mask_prev); - next = cpumask_next_and(prev, src1p, src2p); - if (next >= nr_cpu_ids) - next = cpumask_first_and(src1p, src2p); - + next = find_next_and_bit_wrap(cpumask_bits(src1p), cpumask_bits(src2p), + nr_cpumask_bits, prev + 1); if (next < nr_cpu_ids) __this_cpu_write(distribute_cpu_mask_prev, next); @@ -185,11 +181,7 @@ unsigned int cpumask_any_distribute(const struct cpumask *srcp) /* NOTE: our first selection will skip 0. 
*/ prev = __this_cpu_read(distribute_cpu_mask_prev); - - next = cpumask_next(prev, srcp); - if (next >= nr_cpu_ids) - next = cpumask_first(srcp); - + next = find_next_bit_wrap(cpumask_bits(srcp), nr_cpumask_bits, prev + 1); if (next < nr_cpu_ids) __this_cpu_write(distribute_cpu_mask_prev, next); diff --git a/lib/cpumask_kunit.c b/lib/cpumask_kunit.c index ecbeec72221e..d1fc6ece21f3 100644 --- a/lib/cpumask_kunit.c +++ b/lib/cpumask_kunit.c @@ -33,6 +33,19 @@ KUNIT_EXPECT_EQ_MSG((test), nr_cpu_ids - mask_weight, iter, MASK_MSG(mask)); \ } while (0) +#define EXPECT_FOR_EACH_CPU_OP_EQ(test, op, mask1, mask2) \ + do { \ + const cpumask_t *m1 = (mask1); \ + const cpumask_t *m2 = (mask2); \ + int weight; \ + int cpu, iter = 0; \ + cpumask_##op(&mask_tmp, m1, m2); \ + weight = cpumask_weight(&mask_tmp); \ + for_each_cpu_##op(cpu, mask1, mask2) \ + iter++; \ + KUNIT_EXPECT_EQ((test), weight, iter); \ + } while (0) + #define EXPECT_FOR_EACH_CPU_WRAP_EQ(test, mask) \ do { \ const cpumask_t *m = (mask); \ @@ -54,6 +67,7 @@ static cpumask_t mask_empty; static cpumask_t mask_all; +static cpumask_t mask_tmp; static void test_cpumask_weight(struct kunit *test) { @@ -101,10 +115,15 @@ static void test_cpumask_iterators(struct kunit *test) EXPECT_FOR_EACH_CPU_EQ(test, &mask_empty); EXPECT_FOR_EACH_CPU_NOT_EQ(test, &mask_empty); EXPECT_FOR_EACH_CPU_WRAP_EQ(test, &mask_empty); + EXPECT_FOR_EACH_CPU_OP_EQ(test, and, &mask_empty, &mask_empty); + EXPECT_FOR_EACH_CPU_OP_EQ(test, and, cpu_possible_mask, &mask_empty); + EXPECT_FOR_EACH_CPU_OP_EQ(test, andnot, &mask_empty, &mask_empty); EXPECT_FOR_EACH_CPU_EQ(test, cpu_possible_mask); EXPECT_FOR_EACH_CPU_NOT_EQ(test, cpu_possible_mask); EXPECT_FOR_EACH_CPU_WRAP_EQ(test, cpu_possible_mask); + EXPECT_FOR_EACH_CPU_OP_EQ(test, and, cpu_possible_mask, cpu_possible_mask); + EXPECT_FOR_EACH_CPU_OP_EQ(test, andnot, cpu_possible_mask, &mask_empty); } static void test_cpumask_iterators_builtin(struct kunit *test) diff --git a/lib/find_bit.c b/lib/find_bit.c index 1b8e4b2a9cba..18bc0a7ac8ee 100644 --- a/lib/find_bit.c +++ b/lib/find_bit.c @@ -19,57 +19,78 @@ #include <linux/minmax.h> #include <linux/swab.h> -#if !defined(find_next_bit) || !defined(find_next_zero_bit) || \ - !defined(find_next_bit_le) || !defined(find_next_zero_bit_le) || \ - !defined(find_next_and_bit) /* - * This is a common helper function for find_next_bit, find_next_zero_bit, and - * find_next_and_bit. The differences are: - * - The "invert" argument, which is XORed with each fetched word before - * searching it for one bits. - * - The optional "addr2", which is anded with "addr1" if present. + * Common helper for find_bit() function family + * @FETCH: The expression that fetches and pre-processes each word of bitmap(s) + * @MUNGE: The expression that post-processes a word containing found bit (may be empty) + * @size: The bitmap size in bits */ -unsigned long _find_next_bit(const unsigned long *addr1, - const unsigned long *addr2, unsigned long nbits, - unsigned long start, unsigned long invert, unsigned long le) -{ - unsigned long tmp, mask; - - if (unlikely(start >= nbits)) - return nbits; - - tmp = addr1[start / BITS_PER_LONG]; - if (addr2) - tmp &= addr2[start / BITS_PER_LONG]; - tmp ^= invert; - - /* Handle 1st word. 
*/ - mask = BITMAP_FIRST_WORD_MASK(start); - if (le) - mask = swab(mask); - - tmp &= mask; - - start = round_down(start, BITS_PER_LONG); - - while (!tmp) { - start += BITS_PER_LONG; - if (start >= nbits) - return nbits; - - tmp = addr1[start / BITS_PER_LONG]; - if (addr2) - tmp &= addr2[start / BITS_PER_LONG]; - tmp ^= invert; - } +#define FIND_FIRST_BIT(FETCH, MUNGE, size) \ +({ \ + unsigned long idx, val, sz = (size); \ + \ + for (idx = 0; idx * BITS_PER_LONG < sz; idx++) { \ + val = (FETCH); \ + if (val) { \ + sz = min(idx * BITS_PER_LONG + __ffs(MUNGE(val)), sz); \ + break; \ + } \ + } \ + \ + sz; \ +}) - if (le) - tmp = swab(tmp); - - return min(start + __ffs(tmp), nbits); -} -EXPORT_SYMBOL(_find_next_bit); -#endif +/* + * Common helper for find_next_bit() function family + * @FETCH: The expression that fetches and pre-processes each word of bitmap(s) + * @MUNGE: The expression that post-processes a word containing found bit (may be empty) + * @size: The bitmap size in bits + * @start: The bitnumber to start searching at + */ +#define FIND_NEXT_BIT(FETCH, MUNGE, size, start) \ +({ \ + unsigned long mask, idx, tmp, sz = (size), __start = (start); \ + \ + if (unlikely(__start >= sz)) \ + goto out; \ + \ + mask = MUNGE(BITMAP_FIRST_WORD_MASK(__start)); \ + idx = __start / BITS_PER_LONG; \ + \ + for (tmp = (FETCH) & mask; !tmp; tmp = (FETCH)) { \ + if ((idx + 1) * BITS_PER_LONG >= sz) \ + goto out; \ + idx++; \ + } \ + \ + sz = min(idx * BITS_PER_LONG + __ffs(MUNGE(tmp)), sz); \ +out: \ + sz; \ +}) + +#define FIND_NTH_BIT(FETCH, size, num) \ +({ \ + unsigned long sz = (size), nr = (num), idx, w, tmp; \ + \ + for (idx = 0; (idx + 1) * BITS_PER_LONG <= sz; idx++) { \ + if (idx * BITS_PER_LONG + nr >= sz) \ + goto out; \ + \ + tmp = (FETCH); \ + w = hweight_long(tmp); \ + if (w > nr) \ + goto found; \ + \ + nr -= w; \ + } \ + \ + if (sz % BITS_PER_LONG) \ + tmp = (FETCH) & BITMAP_LAST_WORD_MASK(sz); \ +found: \ + sz = min(idx * BITS_PER_LONG + fns(tmp, nr), sz); \ +out: \ + sz; \ +}) #ifndef find_first_bit /* @@ -77,14 +98,7 @@ EXPORT_SYMBOL(_find_next_bit); */ unsigned long _find_first_bit(const unsigned long *addr, unsigned long size) { - unsigned long idx; - - for (idx = 0; idx * BITS_PER_LONG < size; idx++) { - if (addr[idx]) - return min(idx * BITS_PER_LONG + __ffs(addr[idx]), size); - } - - return size; + return FIND_FIRST_BIT(addr[idx], /* nop */, size); } EXPORT_SYMBOL(_find_first_bit); #endif @@ -97,15 +111,7 @@ unsigned long _find_first_and_bit(const unsigned long *addr1, const unsigned long *addr2, unsigned long size) { - unsigned long idx, val; - - for (idx = 0; idx * BITS_PER_LONG < size; idx++) { - val = addr1[idx] & addr2[idx]; - if (val) - return min(idx * BITS_PER_LONG + __ffs(val), size); - } - - return size; + return FIND_FIRST_BIT(addr1[idx] & addr2[idx], /* nop */, size); } EXPORT_SYMBOL(_find_first_and_bit); #endif @@ -116,16 +122,64 @@ EXPORT_SYMBOL(_find_first_and_bit); */ unsigned long _find_first_zero_bit(const unsigned long *addr, unsigned long size) { - unsigned long idx; + return FIND_FIRST_BIT(~addr[idx], /* nop */, size); +} +EXPORT_SYMBOL(_find_first_zero_bit); +#endif - for (idx = 0; idx * BITS_PER_LONG < size; idx++) { - if (addr[idx] != ~0UL) - return min(idx * BITS_PER_LONG + ffz(addr[idx]), size); - } +#ifndef find_next_bit +unsigned long _find_next_bit(const unsigned long *addr, unsigned long nbits, unsigned long start) +{ + return FIND_NEXT_BIT(addr[idx], /* nop */, nbits, start); +} +EXPORT_SYMBOL(_find_next_bit); +#endif - return size; +unsigned long 
__find_nth_bit(const unsigned long *addr, unsigned long size, unsigned long n) +{ + return FIND_NTH_BIT(addr[idx], size, n); } -EXPORT_SYMBOL(_find_first_zero_bit); +EXPORT_SYMBOL(__find_nth_bit); + +unsigned long __find_nth_and_bit(const unsigned long *addr1, const unsigned long *addr2, + unsigned long size, unsigned long n) +{ + return FIND_NTH_BIT(addr1[idx] & addr2[idx], size, n); +} +EXPORT_SYMBOL(__find_nth_and_bit); + +unsigned long __find_nth_andnot_bit(const unsigned long *addr1, const unsigned long *addr2, + unsigned long size, unsigned long n) +{ + return FIND_NTH_BIT(addr1[idx] & ~addr2[idx], size, n); +} +EXPORT_SYMBOL(__find_nth_andnot_bit); + +#ifndef find_next_and_bit +unsigned long _find_next_and_bit(const unsigned long *addr1, const unsigned long *addr2, + unsigned long nbits, unsigned long start) +{ + return FIND_NEXT_BIT(addr1[idx] & addr2[idx], /* nop */, nbits, start); +} +EXPORT_SYMBOL(_find_next_and_bit); +#endif + +#ifndef find_next_andnot_bit +unsigned long _find_next_andnot_bit(const unsigned long *addr1, const unsigned long *addr2, + unsigned long nbits, unsigned long start) +{ + return FIND_NEXT_BIT(addr1[idx] & ~addr2[idx], /* nop */, nbits, start); +} +EXPORT_SYMBOL(_find_next_andnot_bit); +#endif + +#ifndef find_next_zero_bit +unsigned long _find_next_zero_bit(const unsigned long *addr, unsigned long nbits, + unsigned long start) +{ + return FIND_NEXT_BIT(~addr[idx], /* nop */, nbits, start); +} +EXPORT_SYMBOL(_find_next_zero_bit); #endif #ifndef find_last_bit @@ -161,3 +215,38 @@ unsigned long find_next_clump8(unsigned long *clump, const unsigned long *addr, return offset; } EXPORT_SYMBOL(find_next_clump8); + +#ifdef __BIG_ENDIAN + +#ifndef find_first_zero_bit_le +/* + * Find the first cleared bit in an LE memory region. 
+ */ +unsigned long _find_first_zero_bit_le(const unsigned long *addr, unsigned long size) +{ + return FIND_FIRST_BIT(~addr[idx], swab, size); +} +EXPORT_SYMBOL(_find_first_zero_bit_le); + +#endif + +#ifndef find_next_zero_bit_le +unsigned long _find_next_zero_bit_le(const unsigned long *addr, + unsigned long size, unsigned long offset) +{ + return FIND_NEXT_BIT(~addr[idx], swab, size, offset); +} +EXPORT_SYMBOL(_find_next_zero_bit_le); +#endif + +#ifndef find_next_bit_le +unsigned long _find_next_bit_le(const unsigned long *addr, + unsigned long size, unsigned long offset) +{ + return FIND_NEXT_BIT(addr[idx], swab, size, offset); +} +EXPORT_SYMBOL(_find_next_bit_le); + +#endif + +#endif /* __BIG_ENDIAN */ diff --git a/lib/find_bit_benchmark.c b/lib/find_bit_benchmark.c index db904b57d4b8..10754586403b 100644 --- a/lib/find_bit_benchmark.c +++ b/lib/find_bit_benchmark.c @@ -115,6 +115,22 @@ static int __init test_find_last_bit(const void *bitmap, unsigned long len) return 0; } +static int __init test_find_nth_bit(const unsigned long *bitmap, unsigned long len) +{ + unsigned long l, n, w = bitmap_weight(bitmap, len); + ktime_t time; + + time = ktime_get(); + for (n = 0; n < w; n++) { + l = find_nth_bit(bitmap, len, n); + WARN_ON(l >= len); + } + time = ktime_get() - time; + pr_err("find_nth_bit: %18llu ns, %6ld iterations\n", time, w); + + return 0; +} + static int __init test_find_next_and_bit(const void *bitmap, const void *bitmap2, unsigned long len) { @@ -142,6 +158,7 @@ static int __init find_bit_test(void) test_find_next_bit(bitmap, BITMAP_LEN); test_find_next_zero_bit(bitmap, BITMAP_LEN); test_find_last_bit(bitmap, BITMAP_LEN); + test_find_nth_bit(bitmap, BITMAP_LEN / 10); /* * test_find_first_bit() may take some time, so @@ -164,6 +181,7 @@ static int __init find_bit_test(void) test_find_next_bit(bitmap, BITMAP_LEN); test_find_next_zero_bit(bitmap, BITMAP_LEN); test_find_last_bit(bitmap, BITMAP_LEN); + test_find_nth_bit(bitmap, BITMAP_LEN); test_find_first_bit(bitmap, BITMAP_LEN); test_find_first_and_bit(bitmap, bitmap2, BITMAP_LEN); test_find_next_and_bit(bitmap, bitmap2, BITMAP_LEN); diff --git a/lib/test_bitmap.c b/lib/test_bitmap.c index 98754ff9fe68..a8005ad3bd58 100644 --- a/lib/test_bitmap.c +++ b/lib/test_bitmap.c @@ -16,6 +16,8 @@ #include "../tools/testing/selftests/kselftest_module.h" +#define EXP1_IN_BITS (sizeof(exp1) * 8) + KSTM_MODULE_GLOBALS(); static char pbl_buffer[PAGE_SIZE] __initdata; @@ -219,6 +221,47 @@ static void __init test_zero_clear(void) expect_eq_pbl("", bmap, 1024); } +static void __init test_find_nth_bit(void) +{ + unsigned long b, bit, cnt = 0; + DECLARE_BITMAP(bmap, 64 * 3); + + bitmap_zero(bmap, 64 * 3); + __set_bit(10, bmap); + __set_bit(20, bmap); + __set_bit(30, bmap); + __set_bit(40, bmap); + __set_bit(50, bmap); + __set_bit(60, bmap); + __set_bit(80, bmap); + __set_bit(123, bmap); + + expect_eq_uint(10, find_nth_bit(bmap, 64 * 3, 0)); + expect_eq_uint(20, find_nth_bit(bmap, 64 * 3, 1)); + expect_eq_uint(30, find_nth_bit(bmap, 64 * 3, 2)); + expect_eq_uint(40, find_nth_bit(bmap, 64 * 3, 3)); + expect_eq_uint(50, find_nth_bit(bmap, 64 * 3, 4)); + expect_eq_uint(60, find_nth_bit(bmap, 64 * 3, 5)); + expect_eq_uint(80, find_nth_bit(bmap, 64 * 3, 6)); + expect_eq_uint(123, find_nth_bit(bmap, 64 * 3, 7)); + expect_eq_uint(64 * 3, find_nth_bit(bmap, 64 * 3, 8)); + + expect_eq_uint(10, find_nth_bit(bmap, 64 * 3 - 1, 0)); + expect_eq_uint(20, find_nth_bit(bmap, 64 * 3 - 1, 1)); + expect_eq_uint(30, find_nth_bit(bmap, 64 * 3 - 1, 2)); + 
expect_eq_uint(40, find_nth_bit(bmap, 64 * 3 - 1, 3)); + expect_eq_uint(50, find_nth_bit(bmap, 64 * 3 - 1, 4)); + expect_eq_uint(60, find_nth_bit(bmap, 64 * 3 - 1, 5)); + expect_eq_uint(80, find_nth_bit(bmap, 64 * 3 - 1, 6)); + expect_eq_uint(123, find_nth_bit(bmap, 64 * 3 - 1, 7)); + expect_eq_uint(64 * 3 - 1, find_nth_bit(bmap, 64 * 3 - 1, 8)); + + for_each_set_bit(bit, exp1, EXP1_IN_BITS) { + b = find_nth_bit(exp1, EXP1_IN_BITS, cnt++); + expect_eq_uint(b, bit); + } +} + static void __init test_fill_set(void) { DECLARE_BITMAP(bmap, 1024); @@ -557,8 +600,6 @@ static void __init test_bitmap_parse(void) } } -#define EXP1_IN_BITS (sizeof(exp1) * 8) - static void __init test_bitmap_arr32(void) { unsigned int nbits, next_bit; @@ -685,6 +726,239 @@ static void __init test_for_each_set_clump8(void) expect_eq_clump8(start, CLUMP_EXP_NUMBITS, clump_exp, &clump); } +static void __init test_for_each_set_bit_wrap(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int wr, bit; + + bitmap_zero(orig, 500); + + /* Set individual bits */ + for (bit = 0; bit < 500; bit += 10) + bitmap_set(orig, bit, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for (wr = 0; wr < 500; wr++) { + bitmap_zero(copy, 500); + + for_each_set_bit_wrap(bit, orig, 500, wr) + bitmap_set(copy, bit, 1); + + expect_eq_bitmap(orig, copy, 500); + } +} + +static void __init test_for_each_set_bit(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int bit; + + bitmap_zero(orig, 500); + bitmap_zero(copy, 500); + + /* Set individual bits */ + for (bit = 0; bit < 500; bit += 10) + bitmap_set(orig, bit, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for_each_set_bit(bit, orig, 500) + bitmap_set(copy, bit, 1); + + expect_eq_bitmap(orig, copy, 500); +} + +static void __init test_for_each_set_bit_from(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int wr, bit; + + bitmap_zero(orig, 500); + + /* Set individual bits */ + for (bit = 0; bit < 500; bit += 10) + bitmap_set(orig, bit, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for (wr = 0; wr < 500; wr++) { + DECLARE_BITMAP(tmp, 500); + + bitmap_zero(copy, 500); + bit = wr; + + for_each_set_bit_from(bit, orig, 500) + bitmap_set(copy, bit, 1); + + bitmap_copy(tmp, orig, 500); + bitmap_clear(tmp, 0, wr); + expect_eq_bitmap(tmp, copy, 500); + } +} + +static void __init test_for_each_clear_bit(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int bit; + + bitmap_fill(orig, 500); + bitmap_fill(copy, 500); + + /* Set individual bits */ + for (bit = 0; bit < 500; bit += 10) + bitmap_clear(orig, bit, 1); + + /* Set range of bits */ + bitmap_clear(orig, 100, 50); + + for_each_clear_bit(bit, orig, 500) + bitmap_clear(copy, bit, 1); + + expect_eq_bitmap(orig, copy, 500); +} + +static void __init test_for_each_clear_bit_from(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int wr, bit; + + bitmap_fill(orig, 500); + + /* Set individual bits */ + for (bit = 0; bit < 500; bit += 10) + bitmap_clear(orig, bit, 1); + + /* Set range of bits */ + bitmap_clear(orig, 100, 50); + + for (wr = 0; wr < 500; wr++) { + DECLARE_BITMAP(tmp, 500); + + bitmap_fill(copy, 500); + bit = wr; + + for_each_clear_bit_from(bit, orig, 500) + bitmap_clear(copy, bit, 1); + + bitmap_copy(tmp, orig, 500); + bitmap_set(tmp, 0, wr); + expect_eq_bitmap(tmp, copy, 500); + } +} + +static void __init test_for_each_set_bitrange(void) +{ + DECLARE_BITMAP(orig, 
500); + DECLARE_BITMAP(copy, 500); + unsigned int s, e; + + bitmap_zero(orig, 500); + bitmap_zero(copy, 500); + + /* Set individual bits */ + for (s = 0; s < 500; s += 10) + bitmap_set(orig, s, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for_each_set_bitrange(s, e, orig, 500) + bitmap_set(copy, s, e-s); + + expect_eq_bitmap(orig, copy, 500); +} + +static void __init test_for_each_clear_bitrange(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int s, e; + + bitmap_fill(orig, 500); + bitmap_fill(copy, 500); + + /* Set individual bits */ + for (s = 0; s < 500; s += 10) + bitmap_clear(orig, s, 1); + + /* Set range of bits */ + bitmap_clear(orig, 100, 50); + + for_each_clear_bitrange(s, e, orig, 500) + bitmap_clear(copy, s, e-s); + + expect_eq_bitmap(orig, copy, 500); +} + +static void __init test_for_each_set_bitrange_from(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int wr, s, e; + + bitmap_zero(orig, 500); + + /* Set individual bits */ + for (s = 0; s < 500; s += 10) + bitmap_set(orig, s, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for (wr = 0; wr < 500; wr++) { + DECLARE_BITMAP(tmp, 500); + + bitmap_zero(copy, 500); + s = wr; + + for_each_set_bitrange_from(s, e, orig, 500) + bitmap_set(copy, s, e - s); + + bitmap_copy(tmp, orig, 500); + bitmap_clear(tmp, 0, wr); + expect_eq_bitmap(tmp, copy, 500); + } +} + +static void __init test_for_each_clear_bitrange_from(void) +{ + DECLARE_BITMAP(orig, 500); + DECLARE_BITMAP(copy, 500); + unsigned int wr, s, e; + + bitmap_fill(orig, 500); + + /* Set individual bits */ + for (s = 0; s < 500; s += 10) + bitmap_clear(orig, s, 1); + + /* Set range of bits */ + bitmap_set(orig, 100, 50); + + for (wr = 0; wr < 500; wr++) { + DECLARE_BITMAP(tmp, 500); + + bitmap_fill(copy, 500); + s = wr; + + for_each_clear_bitrange_from(s, e, orig, 500) + bitmap_clear(copy, s, e - s); + + bitmap_copy(tmp, orig, 500); + bitmap_set(tmp, 0, wr); + expect_eq_bitmap(tmp, copy, 500); + } +} + struct test_bitmap_cut { unsigned int first; unsigned int cut; @@ -948,10 +1222,21 @@ static void __init selftest(void) test_bitmap_parselist(); test_bitmap_printlist(); test_mem_optimisations(); - test_for_each_set_clump8(); test_bitmap_cut(); test_bitmap_print_buf(); test_bitmap_const_eval(); + + test_find_nth_bit(); + test_for_each_set_bit(); + test_for_each_set_bit_from(); + test_for_each_clear_bit(); + test_for_each_clear_bit_from(); + test_for_each_set_bitrange(); + test_for_each_clear_bitrange(); + test_for_each_set_bitrange_from(); + test_for_each_clear_bitrange_from(); + test_for_each_set_clump8(); + test_for_each_set_bit_wrap(); } KSTM_MODULE_LOADERS(test_bitmap); |