From: "Yury Norov [NVIDIA]" <yury.norov@gmail.com>
Generalize node_random() and make it available to general bitmap and
cpumask users.
Notice, find_first_bit() is generally faster than find_nth_bit(), and we
employ it when there's a single set bit in the bitmap.
See commit 3e061d924fe9c7b4 ("lib/nodemask: optimize node_random for
nodemask with single NUMA node").
Signed-off-by: Yury Norov [NVIDIA] <yury.norov@gmail.com>
---
include/linux/find.h | 2 ++
include/linux/nodemask.h | 16 +---------------
lib/find_bit.c | 24 ++++++++++++++++++++++++
3 files changed, 27 insertions(+), 15 deletions(-)
diff --git a/include/linux/find.h b/include/linux/find.h
index 5a2c267ea7f9..98c61838002c 100644
--- a/include/linux/find.h
+++ b/include/linux/find.h
@@ -44,6 +44,8 @@ unsigned long _find_next_bit_le(const unsigned long *addr, unsigned
long size, unsigned long offset);
#endif
+unsigned long find_random_bit(const unsigned long *addr, unsigned long size);
+
#ifndef find_next_bit
/**
* find_next_bit - find the next set bit in a memory region
diff --git a/include/linux/nodemask.h b/include/linux/nodemask.h
index f08ae71585fa..1cedc7132b76 100644
--- a/include/linux/nodemask.h
+++ b/include/linux/nodemask.h
@@ -492,21 +492,7 @@ static __always_inline int num_node_state(enum node_states state)
static __always_inline int node_random(const nodemask_t *maskp)
{
#if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
- int w, bit;
-
- w = nodes_weight(*maskp);
- switch (w) {
- case 0:
- bit = NUMA_NO_NODE;
- break;
- case 1:
- bit = first_node(*maskp);
- break;
- default:
- bit = find_nth_bit(maskp->bits, MAX_NUMNODES, get_random_u32_below(w));
- break;
- }
- return bit;
+ return find_random_bit(maskp->bits, MAX_NUMNODES);
#else
return 0;
#endif
diff --git a/lib/find_bit.c b/lib/find_bit.c
index 06b6342aa3ae..d4b5a29e3e72 100644
--- a/lib/find_bit.c
+++ b/lib/find_bit.c
@@ -18,6 +18,7 @@
#include <linux/math.h>
#include <linux/minmax.h>
#include <linux/swab.h>
+#include <linux/random.h>
/*
* Common helper for find_bit() function family
@@ -291,3 +292,26 @@ EXPORT_SYMBOL(_find_next_bit_le);
#endif
#endif /* __BIG_ENDIAN */
+
+/**
+ * find_random_bit - find a set bit at random position
+ * @addr: The address to base the search on
+ * @size: The bitmap size in bits
+ *
+ * Returns: a position of a random set bit; >= @size otherwise
+ */
+unsigned long find_random_bit(const unsigned long *addr, unsigned long size)
+{
+ int w = bitmap_weight(addr, size);
+
+ switch (w) {
+ case 0:
+ return size;
+ case 1:
+ /* Performance trick for single-bit bitmaps */
+ return find_first_bit(addr, size);
+ default:
+ return find_nth_bit(addr, size, get_random_u32_below(w));
+ }
+}
+EXPORT_SYMBOL(find_random_bit);
--
2.43.0
On Tue, 17 Jun 2025 16:08:51 -0400 Yury Norov <yury.norov@gmail.com> wrote:
> From: "Yury Norov [NVIDIA]" <yury.norov@gmail.com>
>
> Generalize node_random() and make it available to general bitmap and
> cpumask users.
>
> Notice, find_first_bit() is generally faster than find_nth_bit(), and we
> employ it when there's a single set bit in the bitmap.
>
> See commit 3e061d924fe9c7b4 ("lib/nodemask: optimize node_random for
> nodemask with single NUMA node").
>
> ...
>
> --- a/include/linux/nodemask.h
> +++ b/include/linux/nodemask.h
> @@ -492,21 +492,7 @@ static __always_inline int num_node_state(enum node_states state)
> static __always_inline int node_random(const nodemask_t *maskp)
> {
> #if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
> - int w, bit;
> -
> - w = nodes_weight(*maskp);
> - switch (w) {
> - case 0:
> - bit = NUMA_NO_NODE;
If the mask has no bits set, return -1.
> - break;
> - case 1:
> - bit = first_node(*maskp);
> - break;
> - default:
> - bit = find_nth_bit(maskp->bits, MAX_NUMNODES, get_random_u32_below(w));
> - break;
> - }
> - return bit;
> + return find_random_bit(maskp->bits, MAX_NUMNODES);
>
> ...
>
> +unsigned long find_random_bit(const unsigned long *addr, unsigned long size)
> +{
> + int w = bitmap_weight(addr, size);
> +
> + switch (w) {
> + case 0:
> + return size;
If the mask has no bits set, return the mask's size.
> + case 1:
> + /* Performance trick for single-bit bitmaps */
> + return find_first_bit(addr, size);
> + default:
> + return find_nth_bit(addr, size, get_random_u32_below(w));
> + }
> +}
I'm not seeing how this is correct? For an empty mask, node_random() previously returned NUMA_NO_NODE (-1), but with this change it will return MAX_NUMNODES instead.
On Tue, Jun 17, 2025 at 03:50:56PM -0700, Andrew Morton wrote:
> On Tue, 17 Jun 2025 16:08:51 -0400 Yury Norov <yury.norov@gmail.com> wrote:
>
> > From: "Yury Norov [NVIDIA]" <yury.norov@gmail.com>
> >
> > Generalize node_random() and make it available to general bitmap and
> > cpumask users.
> >
> > Notice, find_first_bit() is generally faster than find_nth_bit(), and we
> > employ it when there's a single set bit in the bitmap.
> >
> > See commit 3e061d924fe9c7b4 ("lib/nodemask: optimize node_random for
> > nodemask with single NUMA node").
> >
> > ...
> >
> > --- a/include/linux/nodemask.h
> > +++ b/include/linux/nodemask.h
> > @@ -492,21 +492,7 @@ static __always_inline int num_node_state(enum node_states state)
> > static __always_inline int node_random(const nodemask_t *maskp)
> > {
> > #if defined(CONFIG_NUMA) && (MAX_NUMNODES > 1)
> > - int w, bit;
> > -
> > - w = nodes_weight(*maskp);
> > - switch (w) {
> > - case 0:
> > - bit = NUMA_NO_NODE;
>
> If the mask has no bits set, return -1.
...
> If the mask has no bits set, return the mask's size.
>
> > + case 1:
> > + /* Performance trick for single-bit bitmaps */
> > + return find_first_bit(addr, size);
> > + default:
> > + return find_nth_bit(addr, size, get_random_u32_below(w));
> > + }
> > +}
>
> I'm not seeing how this is correct?
Ahh... Indeed you're right.
Thanks, Andrew, I'll send v4
© 2016 - 2026 Red Hat, Inc.