
lib/generic-radix-tree.c: Make nodes more reasonably sized

This code originally used the page allocator directly, but most code
shouldn't do that: PAGE_SIZE varies with architecture, and slab
allocations are faster.

4k is also on the large side for typical usage; 512 bytes is a better
choice for workloads that may be somewhat sparse.

Signed-off-by: Kent Overstreet <kent.overstreet@linux.dev>
Kent Overstreet 2024-03-07 22:32:06 -05:00
parent d64547999c
commit 3a319a2476
2 changed files with 28 additions and 36 deletions
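
For context, here is a minimal usage sketch of the API this patch touches, based on the DOC comment in generic-radix-tree.h. The struct, table name, and helper below are hypothetical, not part of the patch; entries may be any size up to GENRADIX_NODE_SIZE, which is now 512 bytes rather than a page:

/*
 * Hypothetical example: a sparse table of small fixed-size entries.
 * Each radix node is now a 512-byte kzalloc()'d object, so sparse
 * population wastes at most one 512-byte node per touched region
 * instead of a full page.
 */
#include <linux/generic-radix-tree.h>

struct foo {
	u64	id;
	u32	flags;
};

static GENRADIX(struct foo) foo_table;

static int foo_set(size_t idx, u64 id, gfp_t gfp)
{
	struct foo *f = genradix_ptr_alloc(&foo_table, idx, gfp);

	if (!f)
		return -ENOMEM;	/* node allocation failed */

	f->id		= id;
	f->flags	= 0;
	return 0;
}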

include/linux/generic-radix-tree.h

@@ -5,7 +5,7 @@
  * DOC: Generic radix trees/sparse arrays
  *
  * Very simple and minimalistic, supporting arbitrary size entries up to
- * PAGE_SIZE.
+ * GENRADIX_NODE_SIZE.
  *
  * A genradix is defined with the type it will store, like so:
  *
@@ -45,12 +45,15 @@
 struct genradix_root;
 
+#define GENRADIX_NODE_SHIFT	9
+#define GENRADIX_NODE_SIZE	(1U << GENRADIX_NODE_SHIFT)
+
 struct __genradix {
 	struct genradix_root		*root;
 };
 
 /*
- * NOTE: currently, sizeof(_type) must not be larger than PAGE_SIZE:
+ * NOTE: currently, sizeof(_type) must not be larger than GENRADIX_NODE_SIZE:
  */
 
 #define __GENRADIX_INITIALIZER					\
@@ -101,14 +104,14 @@ void __genradix_free(struct __genradix *);
 static inline size_t __idx_to_offset(size_t idx, size_t obj_size)
 {
 	if (__builtin_constant_p(obj_size))
-		BUILD_BUG_ON(obj_size > PAGE_SIZE);
+		BUILD_BUG_ON(obj_size > GENRADIX_NODE_SIZE);
 	else
-		BUG_ON(obj_size > PAGE_SIZE);
+		BUG_ON(obj_size > GENRADIX_NODE_SIZE);
 
 	if (!is_power_of_2(obj_size)) {
-		size_t objs_per_page = PAGE_SIZE / obj_size;
+		size_t objs_per_page = GENRADIX_NODE_SIZE / obj_size;
 
-		return (idx / objs_per_page) * PAGE_SIZE +
+		return (idx / objs_per_page) * GENRADIX_NODE_SIZE +
 			(idx % objs_per_page) * obj_size;
 	} else {
 		return idx * obj_size;
@@ -118,9 +121,9 @@ static inline size_t __idx_to_offset(size_t idx, size_t obj_size)
 #define __genradix_cast(_radix)		(typeof((_radix)->type[0]) *)
 #define __genradix_obj_size(_radix)	sizeof((_radix)->type[0])
 #define __genradix_objs_per_page(_radix)			\
-	(PAGE_SIZE / sizeof((_radix)->type[0]))
+	(GENRADIX_NODE_SIZE / sizeof((_radix)->type[0]))
 #define __genradix_page_remainder(_radix)			\
-	(PAGE_SIZE % sizeof((_radix)->type[0]))
+	(GENRADIX_NODE_SIZE % sizeof((_radix)->type[0]))
 
 #define __genradix_idx_to_offset(_radix, _idx)			\
 	__idx_to_offset(_idx, __genradix_obj_size(_radix))
@@ -217,8 +220,8 @@ static inline void __genradix_iter_advance(struct genradix_iter *iter,
 	iter->offset += obj_size;
 
 	if (!is_power_of_2(obj_size) &&
-	    (iter->offset & (PAGE_SIZE - 1)) + obj_size > PAGE_SIZE)
-		iter->offset = round_up(iter->offset, PAGE_SIZE);
+	    (iter->offset & (GENRADIX_NODE_SIZE - 1)) + obj_size > GENRADIX_NODE_SIZE)
+		iter->offset = round_up(iter->offset, GENRADIX_NODE_SIZE);
 
 	iter->pos++;
 }
@@ -235,8 +238,8 @@ static inline void __genradix_iter_rewind(struct genradix_iter *iter,
 		return;
 	}
 
-	if ((iter->offset & (PAGE_SIZE - 1)) == 0)
-		iter->offset -= PAGE_SIZE % obj_size;
+	if ((iter->offset & (GENRADIX_NODE_SIZE - 1)) == 0)
+		iter->offset -= GENRADIX_NODE_SIZE % obj_size;
 
 	iter->offset -= obj_size;
 	iter->pos--;
@@ -263,7 +266,7 @@ static inline void __genradix_iter_rewind(struct genradix_iter *iter,
 	genradix_for_each_from(_radix, _iter, _p, 0)
 
 #define genradix_last_pos(_radix)				\
-	(SIZE_MAX / PAGE_SIZE * __genradix_objs_per_page(_radix) - 1)
+	(SIZE_MAX / GENRADIX_NODE_SIZE * __genradix_objs_per_page(_radix) - 1)
 
 /**
  * genradix_for_each_reverse - iterate over entry in a genradix, reverse order
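
The __idx_to_offset() change above is easier to see with concrete numbers. Below is a standalone userspace sketch, not part of the patch, that mirrors the kernel arithmetic with the new 512-byte node size (helper names are mine); objects whose size is not a power of two are packed per node, and the leftover bytes at the end of each node are skipped:

#include <stdio.h>
#include <stddef.h>

#define GENRADIX_NODE_SHIFT	9
#define GENRADIX_NODE_SIZE	(1U << GENRADIX_NODE_SHIFT)

static int is_power_of_2(size_t n)
{
	return n && !(n & (n - 1));
}

/* Mirrors __idx_to_offset() from the header, minus the BUG_ON()s */
static size_t idx_to_offset(size_t idx, size_t obj_size)
{
	if (!is_power_of_2(obj_size)) {
		size_t objs_per_node = GENRADIX_NODE_SIZE / obj_size;

		return (idx / objs_per_node) * GENRADIX_NODE_SIZE +
		       (idx % objs_per_node) * obj_size;
	}
	return idx * obj_size;
}

int main(void)
{
	/* 24-byte objects: 21 fit per 512-byte node, 8 bytes are wasted */
	printf("idx  20 -> offset %zu\n", idx_to_offset(20, 24));  /* 480: last slot of node 0 */
	printf("idx  21 -> offset %zu\n", idx_to_offset(21, 24));  /* 512: first slot of node 1 */
	printf("idx 100 -> offset %zu\n", idx_to_offset(100, 24)); /* 4 * 512 + 16 * 24 = 2432 */
	return 0;
}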

lib/generic-radix-tree.c

@@ -5,7 +5,7 @@
 #include <linux/gfp.h>
 #include <linux/kmemleak.h>
 
-#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
+#define GENRADIX_ARY		(GENRADIX_NODE_SIZE / sizeof(struct genradix_node *))
 #define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
 
 struct genradix_node {
@@ -14,13 +14,13 @@ struct genradix_node {
 		struct genradix_node	*children[GENRADIX_ARY];
 
 		/* Leaf: */
-		u8			data[PAGE_SIZE];
+		u8			data[GENRADIX_NODE_SIZE];
 	};
 };
 
 static inline int genradix_depth_shift(unsigned depth)
 {
-	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
+	return GENRADIX_NODE_SHIFT + GENRADIX_ARY_SHIFT * depth;
 }
 
 /*
@@ -33,7 +33,7 @@ static inline size_t genradix_depth_size(unsigned depth)
 /* depth that's needed for a genradix that can address up to ULONG_MAX: */
 #define GENRADIX_MAX_DEPTH	\
-	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)
+	DIV_ROUND_UP(BITS_PER_LONG - GENRADIX_NODE_SHIFT, GENRADIX_ARY_SHIFT)
 
 #define GENRADIX_DEPTH_MASK				\
 	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
@@ -79,23 +79,12 @@ EXPORT_SYMBOL(__genradix_ptr);
 static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
 {
-	struct genradix_node *node;
-
-	node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);
-
-	/*
-	 * We're using pages (not slab allocations) directly for kernel data
-	 * structures, so we need to explicitly inform kmemleak of them in order
-	 * to avoid false positive memory leak reports.
-	 */
-	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
-	return node;
+	return kzalloc(GENRADIX_NODE_SIZE, gfp_mask);
 }
 
 static inline void genradix_free_node(struct genradix_node *node)
 {
-	kmemleak_free(node);
-	free_page((unsigned long)node);
+	kfree(node);
 }
 
 /*
@@ -200,7 +189,7 @@ restart:
 			i++;
 			iter->offset = round_down(iter->offset + objs_per_ptr,
 						  objs_per_ptr);
-			iter->pos = (iter->offset >> PAGE_SHIFT) *
+			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) *
 				objs_per_page;
 			if (i == GENRADIX_ARY)
 				goto restart;
@@ -209,7 +198,7 @@ restart:
 		n = n->children[i];
 	}
 
-	return &n->data[iter->offset & (PAGE_SIZE - 1)];
+	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
 }
 EXPORT_SYMBOL(__genradix_iter_peek);
@@ -235,7 +224,7 @@ restart:
 	if (ilog2(iter->offset) >= genradix_depth_shift(level)) {
 		iter->offset = genradix_depth_size(level);
-		iter->pos = (iter->offset >> PAGE_SHIFT) * objs_per_page;
+		iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;
 
 		iter->offset -= obj_size_plus_page_remainder;
 		iter->pos--;
@@ -251,7 +240,7 @@ restart:
 			size_t objs_per_ptr = genradix_depth_size(level);
 
 			iter->offset = round_down(iter->offset, objs_per_ptr);
-			iter->pos = (iter->offset >> PAGE_SHIFT) * objs_per_page;
+			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;
 
 			if (!iter->offset)
 				return NULL;
@@ -267,7 +256,7 @@ restart:
 		n = n->children[i];
 	}
 
-	return &n->data[iter->offset & (PAGE_SIZE - 1)];
+	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
 }
 EXPORT_SYMBOL(__genradix_iter_peek_prev);
@@ -289,7 +278,7 @@ int __genradix_prealloc(struct __genradix *radix, size_t size,
 {
 	size_t offset;
 
-	for (offset = 0; offset < size; offset += PAGE_SIZE)
+	for (offset = 0; offset < size; offset += GENRADIX_NODE_SIZE)
 		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
 			return -ENOMEM;
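
The smaller nodes also change the tree geometry: with 512-byte nodes and 8-byte pointers, each interior node holds 64 children, so every extra level multiplies the addressable range by 64. The standalone sketch below (again not part of the patch; BITS_PER_LONG and DIV_ROUND_UP are redefined locally, and a 64-bit machine is assumed) works out how many bytes each depth covers and the maximum depth needed for ULONG_MAX offsets:

#include <stdio.h>

#define BITS_PER_LONG		64
#define GENRADIX_NODE_SHIFT	9
#define GENRADIX_NODE_SIZE	(1UL << GENRADIX_NODE_SHIFT)
#define GENRADIX_ARY		(GENRADIX_NODE_SIZE / sizeof(void *))
#define GENRADIX_ARY_SHIFT	6	/* ilog2(GENRADIX_ARY) = ilog2(64) */
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	unsigned depth;

	printf("children per interior node: %zu\n", (size_t) GENRADIX_ARY);

	/* bytes addressable at a given depth: 512 << (6 * depth) */
	for (depth = 0; depth <= 3; depth++)
		printf("depth %u covers %lu bytes\n",
		       depth, GENRADIX_NODE_SIZE << (GENRADIX_ARY_SHIFT * depth));

	/* depth needed to cover the full ULONG_MAX index space */
	printf("GENRADIX_MAX_DEPTH = %d\n",
	       DIV_ROUND_UP(BITS_PER_LONG - GENRADIX_NODE_SHIFT, GENRADIX_ARY_SHIFT));
	return 0;
}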