#include <linux/atomic.h>
#include <linux/export.h>
#include <linux/generic-radix-tree.h>
#include <linux/gfp.h>
#include <linux/kmemleak.h>

#define GENRADIX_ARY		(GENRADIX_NODE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)

struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node *children[GENRADIX_ARY];

		/* Leaf: */
		u8 data[GENRADIX_NODE_SIZE];
	};
};
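
/*
 * Illustrative arithmetic (assumed numbers, not taken from this file):
 * with 4 KiB nodes (GENRADIX_NODE_SIZE == 4096) and 8-byte pointers,
 * GENRADIX_ARY == 4096 / 8 == 512 and GENRADIX_ARY_SHIFT == 9, i.e. each
 * interior node fans out 512 ways.  Either way the union is sized so that
 * every node, interior or leaf, occupies exactly GENRADIX_NODE_SIZE bytes.
 */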

static inline int genradix_depth_shift(unsigned depth)
{
	return GENRADIX_NODE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
	return 1UL << genradix_depth_shift(depth);
}
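
/*
 * Worked example, under the same assumed geometry (GENRADIX_NODE_SHIFT == 12,
 * GENRADIX_ARY_SHIFT == 9):
 *
 *	genradix_depth_size(0) == 1UL << 12 == 4 KiB  (a single leaf)
 *	genradix_depth_size(1) == 1UL << 21 == 2 MiB  (512 leaves)
 *	genradix_depth_size(2) == 1UL << 30 == 1 GiB  (512 * 512 leaves)
 *
 * i.e. each additional level multiplies the addressable size by GENRADIX_ARY.
 */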

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH \
	DIV_ROUND_UP(BITS_PER_LONG - GENRADIX_NODE_SHIFT, GENRADIX_ARY_SHIFT)

#define GENRADIX_DEPTH_MASK \
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))

static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
	return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}
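
/*
 * The root is a single tagged pointer: node allocations are aligned well past
 * GENRADIX_DEPTH_MASK, so the low bits of a node pointer are free to store the
 * tree depth.  Continuing the assumed 64-bit / 4 KiB-node example:
 * GENRADIX_MAX_DEPTH == DIV_ROUND_UP(64 - 12, 9) == 6, so GENRADIX_DEPTH_MASK
 * == roundup_pow_of_two(7) - 1 == 7 and the depth lives in the low three bits:
 *
 *	r     = (struct genradix_root *) ((unsigned long) node | depth);
 *	node  = genradix_root_to_node(r);	// masks the low bits off
 *	depth = genradix_root_to_depth(r);	// keeps only the low bits
 *
 * __genradix_ptr_alloc() below packs new roots this way when it grows the
 * tree.
 */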

/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level = genradix_root_to_depth(r);

	/* @offset lies beyond what a tree of this depth can address: */
	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		/* Descend one level, keeping only the offset bits below it: */
		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);
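
/*
 * Usage sketch (not part of this file; names are made up): callers normally go
 * through the type-safe macros in <linux/generic-radix-tree.h>, which turn an
 * element index into a byte offset and call __genradix_ptr():
 *
 *	static DEFINE_GENRADIX(counters, u64);
 *
 *	u64 *p = genradix_ptr(&counters, idx);	// NULL if idx was never allocated
 *	if (p)
 *		(*p)++;
 */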

static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
	return kzalloc(GENRADIX_NODE_SIZE, gfp_mask);
}

static inline void genradix_free_node(struct genradix_node *node)
{
	kfree(node);
}

/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n = genradix_root_to_node(r);
		level = genradix_root_to_depth(r);

		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		/* The old root (if any) becomes child 0 of the new root: */
		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		} else {
			/* Lost the race; don't leave a stale pointer behind: */
			new_node->children[0] = NULL;
		}
	}

	/* Now walk down to the leaf, allocating missing interior nodes: */
	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			/*
			 * cmpxchg returns NULL if we installed new_node; the
			 * swap then hands it to n.  Otherwise another thread
			 * beat us and n is already the installed node.
			 */
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
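
/*
 * Usage sketch (illustrative, names made up): the genradix_ptr_alloc() macro
 * from <linux/generic-radix-tree.h> wraps this, so a sparse array can be
 * populated on demand without sizing it up front:
 *
 *	struct foo_entry *e = genradix_ptr_alloc(&foo_table, idx, GFP_KERNEL);
 *	if (!e)
 *		return -ENOMEM;
 *	// e points at a zeroed struct foo_entry; concurrent callers for the
 *	// same idx end up with the same slot thanks to the cmpxchg()es above.
 */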

void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip over unallocated subtrees: */
		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			/* Advancing would overflow: iteration is done. */
			if (iter->offset + objs_per_ptr < iter->offset) {
				iter->offset = SIZE_MAX;
				iter->pos = SIZE_MAX;
				return NULL;
			}

			i++;
			iter->offset = round_down(iter->offset + objs_per_ptr,
						  objs_per_ptr);
			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) *
				objs_per_page;
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);
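
/*
 * Usage sketch (illustrative): iteration is normally driven by the
 * genradix_for_each() macro from the header, which peeks, yields the element,
 * then advances:
 *
 *	struct genradix_iter iter;
 *	struct foo_entry *e;
 *
 *	genradix_for_each(&foo_table, iter, e)
 *		pr_info("entry %zu\n", iter.pos);
 *
 * Unallocated regions are skipped a whole subtree at a time by the inner
 * while loop above, rather than one missing element at a time.
 */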

void *__genradix_iter_peek_prev(struct genradix_iter *iter,
				struct __genradix *radix,
				size_t objs_per_page,
				size_t obj_size_plus_page_remainder)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level)) {
		/* Clamp to the last object the tree can currently address: */
		iter->offset = genradix_depth_size(level);
		iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;

		iter->offset -= obj_size_plus_page_remainder;
		iter->pos--;
	}

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip backwards over unallocated subtrees: */
		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			iter->offset = round_down(iter->offset, objs_per_ptr);
			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;

			if (!iter->offset)
				return NULL;

			iter->offset -= obj_size_plus_page_remainder;
			iter->pos--;

			if (!i)
				goto restart;
			--i;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek_prev);
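
/*
 * This is the backwards counterpart of __genradix_iter_peek(): the extra
 * obj_size_plus_page_remainder argument lets it step back by whole objects,
 * including the dead space at the end of a node when the object size doesn't
 * divide GENRADIX_NODE_SIZE evenly.  The header's reverse-iteration helpers
 * (genradix_iter_peek_prev() and the loop macro built on it) wrap this.
 */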

static void genradix_free_recurse(struct genradix_node *n, unsigned level)
{
	if (level) {
		unsigned i;

		for (i = 0; i < GENRADIX_ARY; i++)
			if (n->children[i])
				genradix_free_recurse(n->children[i], level - 1);
	}

	genradix_free_node(n);
}

int __genradix_prealloc(struct __genradix *radix, size_t size,
			gfp_t gfp_mask)
{
	size_t offset;

	for (offset = 0; offset < size; offset += GENRADIX_NODE_SIZE)
		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
			return -ENOMEM;

	return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);

void __genradix_free(struct __genradix *radix)
{
	struct genradix_root *r = xchg(&radix->root, NULL);

	genradix_free_recurse(genradix_root_to_node(r),
			      genradix_root_to_depth(r));
}
EXPORT_SYMBOL(__genradix_free);
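
/*
 * End-to-end sketch (illustrative, names made up) tying the pieces together:
 *
 *	static DEFINE_GENRADIX(foo_table, struct foo_entry);
 *
 *	genradix_init(&foo_table);
 *
 *	// Optionally reserve the first N entries up front so later accesses
 *	// can't fail with -ENOMEM:
 *	ret = genradix_prealloc(&foo_table, N, GFP_KERNEL);
 *
 *	// ... genradix_ptr() / genradix_ptr_alloc() / genradix_for_each() ...
 *
 *	genradix_free(&foo_table);	// tears down every node via
 *					// genradix_free_recurse()
 */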