^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 1)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 2) #include <linux/export.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 3) #include <linux/generic-radix-tree.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 4) #include <linux/gfp.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 5) #include <linux/kmemleak.h>
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 6)
/*
 * Every node is exactly one page. An interior node is an array of child
 * pointers, so it has PAGE_SIZE / sizeof(pointer) slots; each level of the
 * tree therefore multiplies the addressable range by GENRADIX_ARY:
 */
#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 9)
/*
 * A node is either an interior node (an array of child pointers) or a leaf
 * (one page of user data); which member of the union is live depends on the
 * node's level in the tree (leaves are at level 0).
 */
struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node *children[GENRADIX_ARY];

		/* Leaf: */
		u8 data[PAGE_SIZE];
	};
};
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 19)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 20) static inline int genradix_depth_shift(unsigned depth)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 21) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 22) return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 23) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 24)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 25) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 26) * Returns size (of data, in bytes) that a tree of a given depth holds:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 27) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 28) static inline size_t genradix_depth_size(unsigned depth)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 29) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 30) return 1UL << genradix_depth_shift(depth);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 31) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 32)
/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH	\
	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)

/*
 * The current depth is packed into the low bits of the root pointer (nodes
 * are page aligned, so those bits are otherwise zero); this mask is wide
 * enough to hold any value up to GENRADIX_MAX_DEPTH:
 */
#define GENRADIX_DEPTH_MASK				\
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 39)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 40) static inline unsigned genradix_root_to_depth(struct genradix_root *r)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 41) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 42) return (unsigned long) r & GENRADIX_DEPTH_MASK;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 43) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 44)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 45) static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 46) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 47) return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 48) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 49)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 50) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 51) * Returns pointer to the specified byte @offset within @radix, or NULL if not
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 52) * allocated
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 53) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 54) void *__genradix_ptr(struct __genradix *radix, size_t offset)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 55) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 56) struct genradix_root *r = READ_ONCE(radix->root);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 57) struct genradix_node *n = genradix_root_to_node(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 58) unsigned level = genradix_root_to_depth(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 59)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 60) if (ilog2(offset) >= genradix_depth_shift(level))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 61) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 62)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 63) while (1) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 64) if (!n)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 65) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 66) if (!level)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 67) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 68)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 69) level--;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 70)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 71) n = n->children[offset >> genradix_depth_shift(level)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 72) offset &= genradix_depth_size(level) - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 73) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 74)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 75) return &n->data[offset];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 76) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 77) EXPORT_SYMBOL(__genradix_ptr);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 78)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 79) static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 80) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 81) struct genradix_node *node;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 82)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 83) node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 84)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 85) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 86) * We're using pages (not slab allocations) directly for kernel data
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 87) * structures, so we need to explicitly inform kmemleak of them in order
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 88) * to avoid false positive memory leak reports.
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 89) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 90) kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 91) return node;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 92) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 93)
/*
 * Free a node allocated by genradix_alloc_node(): unregister the page from
 * kmemleak first, then hand it back to the page allocator.
 */
static inline void genradix_free_node(struct genradix_node *node)
{
	kmemleak_free(node);
	free_page((unsigned long)node);
}
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 99)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 100) /*
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 101) * Returns pointer to the specified byte @offset within @radix, allocating it if
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 102) * necessary - newly allocated slots are always zeroed out:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 103) */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 104) void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 105) gfp_t gfp_mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 106) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 107) struct genradix_root *v = READ_ONCE(radix->root);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 108) struct genradix_node *n, *new_node = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 109) unsigned level;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 110)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 111) /* Increase tree depth if necessary: */
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 112) while (1) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 113) struct genradix_root *r = v, *new_root;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 114)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 115) n = genradix_root_to_node(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 116) level = genradix_root_to_depth(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 117)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 118) if (n && ilog2(offset) < genradix_depth_shift(level))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 119) break;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 120)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 121) if (!new_node) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 122) new_node = genradix_alloc_node(gfp_mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 123) if (!new_node)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 124) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 125) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 126)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 127) new_node->children[0] = n;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 128) new_root = ((struct genradix_root *)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 129) ((unsigned long) new_node | (n ? level + 1 : 0)));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 130)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 131) if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 132) v = new_root;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 133) new_node = NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 134) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 135) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 136)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 137) while (level--) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 138) struct genradix_node **p =
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 139) &n->children[offset >> genradix_depth_shift(level)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 140) offset &= genradix_depth_size(level) - 1;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 141)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 142) n = READ_ONCE(*p);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 143) if (!n) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 144) if (!new_node) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 145) new_node = genradix_alloc_node(gfp_mask);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 146) if (!new_node)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 147) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 148) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 149)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 150) if (!(n = cmpxchg_release(p, NULL, new_node)))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 151) swap(n, new_node);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 152) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 153) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 154)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 155) if (new_node)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 156) genradix_free_node(new_node);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 157)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 158) return &n->data[offset];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 159) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 160) EXPORT_SYMBOL(__genradix_ptr_alloc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 161)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 162) void *__genradix_iter_peek(struct genradix_iter *iter,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 163) struct __genradix *radix,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 164) size_t objs_per_page)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 165) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 166) struct genradix_root *r;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 167) struct genradix_node *n;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 168) unsigned level, i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 169) restart:
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 170) r = READ_ONCE(radix->root);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 171) if (!r)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 172) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 173)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 174) n = genradix_root_to_node(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 175) level = genradix_root_to_depth(r);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 176)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 177) if (ilog2(iter->offset) >= genradix_depth_shift(level))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 178) return NULL;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 179)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 180) while (level) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 181) level--;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 182)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 183) i = (iter->offset >> genradix_depth_shift(level)) &
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 184) (GENRADIX_ARY - 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 185)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 186) while (!n->children[i]) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 187) i++;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 188) iter->offset = round_down(iter->offset +
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 189) genradix_depth_size(level),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 190) genradix_depth_size(level));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 191) iter->pos = (iter->offset >> PAGE_SHIFT) *
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 192) objs_per_page;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 193) if (i == GENRADIX_ARY)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 194) goto restart;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 195) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 196)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 197) n = n->children[i];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 198) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 199)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 200) return &n->data[iter->offset & (PAGE_SIZE - 1)];
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 201) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 202) EXPORT_SYMBOL(__genradix_iter_peek);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 203)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 204) static void genradix_free_recurse(struct genradix_node *n, unsigned level)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 205) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 206) if (level) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 207) unsigned i;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 208)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 209) for (i = 0; i < GENRADIX_ARY; i++)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 210) if (n->children[i])
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 211) genradix_free_recurse(n->children[i], level - 1);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 212) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 213)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 214) genradix_free_node(n);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 215) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 216)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 217) int __genradix_prealloc(struct __genradix *radix, size_t size,
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 218) gfp_t gfp_mask)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 219) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 220) size_t offset;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 221)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 222) for (offset = 0; offset < size; offset += PAGE_SIZE)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 223) if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 224) return -ENOMEM;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 225)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 226) return 0;
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 227) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 228) EXPORT_SYMBOL(__genradix_prealloc);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 229)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 230) void __genradix_free(struct __genradix *radix)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 231) {
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 232) struct genradix_root *r = xchg(&radix->root, NULL);
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 233)
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 234) genradix_free_recurse(genradix_root_to_node(r),
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 235) genradix_root_to_depth(r));
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 236) }
^8f3ce5b39 (kx 2023-10-28 12:00:06 +0300 237) EXPORT_SYMBOL(__genradix_free);