radix-tree: replace preallocated node array with linked list

Currently we use per-cpu array to hold pointers to preallocated nodes.
Let's replace it with linked list.  On x86_64 it saves 256 bytes in
per-cpu ELF section which may translate into freeing up 2MB of memory for
NR_CPUS==8192.

[akpm@linux-foundation.org: fix comment, coding style]
Signed-off-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Acked-by: Johannes Weiner <hannes@cmpxchg.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
This commit is contained in:
Kirill A. Shutemov 2015-06-25 15:02:19 -07:00 committed by Linus Torvalds
parent 9cf79d115f
commit 9d2a8da006

View File

@@ -65,7 +65,8 @@ static struct kmem_cache *radix_tree_node_cachep;
*/ */
struct radix_tree_preload { struct radix_tree_preload {
int nr; int nr;
struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE]; /* nodes->private_data points to next preallocated node */
struct radix_tree_node *nodes;
}; };
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, }; static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
@@ -197,8 +198,9 @@ radix_tree_node_alloc(struct radix_tree_root *root)
*/ */
rtp = this_cpu_ptr(&radix_tree_preloads); rtp = this_cpu_ptr(&radix_tree_preloads);
if (rtp->nr) { if (rtp->nr) {
ret = rtp->nodes[rtp->nr - 1]; ret = rtp->nodes;
rtp->nodes[rtp->nr - 1] = NULL; rtp->nodes = ret->private_data;
ret->private_data = NULL;
rtp->nr--; rtp->nr--;
} }
/* /*
@@ -257,17 +259,20 @@ static int __radix_tree_preload(gfp_t gfp_mask)
preempt_disable(); preempt_disable();
rtp = this_cpu_ptr(&radix_tree_preloads); rtp = this_cpu_ptr(&radix_tree_preloads);
while (rtp->nr < ARRAY_SIZE(rtp->nodes)) { while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
preempt_enable(); preempt_enable();
node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask); node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
if (node == NULL) if (node == NULL)
goto out; goto out;
preempt_disable(); preempt_disable();
rtp = this_cpu_ptr(&radix_tree_preloads); rtp = this_cpu_ptr(&radix_tree_preloads);
if (rtp->nr < ARRAY_SIZE(rtp->nodes)) if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
rtp->nodes[rtp->nr++] = node; node->private_data = rtp->nodes;
else rtp->nodes = node;
rtp->nr++;
} else {
kmem_cache_free(radix_tree_node_cachep, node); kmem_cache_free(radix_tree_node_cachep, node);
}
} }
ret = 0; ret = 0;
out: out:
@@ -1463,15 +1468,16 @@ static int radix_tree_callback(struct notifier_block *nfb,
{ {
int cpu = (long)hcpu; int cpu = (long)hcpu;
struct radix_tree_preload *rtp; struct radix_tree_preload *rtp;
struct radix_tree_node *node;
/* Free per-cpu pool of preloaded nodes */ /* Free per-cpu pool of preloaded nodes */
if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) { if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
rtp = &per_cpu(radix_tree_preloads, cpu); rtp = &per_cpu(radix_tree_preloads, cpu);
while (rtp->nr) { while (rtp->nr) {
kmem_cache_free(radix_tree_node_cachep, node = rtp->nodes;
rtp->nodes[rtp->nr-1]); rtp->nodes = node->private_data;
rtp->nodes[rtp->nr-1] = NULL; kmem_cache_free(radix_tree_node_cachep, node);
rtp->nr--; rtp->nr--;
} }
} }
return NOTIFY_OK; return NOTIFY_OK;