/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/hardirq.h>		/* in_interrupt() */


/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading in the interrupt anyway as all the allocations have to
	 * be atomic. So just do normal allocation when in interrupt.
	 */
	if (!(gfp_mask & __GFP_WAIT) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded, we will always succeed
		 * in getting a node here (and never reach kmem_cache_alloc).
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!(gfp_mask & __GFP_WAIT));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);
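
/*
 * Illustrative usage sketch (an assumption-laden example, not part of the
 * original file): the usual pattern pairs radix_tree_preload() with the
 * caller's own lock.  "my_tree", "my_lock", "index" and "item" below are
 * hypothetical caller-provided names:
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();
 *
 * radix_tree_preload_end() re-enables the preemption that a successful
 * radix_tree_preload() left disabled.
 */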

/*
 * The same as radix_tree_preload(), except we don't guarantee preloading
 * happens.  We do it if we decide it helps.  On success, return zero with
 * preemption disabled.  On error, return -ENOMEM with preemption not
 * disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfp_mask & __GFP_WAIT)
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);

/*
 *	Return the maximum key which can be stored into a
 *	radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}

/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height.  */
		newheight = root->height+1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;
	return 0;
}

/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);
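
/*
 * Illustrative sketch (not part of the original file): a minimal insert and
 * lookup on a statically defined tree.  "my_tree" and "my_item" are
 * hypothetical:
 *
 *	static RADIX_TREE(my_tree, GFP_ATOMIC);
 *
 *	if (radix_tree_insert(&my_tree, 42, my_item) == 0) {
 *		void *p = radix_tree_lookup(&my_tree, 42);
 *		...	p is my_item here
 *	}
 *
 * With an atomic gfp mask in the root, node allocations inside the insert
 * never sleep; callers that may sleep typically combine a sleeping mask with
 * radix_tree_preload() as shown above.
 */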

/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
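
/*
 * Illustrative sketch (not part of the original file): RCU-side use of a
 * slot with the deref/retry protocol.  "my_tree" is hypothetical:
 *
 *	rcu_read_lock();
 *	slot = radix_tree_lookup_slot(&my_tree, index);
 *	if (slot) {
 *		item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item))
 *			...	restart the whole lookup
 *	}
 *	rcu_read_unlock();
 */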

/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (e.g. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);

/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree, from the root
 *	all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
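
/*
 * Illustrative sketch (not part of the original file): tagging an existing
 * entry and testing the tag.  The literal tag value 0 is arbitrary; real
 * users define symbolic tags (the page cache uses PAGECACHE_TAG_DIRTY etc.):
 *
 *	radix_tree_tag_set(&my_tree, index, 0);
 *	if (radix_tree_tag_get(&my_tree, index, 0))
 *		...	the entry at index now carries tag 0
 */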

/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  i.e.:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	while (shift) {
		if (slot == NULL)
			goto out;

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is a leaf node */
		if (!shift)
			break;

		node = rcu_dereference_raw(node->slots[offset]);
		if (node == NULL)
			goto restart;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
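
/*
 * Illustrative sketch (not part of the original file): the chunk logic above
 * is normally driven through the iterator macros from linux/radix-tree.h
 * rather than called directly.  "my_tree" is hypothetical:
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *		...	use iter.index and item
 *	}
 *	rcu_read_unlock();
 */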

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				   tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number of items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set, the
 * function also sets settag.  The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			/* Go down one level */
			shift -= RADIX_TREE_MAP_SHIFT;
			node = slot;
			slot = slot->slots[offset];
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * We need not set the root tag if no leaf in the range from
	 * *first_indexp to last_index was tagged with settag.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
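
/*
 * Illustrative sketch (an assumption-laden example, not part of the original
 * file): callers such as tag_pages_for_writeback() retag in batches under
 * their own lock.  Everything except the radix tree API below is
 * hypothetical:
 *
 *	while (start <= end) {
 *		spin_lock_irq(&my_lock);
 *		tagged = radix_tree_range_tag_if_tagged(&my_tree, &start, end,
 *						MY_BATCH, MY_IFTAG, MY_SETTAG);
 *		spin_unlock_irq(&my_lock);
 *		if (tagged < MY_BATCH || !start)
 *			break;
 *		cond_resched();
 *	}
 *
 * The !start check handles the documented wrap of *first_indexp when
 * last_index is ULONG_MAX.
 */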

/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
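
/*
 * Illustrative sketch (not part of the original file): collecting up to a
 * fixed number of present items.  "my_tree" and MY_BATCH are hypothetical:
 *
 *	void *batch[MY_BATCH];
 *	unsigned int i, nr;
 *
 *	rcu_read_lock();
 *	nr = radix_tree_gang_lookup(&my_tree, batch, 0, MY_BATCH);
 *	for (i = 0; i < nr; i++)
 *		...	process batch[i]
 *	rcu_read_unlock();
 *
 * Because the plain gang lookup does not report indices, callers that need
 * to resume a scan usually use radix_tree_gang_lookup_slot() with @indices
 * or the radix_tree_for_each_slot() iterator instead.
 */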

/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
		if (!results[ret])
			continue;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 *	radix_tree_shrink    -    shrink the height of a radix tree to the minimum
 *	@root:		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		slot = to_free->slots[0];
		if (root->height > 1) {
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page is 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}

/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = index & RADIX_TREE_MAP_MASK;

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it would be inefficient, but seldom is any tag set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
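
/*
 * Illustrative sketch (not part of the original file): removal under the
 * caller's lock, verifying that the expected item was the one removed.
 * Names other than the radix tree API are hypothetical:
 *
 *	spin_lock(&my_lock);
 *	old = radix_tree_delete_item(&my_tree, index, my_item);
 *	spin_unlock(&my_lock);
 *	if (old != my_item)
 *		...	someone else removed or replaced it first
 *
 * Interior nodes freed here go through call_rcu(), so concurrent RCU readers
 * may still walk them until a grace period has elapsed.
 */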

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}
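
/*
 * Worked example (assuming the common RADIX_TREE_MAP_SHIFT of 6 on a 64-bit
 * build; CONFIG_BASE_SMALL builds use 4): height 0 can hold only index 0,
 * height 1 covers indices 0..63, height 2 covers 0..4095, and so on until
 * the accumulated shift reaches RADIX_TREE_INDEX_BITS, after which
 * __maxindex() returns ~0UL.
 */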

static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
				unsigned long action,
				void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			kmem_cache_free(radix_tree_node_cachep,
					rtp->nodes[rtp->nr-1]);
			rtp->nodes[rtp->nr-1] = NULL;
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}