/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 * Copyright (C) 2016 Intel, Matthew Wilcox
 * Copyright (C) 2016 Intel, Ross Zwisler
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>		/* in_interrupt() */
/* Number of nodes in fully populated tree of given height */
static unsigned long height_to_maxnodes[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	unsigned nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
static inline void *node_to_entry(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INTERNAL_NODE);
}

#define RADIX_TREE_RETRY	node_to_entry(NULL)

#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif

static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}

static unsigned int radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned long index)
{
	unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK;
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_internal_node(entry)) {
		if (is_sibling_entry(parent, entry)) {
			void **sibentry = (void **) entry_to_node(entry);
			offset = get_slot_offset(parent, sibentry);
			entry = rcu_dereference_raw(*sibentry);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}
static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force int)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

static inline unsigned root_tags_get(struct radix_tree_root *root)
{
	return (__force unsigned)root->gfp_mask >> __GFP_BITS_SHIFT;
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	unsigned idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}
/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}
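/*
 * Worked example (illustrative, not part of the original file): with a
 * single-word bitmap addr[] = { 0x50UL } (bits 4 and 6 set) and size 64,
 * radix_tree_find_next_bit(addr, 64, 5) shifts the word right by 5,
 * leaving bit 6 as the lowest set bit, so __ffs() returns 1 and the
 * function returns 5 + 1 = 6.  Starting from offset 7 instead finds no
 * remaining set bits and returns size (64).
 */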
#ifndef __KERNEL__
static void dump_node(struct radix_tree_node *node, unsigned long index)
{
	unsigned long i;

	pr_debug("radix node: %p offset %d tags %lx %lx %lx shift %d count %d parent %p\n",
		node, node->offset,
		node->tags[0][0], node->tags[1][0], node->tags[2][0],
		node->shift, node->count, node->parent);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		unsigned long first = index | (i << node->shift);
		unsigned long last = first | ((1UL << node->shift) - 1);
		void *entry = node->slots[i];
		if (!entry)
			continue;
		if (is_sibling_entry(node, entry)) {
			pr_debug("radix sblng %p offset %ld val %p indices %ld-%ld\n",
					entry, i,
					*(void **)entry_to_node(entry),
					first, last);
		} else if (!radix_tree_is_internal_node(entry)) {
			pr_debug("radix entry %p offset %ld indices %ld-%ld\n",
					entry, i, first, last);
		} else {
			dump_node(entry_to_node(entry), first);
		}
	}
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p rnode %p tags %x\n",
			root, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_internal_node(root->rnode))
		return;
	dump_node(entry_to_node(root->rnode), 0);
}
#endif
/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted to the memory
		 * cgroup.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = &get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		put_cpu_var(radix_tree_preloads);
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
out:
	BUG_ON(radix_tree_is_internal_node(ret));
	return ret;
}
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
#ifndef CONFIG_PREEMPT_RT_FULL
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask, int nr)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	/*
	 * Nodes preloaded by one cgroup can be used by another cgroup, so
	 * they should never be accounted to any particular memory cgroup.
	 */
	gfp_mask &= ~__GFP_ACCOUNT;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < nr) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < nr) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
}
EXPORT_SYMBOL(radix_tree_preload);
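/*
 * Illustrative sketch (not from this file): the usual preload pattern
 * around an insertion.  A successful preload returns with preemption
 * disabled, and radix_tree_preload_end() re-enables it.  The tree, lock,
 * index and item names below are hypothetical.
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, my_item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();
 */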
/*
 * The same as above function, except we don't guarantee preloading happens.
 * We do it, if we decide it helps. On success, return zero with preemption
 * disabled. On error, return -ENOMEM with preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask, RADIX_TREE_PRELOAD_SIZE);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
/*
 * The same as function above, but preload number of nodes required to insert
 * (1 << order) continuous naturally-aligned elements.
 */
int radix_tree_maybe_preload_order(gfp_t gfp_mask, int order)
{
	unsigned long nr_subtrees;
	int nr_nodes, subtree_height;

	/* Preloading doesn't help anything with this gfp mask, skip it */
	if (!gfpflags_allow_blocking(gfp_mask)) {
		preempt_disable();
		return 0;
	}

	/*
	 * Calculate number and height of fully populated subtrees it takes to
	 * store (1 << order) elements.
	 */
	nr_subtrees = 1 << order;
	for (subtree_height = 0; nr_subtrees > RADIX_TREE_MAP_SIZE;
			subtree_height++)
		nr_subtrees >>= RADIX_TREE_MAP_SHIFT;

	/*
	 * The worst case is zero height tree with a single item at index 0 and
	 * then inserting items starting at ULONG_MAX - (1 << order).
	 *
	 * This requires RADIX_TREE_MAX_PATH nodes to build branch from root to
	 * 0-index item.
	 */
	nr_nodes = RADIX_TREE_MAX_PATH;

	/* Plus branch to fully populated subtrees. */
	nr_nodes += RADIX_TREE_MAX_PATH - subtree_height;

	/* Root node is shared. */
	nr_nodes--;

	/* Plus nodes required to build subtrees. */
	nr_nodes += nr_subtrees * height_to_maxnodes[subtree_height];

	return __radix_tree_preload(gfp_mask, nr_nodes);
}
#endif
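/*
 * Worked example (illustrative, assuming a 64-bit build with
 * RADIX_TREE_MAP_SHIFT == 6, so RADIX_TREE_MAP_SIZE == 64 and
 * RADIX_TREE_MAX_PATH == 11): for order == 9, nr_subtrees starts at 512;
 * one reduction by RADIX_TREE_MAP_SHIFT leaves 8 subtrees of height 1.
 * The preload budget is then 11 + (11 - 1) - 1 + 8 * height_to_maxnodes[1]
 * = 28 nodes, since height_to_maxnodes[1] == 1.
 */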
/*
 * The maximum index which can be stored in a radix tree
 */
static inline unsigned long shift_maxindex(unsigned int shift)
{
	return (RADIX_TREE_MAP_SIZE << shift) - 1;
}

static inline unsigned long node_maxindex(struct radix_tree_node *node)
{
	return shift_maxindex(node->shift);
}

static unsigned radix_tree_load_root(struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_internal_node(node))) {
		node = entry_to_node(node);
		*maxindex = node_maxindex(node);
		return node->shift + RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}
/*
 *	Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned int shift)
{
	struct radix_tree_node *slot;
	unsigned int maxshift;
	int tag;

	/* Figure out what the shift should be.  */
	maxshift = shift;
	while (index > shift_maxindex(maxshift))
		maxshift += RADIX_TREE_MAP_SHIFT;

	slot = root->rnode;
	if (!slot)
		goto out;

	do {
		struct radix_tree_node *node = radix_tree_node_alloc(root);

		if (!node)
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		BUG_ON(shift > BITS_PER_LONG);
		node->shift = shift;
		node->offset = 0;
		node->count = 1;
		node->parent = NULL;
		if (radix_tree_is_internal_node(slot))
			entry_to_node(slot)->parent = node;
		node->slots[0] = slot;
		slot = node_to_entry(node);
		rcu_assign_pointer(root->rnode, slot);
		shift += RADIX_TREE_MAP_SHIFT;
	} while (shift <= maxshift);
out:
	return maxshift + RADIX_TREE_MAP_SHIFT;
}
/**
 *	__radix_tree_create	-	create a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		index occupies 2^order aligned slots
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Create, if necessary, and return the node and slot for an item
 *	at position @index in the radix tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 *
 *	Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *child;
	void **slot = (void **)&root->rnode;
	unsigned long maxindex;
	unsigned int shift, offset = 0;
	unsigned long max = index | ((1UL << order) - 1);

	shift = radix_tree_load_root(root, &child, &maxindex);

	/* Make sure the tree is high enough.  */
	if (max > maxindex) {
		int error = radix_tree_extend(root, max, shift);
		if (error < 0)
			return error;
		shift = error;
		child = root->rnode;
		if (order == shift)
			shift += RADIX_TREE_MAP_SHIFT;
	}

	while (shift > order) {
		shift -= RADIX_TREE_MAP_SHIFT;
		if (child == NULL) {
			/* Have to add a child node.  */
			child = radix_tree_node_alloc(root);
			if (!child)
				return -ENOMEM;
			child->shift = shift;
			child->offset = offset;
			child->parent = node;
			rcu_assign_pointer(*slot, node_to_entry(child));
			if (node)
				node->count++;
		} else if (!radix_tree_is_internal_node(child))
			break;

		/* Go a level down */
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);
		slot = &node->slots[offset];
	}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	/* Insert pointers to the canonical entry */
	if (order > shift) {
		unsigned i, n = 1 << (order - shift);
		offset = offset & ~(n - 1);
		slot = &node->slots[offset];
		child = node_to_entry(slot);
		for (i = 0; i < n; i++) {
			if (slot[i])
				return -EEXIST;
		}

		for (i = 1; i < n; i++) {
			rcu_assign_pointer(slot[i], child);
			node->count++;
		}
	}
#endif

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = slot;
	return 0;
}
/**
 *	__radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@order:		key covers the 2^order indices around index
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_internal_node(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		unsigned offset = get_slot_offset(node, slot);
		node->count++;
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
		BUG_ON(tag_get(node, 2, offset));
	} else {
		BUG_ON(root_tags_get(root));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);
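/*
 * Illustrative sketch (not from this file): most callers reach this
 * through the order-0 radix_tree_insert() wrapper in <linux/radix-tree.h>.
 * The tree and item names are hypothetical:
 *
 *	error = radix_tree_insert(&my_tree, index, my_item);
 *	if (error == -EEXIST)
 *		pr_debug("index %lu already populated\n", index);
 */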
/**
 *	__radix_tree_lookup	-	lookup an item in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@nodep:		returns node
 *	@slotp:		returns slot
 *
 *	Lookup and return the item at position @index in the radix
 *	tree @root.
 *
 *	Until there is more than one item in the tree, no nodes are
 *	allocated and @root->rnode is used as a direct slot instead of
 *	pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	void **slot;

 restart:
	parent = NULL;
	slot = (void **)&root->rnode;
	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		if (node == RADIX_TREE_RETRY)
			goto restart;
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		slot = parent->slots + offset;
	}

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function can be called under rcu_read_lock iff the slot is not
 *	modified by radix_tree_replace_slot, otherwise it must be called
 *	exclusive from other writers. Any dereference of the slot must be done
 *	using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
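/*
 * Illustrative sketch (not from this file): an RCU-protected lookup.
 * The tree name is hypothetical, and get_my_item() stands in for
 * whatever reference-taking the caller needs before dropping the
 * read-side critical section.
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item)
 *		get_my_item(item);
 *	rcu_read_unlock();
 */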
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.  Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	radix_tree_load_root(root, &node, &maxindex);
	BUG_ON(index > maxindex);

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
		BUG_ON(!node);

		if (!tag_get(parent, tag, offset))
			tag_set(parent, tag, offset);
	}

	/* set the root's tag bit */
	if (!root_tag_get(root, tag))
		root_tag_set(root, tag);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_set);
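/*
 * Illustrative sketch (not from this file): a tag set/test/clear round
 * trip on a hypothetical tree, using raw tag index 0.  Real users name
 * their tags, e.g. the page cache's PAGECACHE_TAG_DIRTY.
 *
 *	radix_tree_tag_set(&my_tree, index, 0);
 *	if (radix_tree_tag_get(&my_tree, index, 0))
 *		pr_debug("index %lu is tagged\n", index);
 *	radix_tree_tag_clear(&my_tree, index, 0);
 */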
static void node_tag_clear(struct radix_tree_root *root,
				struct radix_tree_node *node,
				unsigned int tag, unsigned int offset)
{
	while (node) {
		if (!tag_get(node, tag, offset))
			return;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			return;

		offset = node->offset;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);
}
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag:		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If this causes
 *	the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;
	int uninitialized_var(offset);

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return NULL;

	parent = NULL;

	while (radix_tree_is_internal_node(node)) {
		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);
	}

	if (node)
		node_tag_clear(root, parent, tag, offset);

	return node;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag:		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node, *parent;
	unsigned long maxindex;

	if (!root_tag_get(root, tag))
		return 0;

	radix_tree_load_root(root, &node, &maxindex);
	if (index > maxindex)
		return 0;
	if (node == NULL)
		return 0;

	while (radix_tree_is_internal_node(node)) {
		unsigned offset;

		parent = entry_to_node(node);
		offset = radix_tree_descend(parent, &node, index);

		if (!node)
			return 0;
		if (!tag_get(parent, tag, offset))
			return 0;
		if (node == RADIX_TREE_RETRY)
			break;
	}

	return 1;
}
EXPORT_SYMBOL(radix_tree_tag_get);
static inline void __set_iter_shift(struct radix_tree_iter *iter,
					unsigned int shift)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	iter->shift = shift;
#endif
}

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root:	radix tree root
 * @iter:	iterator state
 * @flags:	RADIX_TREE_ITER_* flags and tag index
 * Returns:	pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *node, *child;
	unsigned long index, offset, maxindex;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iterating; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iterating, and forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

 restart:
	radix_tree_load_root(root, &child, &maxindex);
	if (index > maxindex)
		return NULL;
	if (!child)
		return NULL;

	if (!radix_tree_is_internal_node(child)) {
		/* Single-slot tree */
		iter->index = index;
		iter->next_index = maxindex + 1;
		iter->tags = 1;
		__set_iter_shift(iter, 0);
		return (void **)&root->rnode;
	}

	do {
		node = entry_to_node(child);
		offset = radix_tree_descend(node, &child, index);

		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!tag_get(node, tag, offset) : !child) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					void *slot = node->slots[offset];
					if (is_sibling_entry(node, slot))
						continue;
					if (slot)
						break;
				}
			index &= ~node_maxindex(node);
			index += offset << node->shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
			child = rcu_dereference_raw(node->slots[offset]);
		}

		if ((child == NULL) || (child == RADIX_TREE_RETRY))
			goto restart;
	} while (radix_tree_is_internal_node(child));

	/* Update the iterator state */
	iter->index = (index &~ node_maxindex(node)) | (offset << node->shift);
	iter->next_index = (index | node_maxindex(node)) + 1;
	__set_iter_shift(iter, node->shift);

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
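/*
 * Illustrative sketch (not from this file): this function is normally
 * reached through the iteration macros in <linux/radix-tree.h> rather
 * than called directly.  Tree and names below are hypothetical:
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *		if (radix_tree_deref_retry(item)) {
 *			slot = radix_tree_iter_retry(&iter);
 *			continue;
 *		}
 *		pr_debug("index %lu -> %p\n", iter.index, item);
 *	}
 *	rcu_read_unlock();
 */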
/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				    tag if item has another tag set
 * @root:		radix tree root
 * @first_indexp:	pointer to a starting index of a range to scan
 * @last_index:		last index of a range to scan
 * @nr_to_tag:		maximum number items to tag
 * @iftag:		tag index to test
 * @settag:		tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive).  For each item in the range, if iftag is set,
 * the function also sets settag.  The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	struct radix_tree_node *parent, *node, *child;
	unsigned long maxindex;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	radix_tree_load_root(root, &child, &maxindex);
	last_index = min(last_index, maxindex);
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (!radix_tree_is_internal_node(child)) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	node = entry_to_node(child);

	for (;;) {
		unsigned offset = radix_tree_descend(node, &child, index);
		if (!child)
			goto next;
		if (!tag_get(node, iftag, offset))
			goto next;
		/* Sibling slots never have tags set on them */
		if (radix_tree_is_internal_node(child)) {
			node = entry_to_node(child);
			continue;
		}

		/* tag the leaf */
		tagged++;
		tag_set(node, settag, offset);

		/* walk back up the path tagging interior nodes */
		parent = node;
		for (;;) {
			offset = parent->offset;
			parent = parent->parent;
			if (!parent)
				break;
			/* stop if we find a node with the tag already set */
			if (tag_get(parent, settag, offset))
				break;
			tag_set(parent, settag, offset);
		}
 next:
		/* Go to next entry in node */
		index = ((index >> node->shift) + 1) << node->shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		offset = (index >> node->shift) & RADIX_TREE_MAP_MASK;
		while (offset == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			node = node->parent;
			offset = (index >> node->shift) & RADIX_TREE_MAP_MASK;
		}
		if (is_sibling_entry(node, node->slots[offset]))
			goto next;
		if (tagged >= nr_to_tag)
			break;
	}
	/*
	 * We need not set the root tag if no tag was set with settag within
	 * the range from *first_indexp to last_index.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
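/*
 * Illustrative sketch (not from this file): the classic user of this
 * function is writeback, which retags dirty pages as "towrite" before
 * walking them (see tag_pages_for_writeback() in mm/page-writeback.c).
 * A hedged approximation of that pattern, with hypothetical range
 * bounds and batch size:
 *
 *	unsigned long start = first;
 *	spin_lock_irq(&mapping->tree_lock);
 *	tagged = radix_tree_range_tag_if_tagged(&mapping->page_tree,
 *			&start, last, my_batch, PAGECACHE_TAG_DIRTY,
 *			PAGECACHE_TAG_TOWRITE);
 *	spin_unlock_irq(&mapping->tree_lock);
 */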
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the
 *	semantics of an RCU protected gang lookup are as though multiple
 *	radix_tree_lookups have been issued in individual locks, and results
 *	stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
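/*
 * Illustrative sketch (not from this file): a batched scan under RCU,
 * with hypothetical tree name and batch size.
 *
 *	void *results[16];
 *	unsigned int nr;
 *
 *	rcu_read_lock();
 *	nr = radix_tree_gang_lookup(&my_tree, results, first_index,
 *				    ARRAY_SIZE(results));
 *	rcu_read_unlock();
 */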
/**
 *	radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@indices:	where their indices should be placed (but usually NULL)
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	their slots at *@results and returns the number of items which were
 *	placed at *@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 *	be dereferenced with radix_tree_deref_slot, and if using only RCU
 *	protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_internal_node(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *					  radix tree based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the slots at *@results and
 *	returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

struct locate_info {
	unsigned long found_index;
	bool stop;
};

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, struct locate_info *info)
{
	unsigned long i;

	do {
		unsigned int shift = slot->shift;

		for (i = (index >> shift) & RADIX_TREE_MAP_MASK;
		     i < RADIX_TREE_MAP_SIZE;
		     i++, index += (1UL << shift)) {
			struct radix_tree_node *node =
					rcu_dereference_raw(slot->slots[i]);
			if (node == RADIX_TREE_RETRY)
				goto out;
			if (!radix_tree_is_internal_node(node)) {
				if (node == item) {
					info->found_index = index;
					info->stop = true;
					goto out;
				}
				continue;
			}
			node = entry_to_node(node);
			if (is_sibling_entry(slot, node))
				continue;
			slot = node;
			break;
		}
	} while (i < RADIX_TREE_MAP_SIZE);

out:
	if ((index == 0) && (i == RADIX_TREE_MAP_SIZE))
		info->stop = true;
	return index;
}

/**
 *	radix_tree_locate_item - search through radix tree for item
 *	@root:		radix tree root
 *	@item:		item to be found
 *
 *	Returns index where item was found, or -1 if not found.
 *	Caller must hold no lock (since this time-consuming function needs
 *	to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	struct locate_info info = {
		.found_index = -1,
		.stop = false,
	};

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_internal_node(node)) {
			rcu_read_unlock();
			if (node == item)
				info.found_index = 0;
			break;
		}

		node = entry_to_node(node);

		max_index = node_maxindex(node);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &info);
		rcu_read_unlock();
		cond_resched();
	} while (!info.stop && cur_index <= max_index);

	return info.found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */
/**
 *	radix_tree_shrink    -    shrink radix tree to minimum height
 *	@root		radix tree root
 */
static inline bool radix_tree_shrink(struct radix_tree_root *root)
{
	bool shrunk = false;

	for (;;) {
		struct radix_tree_node *node = root->rnode;
		struct radix_tree_node *child;

		if (!radix_tree_is_internal_node(node))
			break;
		node = entry_to_node(node);

		/*
		 * If the candidate node has more than one child, or its child
		 * is not at the leftmost slot, or the child is a multiorder
		 * entry, we cannot shrink.
		 */
		if (node->count != 1)
			break;
		child = node->slots[0];
		if (!child)
			break;
		if (!radix_tree_is_internal_node(child) && node->shift)
			break;

		if (radix_tree_is_internal_node(child))
			entry_to_node(child)->parent = NULL;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (node->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		root->rnode = child;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (!radix_tree_is_internal_node(child))
			node->slots[0] = RADIX_TREE_RETRY;

		radix_tree_node_free(node);
		shrunk = true;
	}

	return shrunk;
}
/**
 *	__radix_tree_delete_node    -    try to free node after clearing a slot
 *	@root:		radix tree root
 *	@node:		node containing @index
 *
 *	After clearing the slot at @index in @node from radix tree
 *	rooted at @root, call this function to attempt freeing the
 *	node and shrinking the tree.
 *
 *	Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == entry_to_node(root->rnode))
				deleted |= radix_tree_shrink(root);
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			parent->slots[node->offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}
static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;
	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}

/**
 *	radix_tree_delete_item    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		expected item
 *
 *	Remove @item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present
 *	or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/* Clear all tags associated with the item to be deleted.  */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
		node_tag_clear(root, node, tag, offset);

	delete_sibling_entries(node, node_to_entry(slot), offset);
	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);

/**
 *	radix_tree_delete    -    delete an item from a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Remove the item at @index from the radix tree rooted at @root.
 *
 *	Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
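/*
 * Illustrative sketch (not from this file): deletion under the caller's
 * exclusive lock, with hypothetical tree and lock names.  The returned
 * pointer is the removed item (or NULL), so the caller can free it once
 * readers can no longer see it, e.g. after an RCU grace period; the
 * kfree_rcu() call assumes the item embeds a struct rcu_head named rcu.
 *
 *	spin_lock(&my_lock);
 *	item = radix_tree_delete(&my_tree, index);
 *	spin_unlock(&my_lock);
 *	if (item)
 *		kfree_rcu(item, rcu);
 */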
void radix_tree_clear_tags(struct radix_tree_root *root,
			   struct radix_tree_node *node,
			   void **slot)
{
	if (node) {
		unsigned int tag, offset = get_slot_offset(node, slot);
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++)
			node_tag_clear(root, node, tag, offset);
	} else {
		/* Clear root node tags */
		root->gfp_mask &= __GFP_BITS_MASK;
	}
}

/**
 *	radix_tree_tagged - test whether any items in the tree are tagged
 *	@root:		radix tree root
 *	@tag:		tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);
static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

static __init void radix_tree_init_maxnodes(void)
{
	unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1];
	unsigned int i, j;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
	for (i = 0; i < ARRAY_SIZE(height_to_maxnodes); i++) {
		for (j = i; j > 0; j--)
			height_to_maxnodes[i] += height_to_maxindex[j - 1] + 1;
	}
}
static int radix_tree_callback(struct notifier_block *nfb,
			       unsigned long action, void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxnodes();
	hotcpu_notifier(radix_tree_callback, 0);
}