radix-tree.c 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107
  1. /*
  2. * Copyright (C) 2001 Momchil Velikov
  3. * Portions Copyright (C) 2001 Christoph Hellwig
  4. * Copyright (C) 2005 SGI, Christoph Lameter
  5. * Copyright (C) 2006 Nick Piggin
  6. *
  7. * This program is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU General Public License as
  9. * published by the Free Software Foundation; either version 2, or (at
  10. * your option) any later version.
  11. *
  12. * This program is distributed in the hope that it will be useful, but
  13. * WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
  20. */
  21. #include <linux/errno.h>
  22. #include <linux/init.h>
  23. #include <linux/kernel.h>
  24. #include <linux/module.h>
  25. #include <linux/radix-tree.h>
  26. #include <linux/percpu.h>
  27. #include <linux/slab.h>
  28. #include <linux/notifier.h>
  29. #include <linux/cpu.h>
  30. #include <linux/gfp.h>
  31. #include <linux/string.h>
  32. #include <linux/bitops.h>
  33. #include <linux/rcupdate.h>
#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT	(CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT	3	/* For more stressful testing */
#endif

/* Slots per node, and the mask extracting one level's index bits. */
#define RADIX_TREE_MAP_SIZE	(1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK	(RADIX_TREE_MAP_SIZE-1)

/* Number of longs needed for one per-node tag bitmap (one bit per slot). */
#define RADIX_TREE_TAG_LONGS	\
	((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)
/* Interior/leaf node: an array of slots plus per-tag bitmaps over them. */
struct radix_tree_node {
	unsigned int	height;		/* Height from the bottom */
	unsigned int	count;		/* number of occupied slots */
	struct rcu_head	rcu_head;	/* for deferred (RCU) freeing */
	void		*slots[RADIX_TREE_MAP_SIZE];
	unsigned long	tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};
/* One step of a recorded root-to-leaf descent (used for walk-back). */
struct radix_tree_path {
	struct radix_tree_node *node;	/* node visited at this level */
	int offset;			/* slot index taken within it */
};
/* Bits in an index, and the deepest tree needed to cover all of them. */
#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
					  RADIX_TREE_MAP_SHIFT))

/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;
/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;		/* number of nodes currently stashed for this cpu */
	struct radix_tree_node *nodes[RADIX_TREE_MAX_PATH];
};
DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
  74. static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
  75. {
  76. return root->gfp_mask & __GFP_BITS_MASK;
  77. }
  78. static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
  79. int offset)
  80. {
  81. __set_bit(offset, node->tags[tag]);
  82. }
  83. static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
  84. int offset)
  85. {
  86. __clear_bit(offset, node->tags[tag]);
  87. }
  88. static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
  89. int offset)
  90. {
  91. return test_bit(offset, node->tags[tag]);
  92. }
  93. static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
  94. {
  95. root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
  96. }
  97. static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
  98. {
  99. root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
  100. }
/* Clear every per-root tag bit, keeping only the allocation flags. */
static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}
  105. static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
  106. {
  107. return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
  108. }
  109. /*
  110. * Returns 1 if any slot in the node has this tag set.
  111. * Otherwise returns 0.
  112. */
  113. static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
  114. {
  115. int idx;
  116. for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
  117. if (node->tags[tag][idx])
  118. return 1;
  119. }
  120. return 0;
  121. }
/*
 * Allocate a node, drawing from the per-cpu preload pool for atomic
 * (non-__GFP_WAIT) allocations.
 *
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	if (!(gfp_mask & __GFP_WAIT)) {
		struct radix_tree_preload *rtp;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr) {
			/* Pop the topmost node off this cpu's stash. */
			ret = rtp->nodes[rtp->nr - 1];
			rtp->nodes[rtp->nr - 1] = NULL;
			rtp->nr--;
		}
	}
	if (ret == NULL)
		ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

	/* Node addresses must not collide with the indirect-pointer tag. */
	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}
/* RCU callback: scrub the node so only zeroed nodes return to the slab. */
static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	tag_clear(node, 0, 0);
	tag_clear(node, 1, 0);
	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}
/*
 * Defer freeing of @node until after an RCU grace period, so lockless
 * readers still traversing it stay safe.
 */
static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}
/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail.  On
 * success, return zero, with preemption disabled.  On error, return -ENOMEM
 * with preemption not disabled.
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = &__get_cpu_var(radix_tree_preloads);
	while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
		/*
		 * Enable preemption across the (possibly sleeping)
		 * allocation; we may migrate CPUs, so the per-cpu pointer
		 * is refetched once preemption is disabled again.
		 */
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = &__get_cpu_var(radix_tree_preloads);
		if (rtp->nr < ARRAY_SIZE(rtp->nodes))
			rtp->nodes[rtp->nr++] = node;
		else
			/* Pool was refilled by someone else meanwhile. */
			kmem_cache_free(radix_tree_node_cachep, node);
	}
	ret = 0;
out:
	return ret;
}
EXPORT_SYMBOL(radix_tree_preload);
/*
 * Return the maximum key which can be store into a
 * radix tree with height HEIGHT.
 *
 * Simple table lookup; height_to_maxindex[] is populated during
 * initialisation (outside this chunk's view).
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}
/*
 * Extend a radix tree so it can store key @index.
 * Returns 0 on success, or -ENOMEM if a new root node cannot be allocated.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
	struct radix_tree_node *node;
	unsigned int height;
	int tag;

	/* Figure out what the height should be.  */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	/* Empty tree: just record the new height, no nodes needed yet. */
	if (root->rnode == NULL) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Increase the height.  */
		node->slots[0] = radix_tree_indirect_to_ptr(root->rnode);

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		newheight = root->height+1;
		node->height = newheight;
		node->count = 1;
		node = radix_tree_ptr_to_indirect(node);
		/*
		 * Publish the fully initialised new root before bumping
		 * root->height, so lockless readers never see a height
		 * that exceeds what rnode actually provides.
		 */
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}
/**
 *	radix_tree_insert    -    insert into a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *	@item:		item to insert
 *
 *	Insert an item into the radix tree at position @index.
 *
 *	Returns 0 on success, -EEXIST if @index is already occupied, or
 *	-ENOMEM if a node allocation fails.
 */
int radix_tree_insert(struct radix_tree_root *root,
			unsigned long index, void *item)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift;
	int offset;
	int error;

	/* Items must not be mistakable for internal (indirect) pointers. */
	BUG_ON(radix_tree_is_indirect_ptr(item));

	/* Make sure the tree is high enough.  */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index);
		if (error)
			return error;
	}

	slot = radix_tree_indirect_to_ptr(root->rnode);

	height = root->height;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (height > 0) {
		if (slot == NULL) {
			/* Have to add a child node.  */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->height = height;
			if (node) {
				/*
				 * Publish the initialised child only after
				 * it is fully set up (rcu_assign_pointer
				 * provides the needed write barrier).
				 */
				rcu_assign_pointer(node->slots[offset], slot);
				node->count++;
			} else
				rcu_assign_pointer(root->rnode,
					radix_tree_ptr_to_indirect(slot));
		}

		/* Go a level down */
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = node->slots[offset];
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot != NULL)
		return -EEXIST;

	if (node) {
		node->count++;
		rcu_assign_pointer(node->slots[offset], item);
		/* A freshly occupied slot must carry no stale tags. */
		BUG_ON(tag_get(node, 0, offset));
		BUG_ON(tag_get(node, 1, offset));
	} else {
		/* Height-0 tree: the item lives directly in the root. */
		rcu_assign_pointer(root->rnode, item);
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(radix_tree_insert);
/**
 *	radix_tree_lookup_slot    -    lookup a slot in a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Returns:  the slot corresponding to the position @index in the
 *	radix tree @root. This is useful for update-if-exists operations.
 *
 *	This function cannot be called under rcu_read_lock, it must be
 *	excluded from writers, as must the returned slot for subsequent
 *	use by radix_tree_deref_slot() and radix_tree_replace slot.
 *	Caller must hold tree write locked across slot lookup and
 *	replace.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = root->rnode;
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		/* Height-0 tree: only index 0 can exist; the root IS the slot. */
		if (index > 0)
			return NULL;
		return (void **)&root->rnode;
	}
	node = radix_tree_indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		/* Address of the slot selected by this level's index bits. */
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = *slot;
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return (void **)slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
/**
 *	radix_tree_lookup    -    perform lookup operation on a radix tree
 *	@root:		radix tree root
 *	@index:		index key
 *
 *	Lookup the item at the position @index in the radix tree @root.
 *
 *	This function can be called under rcu_read_lock, however the caller
 *	must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 *	them safely). No RCU barriers are required to access or modify the
 *	returned item, however.
 *
 *	Returns the item, or NULL if @index is not present.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	unsigned int height, shift;
	struct radix_tree_node *node, **slot;

	node = rcu_dereference(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		/* Height-0 tree: the root slot holds the item itself. */
		if (index > 0)
			return NULL;
		return node;
	}
	node = radix_tree_indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	do {
		/* Each level's pointer load goes through rcu_dereference. */
		slot = (struct radix_tree_node **)
			(node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
		node = rcu_dereference(*slot);
		if (node == NULL)
			return NULL;

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	return node;
}
EXPORT_SYMBOL(radix_tree_lookup);
/**
 *	radix_tree_tag_set - set a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag: 		tag index
 *
 *	Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  From
 *	the root all the way down to the leaf node.
 *
 *	Returns the address of the tagged item.   Setting a tag on a not-present
 *	item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = radix_tree_indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	/* Tag every node on the path from the root down to the leaf. */
	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);	/* tagging a not-present item */
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);
/**
 *	radix_tree_tag_clear - clear a tag on a radix tree node
 *	@root:		radix tree root
 *	@index:		index key
 *	@tag: 		tag index
 *
 *	Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 *	corresponding to @index in the radix tree.  If
 *	this causes the leaf node to have no tags set then clear the tag in the
 *	next-to-leaf node, etc.
 *
 *	Returns the address of the tagged item on success, else NULL.  ie:
 *	has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	/*
	 * The radix tree path needs to be one longer than the maximum path
	 * since the "list" is null terminated.
	 */
	struct radix_tree_path path[RADIX_TREE_MAX_PATH + 1], *pathp = path;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	pathp->node = NULL;	/* sentinel terminating the walk-back loop */
	slot = radix_tree_indirect_to_ptr(root->rnode);

	/* Descend to the leaf, recording each step for the walk-back. */
	while (height > 0) {
		int offset;

		if (slot == NULL)
			goto out;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		pathp[1].offset = offset;
		pathp[1].node = slot;
		slot = slot->slots[offset];
		pathp++;
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	if (slot == NULL)
		goto out;

	/*
	 * Walk back up clearing the tag; stop at the first level where a
	 * sibling slot still holds the tag, since ancestors must then
	 * remain tagged.
	 */
	while (pathp->node) {
		if (!tag_get(pathp->node, tag, pathp->offset))
			goto out;
		tag_clear(pathp->node, tag, pathp->offset);
		if (any_tag_set(pathp->node, tag))
			goto out;
		pathp--;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);
#ifndef __KERNEL__	/* Only the test harness uses this at present */
/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root:		radix tree root
 * @index:		index key
 * @tag: 		tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;
	int saw_unset_tag = 0;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);	/* root tag already checked above */
	node = radix_tree_indirect_to_ptr(node);

	height = node->height;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;

		/*
		 * This is just a debug check.  Later, we can bale as soon as
		 * we see an unset tag.
		 */
		if (!tag_get(node, tag, offset))
			saw_unset_tag = 1;
		if (height == 1) {
			int ret = tag_get(node, tag, offset);

			/* A set leaf tag implies every ancestor was tagged. */
			BUG_ON(ret && saw_unset_tag);
			return !!ret;
		}
		node = rcu_dereference(node->slots[offset]);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
#endif
  541. /**
  542. * radix_tree_next_hole - find the next hole (not-present entry)
  543. * @root: tree root
  544. * @index: index key
  545. * @max_scan: maximum range to search
  546. *
  547. * Search the set [index, min(index+max_scan-1, MAX_INDEX)] for the lowest
  548. * indexed hole.
  549. *
  550. * Returns: the index of the hole if found, otherwise returns an index
  551. * outside of the set specified (in which case 'return - index >= max_scan'
  552. * will be true).
  553. *
  554. * radix_tree_next_hole may be called under rcu_read_lock. However, like
  555. * radix_tree_gang_lookup, this will not atomically search a snapshot of the
  556. * tree at a single point in time. For example, if a hole is created at index
  557. * 5, then subsequently a hole is created at index 10, radix_tree_next_hole
  558. * covering both indexes may return 10 if called under rcu_read_lock.
  559. */
  560. unsigned long radix_tree_next_hole(struct radix_tree_root *root,
  561. unsigned long index, unsigned long max_scan)
  562. {
  563. unsigned long i;
  564. for (i = 0; i < max_scan; i++) {
  565. if (!radix_tree_lookup(root, index))
  566. break;
  567. index++;
  568. if (index == 0)
  569. break;
  570. }
  571. return index;
  572. }
  573. EXPORT_SYMBOL(radix_tree_next_hole);
/*
 * Scan the subtree rooted at @slot for present entries with index >= @index,
 * storing up to @max_items of them in @results.  *@next_index is set to the
 * index at which a subsequent scan should resume (0 means the index space
 * wrapped).  Returns the number of entries stored.
 */
static unsigned int
__lookup(struct radix_tree_node *slot, void **results, unsigned long index,
	unsigned int max_items, unsigned long *next_index)
{
	unsigned int nr_found = 0;
	unsigned int shift, height;
	unsigned long i;

	height = slot->height;
	if (height == 0)
		goto out;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			/* Empty slot: round index up past this whole subtree. */
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference(slot->slots[i]);
		if (slot == NULL)
			goto out;
	}

	/* Bottom level: grab some items */
	for (i = index & RADIX_TREE_MAP_MASK; i < RADIX_TREE_MAP_SIZE; i++) {
		struct radix_tree_node *node;
		index++;	/* keep next_index one past the slot examined */
		node = slot->slots[i];
		if (node) {
			results[nr_found++] = rcu_dereference(node);
			if (nr_found == max_items)
				goto out;
		}
	}
out:
	*next_index = index;
	return nr_found;
}
/**
 *	radix_tree_gang_lookup - perform multiple lookup on a radix tree
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *
 *	Performs an index-ascending scan of the tree for present items.  Places
 *	them at *@results and returns the number of items which were placed at
 *	*@results.
 *
 *	The implementation is naive.
 *
 *	Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 *	rcu_read_lock. In this case, rather than the returned results being
 *	an atomic snapshot of the tree at a single point in time, the semantics
 *	of an RCU protected gang lookup are as though multiple radix_tree_lookups
 *	have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	unsigned long max_index;
	struct radix_tree_node *node;
	unsigned long cur_index = first_index;
	unsigned int ret;

	node = rcu_dereference(root->rnode);
	if (!node)
		return 0;

	if (!radix_tree_is_indirect_ptr(node)) {
		/* Height-0 tree: at most one item, stored at index 0. */
		if (first_index > 0)
			return 0;
		results[0] = node;
		return 1;
	}
	node = radix_tree_indirect_to_ptr(node);

	max_index = radix_tree_maxindex(node->height);

	/* Repeatedly harvest batches until full or the index range ends. */
	ret = 0;
	while (ret < max_items) {
		unsigned int nr_found;
		unsigned long next_index;	/* Index of next search */

		if (cur_index > max_index)
			break;
		nr_found = __lookup(node, results + ret, cur_index,
					max_items - ret, &next_index);
		ret += nr_found;
		if (next_index == 0)
			break;	/* index space exhausted (wraparound) */
		cur_index = next_index;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
/*
 * FIXME: the two tag_get()s here should use find_next_bit() instead of
 * open-coding the search.
 */
/*
 * Scan the subtree rooted at @slot for entries tagged @tag with index
 * >= @index, storing up to @max_items of them in @results.  *@next_index
 * receives the resume index (0 on wraparound).  Returns the number found.
 */
static unsigned int
__lookup_tag(struct radix_tree_node *slot, void **results, unsigned long index,
	unsigned int max_items, unsigned long *next_index, unsigned int tag)
{
	unsigned int nr_found = 0;
	unsigned int shift, height;

	height = slot->height;
	if (height == 0)
		goto out;
	shift = (height-1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		unsigned long i = (index >> shift) & RADIX_TREE_MAP_MASK;

		for (;;) {
			if (tag_get(slot, tag, i))
				break;
			/* Untagged subtree: advance index past all of it. */
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}
		height--;
		if (height == 0) {	/* Bottom level: grab some items */
			unsigned long j = index & RADIX_TREE_MAP_MASK;

			for ( ; j < RADIX_TREE_MAP_SIZE; j++) {
				struct radix_tree_node *node;
				index++;
				if (!tag_get(slot, tag, j))
					continue;
				node = slot->slots[j];
				/*
				 * Even though the tag was found set, we need to
				 * recheck that we have a non-NULL node, because
				 * if this lookup is lockless, it may have been
				 * subsequently deleted.
				 *
				 * Similar care must be taken in any place that
				 * lookup ->slots[x] without a lock (ie. can't
				 * rely on its value remaining the same).
				 */
				if (node) {
					node = rcu_dereference(node);
					results[nr_found++] = node;
					if (nr_found == max_items)
						goto out;
				}
			}
		}
		shift -= RADIX_TREE_MAP_SHIFT;
		slot = rcu_dereference(slot->slots[i]);
		if (slot == NULL)
			break;
	}
out:
	*next_index = index;
	return nr_found;
}
/**
 *	radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *	                             based on a tag
 *	@root:		radix tree root
 *	@results:	where the results of the lookup are placed
 *	@first_index:	start the lookup from this key
 *	@max_items:	place up to this many items at *results
 *	@tag:		the tag index (< RADIX_TREE_MAX_TAGS)
 *
 *	Performs an index-ascending scan of the tree for present items which
 *	have the tag indexed by @tag set.  Places the items at *@results and
 *	returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = first_index;
	unsigned int ret;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference(root->rnode);
	if (!node)
		return 0;

	if (!radix_tree_is_indirect_ptr(node)) {
		/* Height-0 tree: single item at index 0; tag verified above. */
		if (first_index > 0)
			return 0;
		results[0] = node;
		return 1;
	}
	node = radix_tree_indirect_to_ptr(node);

	max_index = radix_tree_maxindex(node->height);

	/* Repeatedly harvest tagged batches until full or range exhausted. */
	ret = 0;
	while (ret < max_items) {
		unsigned int nr_found;
		unsigned long next_index;	/* Index of next search */

		if (cur_index > max_index)
			break;
		nr_found = __lookup_tag(node, results + ret, cur_index,
					max_items - ret, &next_index, tag);
		ret += nr_found;
		if (next_index == 0)
			break;	/* index space exhausted (wraparound) */
		cur_index = next_index;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
/**
 *	radix_tree_shrink    -    shrink height of a radix tree to minimal
 *	@root		radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		void *newptr;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = radix_tree_indirect_to_ptr(to_free);

		/*
		 * The candidate node has more than one child, or its child
		 * is not at the leftmost slot, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		if (!to_free->slots[0])
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another. If
		 * it was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode).
		 */
		newptr = to_free->slots[0];
		/* Above height 1 the child is a node and must be re-tagged. */
		if (root->height > 1)
			newptr = radix_tree_ptr_to_indirect(newptr);
		root->rnode = newptr;
		root->height--;
		/* Freed via RCU: lockless readers may still hold to_free. */
		radix_tree_node_free(to_free);
	}
}
  822. /**
  823. * radix_tree_delete - delete an item from a radix tree
  824. * @root: radix tree root
  825. * @index: index key
  826. *
  827. * Remove the item at @index from the radix tree rooted at @root.
  828. *
  829. * Returns the address of the deleted item, or NULL if it was not present.
  830. */
  831. void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
  832. {
  833. /*
  834. * The radix tree path needs to be one longer than the maximum path
  835. * since the "list" is null terminated.
  836. */
  837. struct radix_tree_path path[RADIX_TREE_MAX_PATH + 1], *pathp = path;
  838. struct radix_tree_node *slot = NULL;
  839. struct radix_tree_node *to_free;
  840. unsigned int height, shift;
  841. int tag;
  842. int offset;
  843. height = root->height;
  844. if (index > radix_tree_maxindex(height))
  845. goto out;
  846. slot = root->rnode;
  847. if (height == 0) {
  848. root_tag_clear_all(root);
  849. root->rnode = NULL;
  850. goto out;
  851. }
  852. slot = radix_tree_indirect_to_ptr(slot);
  853. shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
  854. pathp->node = NULL;
  855. do {
  856. if (slot == NULL)
  857. goto out;
  858. pathp++;
  859. offset = (index >> shift) & RADIX_TREE_MAP_MASK;
  860. pathp->offset = offset;
  861. pathp->node = slot;
  862. slot = slot->slots[offset];
  863. shift -= RADIX_TREE_MAP_SHIFT;
  864. height--;
  865. } while (height > 0);
  866. if (slot == NULL)
  867. goto out;
  868. /*
  869. * Clear all tags associated with the just-deleted item
  870. */
  871. for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
  872. if (tag_get(pathp->node, tag, pathp->offset))
  873. radix_tree_tag_clear(root, index, tag);
  874. }
  875. to_free = NULL;
  876. /* Now free the nodes we do not need anymore */
  877. while (pathp->node) {
  878. pathp->node->slots[pathp->offset] = NULL;
  879. pathp->node->count--;
  880. /*
  881. * Queue the node for deferred freeing after the
  882. * last reference to it disappears (set NULL, above).
  883. */
  884. if (to_free)
  885. radix_tree_node_free(to_free);
  886. if (pathp->node->count) {
  887. if (pathp->node ==
  888. radix_tree_indirect_to_ptr(root->rnode))
  889. radix_tree_shrink(root);
  890. goto out;
  891. }
  892. /* Node with zero slots in use so free it */
  893. to_free = pathp->node;
  894. pathp--;
  895. }
  896. root_tag_clear_all(root);
  897. root->height = 0;
  898. root->rnode = NULL;
  899. if (to_free)
  900. radix_tree_node_free(to_free);
  901. out:
  902. return slot;
  903. }
  904. EXPORT_SYMBOL(radix_tree_delete);
  905. /**
  906. * radix_tree_tagged - test whether any items in the tree are tagged
  907. * @root: radix tree root
  908. * @tag: tag to test
  909. */
  910. int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
  911. {
  912. return root_tag_get(root, tag);
  913. }
  914. EXPORT_SYMBOL(radix_tree_tagged);
  915. static void
  916. radix_tree_node_ctor(struct kmem_cache *cachep, void *node)
  917. {
  918. memset(node, 0, sizeof(struct radix_tree_node));
  919. }
  920. static __init unsigned long __maxindex(unsigned int height)
  921. {
  922. unsigned int width = height * RADIX_TREE_MAP_SHIFT;
  923. int shift = RADIX_TREE_INDEX_BITS - width;
  924. if (shift < 0)
  925. return ~0UL;
  926. if (shift >= BITS_PER_LONG)
  927. return 0UL;
  928. return ~0UL >> shift;
  929. }
  930. static __init void radix_tree_init_maxindex(void)
  931. {
  932. unsigned int i;
  933. for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
  934. height_to_maxindex[i] = __maxindex(i);
  935. }
  936. static int radix_tree_callback(struct notifier_block *nfb,
  937. unsigned long action,
  938. void *hcpu)
  939. {
  940. int cpu = (long)hcpu;
  941. struct radix_tree_preload *rtp;
  942. /* Free per-cpu pool of perloaded nodes */
  943. if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
  944. rtp = &per_cpu(radix_tree_preloads, cpu);
  945. while (rtp->nr) {
  946. kmem_cache_free(radix_tree_node_cachep,
  947. rtp->nodes[rtp->nr-1]);
  948. rtp->nodes[rtp->nr-1] = NULL;
  949. rtp->nr--;
  950. }
  951. }
  952. return NOTIFY_OK;
  953. }
/*
 * One-time boot initialization of the radix tree machinery: create the
 * node slab cache, precompute the height->maxindex table, and register
 * for CPU hotplug events.
 */
void __init radix_tree_init(void)
{
	/*
	 * SLAB_PANIC: the kernel cannot run without this cache, so there
	 * is no failure path to handle here.
	 */
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	/* Reclaim per-cpu preloaded nodes when a CPU goes offline. */
	hotcpu_notifier(radix_tree_callback, 0);
}