/* rb.h */
/******************************************************************************
 *
 * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>.
 * Copyright (C) 2015-2019 Mark Straver <moonchild@palemoon.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice(s), this list of conditions and the following disclaimer
 *    unmodified other than the allowable addition of one or more
 *    copyright notices.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice(s), this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 ******************************************************************************
 *
 * cpp macro implementation of left-leaning red-black trees.
 *
 * Usage:
 *
 *   (Optional.)
 *   #define SIZEOF_PTR ...
 *   #define SIZEOF_PTR_2POW ...
 *
 *   (Optional, see assert(3).)
 *   #define NDEBUG
 *
 *   (Required.)
 *   #include <assert.h>
 *   #include <rb.h>
 *   ...
 *
 * All operations are done non-recursively.  Parent pointers are not used, and
 * color bits are stored in the least significant bit of right-child pointers,
 * thus making node linkage as compact as is possible for red-black trees.
 *
 * Some macros use a comparison function pointer, which is expected to have the
 * following prototype:
 *
 *   int (a_cmp *)(a_type *a_node, a_type *a_other);
 *         ^^^^^^
 *         or a_key
 *
 * Interpretation of comparison function return values:
 *
 *   -1 : a_node <  a_other
 *    0 : a_node == a_other
 *    1 : a_node >  a_other
 *
 * In all cases, the a_node or a_key macro argument is the first argument to
 * the comparison function, which makes it possible to write comparison
 * functions that treat the first argument specially.
 *
 ******************************************************************************/
  70. #ifndef RB_H_
  71. #define RB_H_
  72. /* Node structure. */
  73. #define rb_node(a_type) \
  74. struct { \
  75. a_type *rbn_left; \
  76. a_type *rbn_right_red; \
  77. }
  78. /* Root structure. */
  79. #define rb_tree(a_type) \
  80. struct { \
  81. a_type *rbt_root; \
  82. a_type rbt_nil; \
  83. }
  84. /* Left accessors. */
  85. #define rbp_left_get(a_type, a_field, a_node) \
  86. ((a_node)->a_field.rbn_left)
  87. #define rbp_left_set(a_type, a_field, a_node, a_left) do { \
  88. (a_node)->a_field.rbn_left = a_left; \
  89. } while (0)
  90. /* Right accessors. */
  91. #define rbp_right_get(a_type, a_field, a_node) \
  92. ((a_type *) (((intptr_t) (a_node)->a_field.rbn_right_red) \
  93. & ((ssize_t)-2)))
  94. #define rbp_right_set(a_type, a_field, a_node, a_right) do { \
  95. (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right) \
  96. | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((size_t)1))); \
  97. } while (0)
  98. /* Color accessors. */
  99. #define rbp_red_get(a_type, a_field, a_node) \
  100. ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red) \
  101. & ((size_t)1)))
  102. #define rbp_color_set(a_type, a_field, a_node, a_red) do { \
  103. (a_node)->a_field.rbn_right_red = (a_type *) ((((intptr_t) \
  104. (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)) \
  105. | ((ssize_t)a_red)); \
  106. } while (0)
  107. #define rbp_red_set(a_type, a_field, a_node) do { \
  108. (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \
  109. (a_node)->a_field.rbn_right_red) | ((size_t)1)); \
  110. } while (0)
  111. #define rbp_black_set(a_type, a_field, a_node) do { \
  112. (a_node)->a_field.rbn_right_red = (a_type *) (((intptr_t) \
  113. (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)); \
  114. } while (0)
  115. /* Node initializer. */
  116. #define rbp_node_new(a_type, a_field, a_tree, a_node) do { \
  117. rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
  118. rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
  119. rbp_red_set(a_type, a_field, (a_node)); \
  120. } while (0)
  121. /* Tree initializer. */
  122. #define rb_new(a_type, a_field, a_tree) do { \
  123. (a_tree)->rbt_root = &(a_tree)->rbt_nil; \
  124. rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil); \
  125. rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil); \
  126. } while (0)
  127. /* Tree operations. */
  128. #define rbp_black_height(a_type, a_field, a_tree, r_height) do { \
  129. a_type *rbp_bh_t; \
  130. for (rbp_bh_t = (a_tree)->rbt_root, (r_height) = 0; \
  131. rbp_bh_t != &(a_tree)->rbt_nil; \
  132. rbp_bh_t = rbp_left_get(a_type, a_field, rbp_bh_t)) { \
  133. if (rbp_red_get(a_type, a_field, rbp_bh_t) == false) { \
  134. (r_height)++; \
  135. } \
  136. } \
  137. } while (0)
  138. #define rbp_first(a_type, a_field, a_tree, a_root, r_node) do { \
  139. for ((r_node) = (a_root); \
  140. rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
  141. (r_node) = rbp_left_get(a_type, a_field, (r_node))) { \
  142. } \
  143. } while (0)
  144. #define rbp_last(a_type, a_field, a_tree, a_root, r_node) do { \
  145. for ((r_node) = (a_root); \
  146. rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
  147. (r_node) = rbp_right_get(a_type, a_field, (r_node))) { \
  148. } \
  149. } while (0)
  150. #define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
  151. if (rbp_right_get(a_type, a_field, (a_node)) \
  152. != &(a_tree)->rbt_nil) { \
  153. rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type, \
  154. a_field, (a_node)), (r_node)); \
  155. } else { \
  156. a_type *rbp_n_t = (a_tree)->rbt_root; \
  157. assert(rbp_n_t != &(a_tree)->rbt_nil); \
  158. (r_node) = &(a_tree)->rbt_nil; \
  159. while (true) { \
  160. int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t); \
  161. if (rbp_n_cmp < 0) { \
  162. (r_node) = rbp_n_t; \
  163. rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t); \
  164. } else if (rbp_n_cmp > 0) { \
  165. rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t); \
  166. } else { \
  167. break; \
  168. } \
  169. assert(rbp_n_t != &(a_tree)->rbt_nil); \
  170. } \
  171. } \
  172. } while (0)
  173. #define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
  174. if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) {\
  175. rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type, \
  176. a_field, (a_node)), (r_node)); \
  177. } else { \
  178. a_type *rbp_p_t = (a_tree)->rbt_root; \
  179. assert(rbp_p_t != &(a_tree)->rbt_nil); \
  180. (r_node) = &(a_tree)->rbt_nil; \
  181. while (true) { \
  182. int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t); \
  183. if (rbp_p_cmp < 0) { \
  184. rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t); \
  185. } else if (rbp_p_cmp > 0) { \
  186. (r_node) = rbp_p_t; \
  187. rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t); \
  188. } else { \
  189. break; \
  190. } \
  191. assert(rbp_p_t != &(a_tree)->rbt_nil); \
  192. } \
  193. } \
  194. } while (0)
  195. #define rb_first(a_type, a_field, a_tree, r_node) do { \
  196. rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \
  197. if ((r_node) == &(a_tree)->rbt_nil) { \
  198. (r_node) = NULL; \
  199. } \
  200. } while (0)
  201. #define rb_last(a_type, a_field, a_tree, r_node) do { \
  202. rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, r_node); \
  203. if ((r_node) == &(a_tree)->rbt_nil) { \
  204. (r_node) = NULL; \
  205. } \
  206. } while (0)
  207. #define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
  208. rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
  209. if ((r_node) == &(a_tree)->rbt_nil) { \
  210. (r_node) = NULL; \
  211. } \
  212. } while (0)
  213. #define rb_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
  214. rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
  215. if ((r_node) == &(a_tree)->rbt_nil) { \
  216. (r_node) = NULL; \
  217. } \
  218. } while (0)
  219. #define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
  220. int rbp_se_cmp; \
  221. (r_node) = (a_tree)->rbt_root; \
  222. while ((r_node) != &(a_tree)->rbt_nil \
  223. && (rbp_se_cmp = (a_cmp)((a_key), (r_node))) != 0) { \
  224. if (rbp_se_cmp < 0) { \
  225. (r_node) = rbp_left_get(a_type, a_field, (r_node)); \
  226. } else { \
  227. (r_node) = rbp_right_get(a_type, a_field, (r_node)); \
  228. } \
  229. } \
  230. if ((r_node) == &(a_tree)->rbt_nil) { \
  231. (r_node) = NULL; \
  232. } \
  233. } while (0)
  234. /*
  235. * Find a match if it exists. Otherwise, find the next greater node, if one
  236. * exists.
  237. */
  238. #define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
  239. a_type *rbp_ns_t = (a_tree)->rbt_root; \
  240. (r_node) = NULL; \
  241. while (rbp_ns_t != &(a_tree)->rbt_nil) { \
  242. int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_t); \
  243. if (rbp_ns_cmp < 0) { \
  244. (r_node) = rbp_ns_t; \
  245. rbp_ns_t = rbp_left_get(a_type, a_field, rbp_ns_t); \
  246. } else if (rbp_ns_cmp > 0) { \
  247. rbp_ns_t = rbp_right_get(a_type, a_field, rbp_ns_t); \
  248. } else { \
  249. (r_node) = rbp_ns_t; \
  250. break; \
  251. } \
  252. } \
  253. } while (0)
  254. /*
  255. * Find a match if it exists. Otherwise, find the previous lesser node, if one
  256. * exists.
  257. */
  258. #define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
  259. a_type *rbp_ps_t = (a_tree)->rbt_root; \
  260. (r_node) = NULL; \
  261. while (rbp_ps_t != &(a_tree)->rbt_nil) { \
  262. int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_t); \
  263. if (rbp_ps_cmp < 0) { \
  264. rbp_ps_t = rbp_left_get(a_type, a_field, rbp_ps_t); \
  265. } else if (rbp_ps_cmp > 0) { \
  266. (r_node) = rbp_ps_t; \
  267. rbp_ps_t = rbp_right_get(a_type, a_field, rbp_ps_t); \
  268. } else { \
  269. (r_node) = rbp_ps_t; \
  270. break; \
  271. } \
  272. } \
  273. } while (0)
  274. #define rbp_rotate_left(a_type, a_field, a_node, r_node) do { \
  275. (r_node) = rbp_right_get(a_type, a_field, (a_node)); \
  276. rbp_right_set(a_type, a_field, (a_node), \
  277. rbp_left_get(a_type, a_field, (r_node))); \
  278. rbp_left_set(a_type, a_field, (r_node), (a_node)); \
  279. } while (0)
  280. #define rbp_rotate_right(a_type, a_field, a_node, r_node) do { \
  281. (r_node) = rbp_left_get(a_type, a_field, (a_node)); \
  282. rbp_left_set(a_type, a_field, (a_node), \
  283. rbp_right_get(a_type, a_field, (r_node))); \
  284. rbp_right_set(a_type, a_field, (r_node), (a_node)); \
  285. } while (0)
  286. #define rbp_lean_left(a_type, a_field, a_node, r_node) do { \
  287. bool rbp_ll_red; \
  288. rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
  289. rbp_ll_red = rbp_red_get(a_type, a_field, (a_node)); \
  290. rbp_color_set(a_type, a_field, (r_node), rbp_ll_red); \
  291. rbp_red_set(a_type, a_field, (a_node)); \
  292. } while (0)
  293. #define rbp_lean_right(a_type, a_field, a_node, r_node) do { \
  294. bool rbp_lr_red; \
  295. rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
  296. rbp_lr_red = rbp_red_get(a_type, a_field, (a_node)); \
  297. rbp_color_set(a_type, a_field, (r_node), rbp_lr_red); \
  298. rbp_red_set(a_type, a_field, (a_node)); \
  299. } while (0)
  300. #define rbp_move_red_left(a_type, a_field, a_node, r_node) do { \
  301. a_type *rbp_mrl_t, *rbp_mrl_u; \
  302. rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node)); \
  303. rbp_red_set(a_type, a_field, rbp_mrl_t); \
  304. rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
  305. rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t); \
  306. if (rbp_red_get(a_type, a_field, rbp_mrl_u)) { \
  307. rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u); \
  308. rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u); \
  309. rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
  310. rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
  311. if (rbp_red_get(a_type, a_field, rbp_mrl_t)) { \
  312. rbp_black_set(a_type, a_field, rbp_mrl_t); \
  313. rbp_red_set(a_type, a_field, (a_node)); \
  314. rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t); \
  315. rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t); \
  316. } else { \
  317. rbp_black_set(a_type, a_field, (a_node)); \
  318. } \
  319. } else { \
  320. rbp_red_set(a_type, a_field, (a_node)); \
  321. rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
  322. } \
  323. } while (0)
  324. #define rbp_move_red_right(a_type, a_field, a_node, r_node) do { \
  325. a_type *rbp_mrr_t; \
  326. rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node)); \
  327. if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
  328. a_type *rbp_mrr_u, *rbp_mrr_v; \
  329. rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t); \
  330. rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u); \
  331. if (rbp_red_get(a_type, a_field, rbp_mrr_v)) { \
  332. rbp_color_set(a_type, a_field, rbp_mrr_u, \
  333. rbp_red_get(a_type, a_field, (a_node))); \
  334. rbp_black_set(a_type, a_field, rbp_mrr_v); \
  335. rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u); \
  336. rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u); \
  337. rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
  338. rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
  339. rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
  340. } else { \
  341. rbp_color_set(a_type, a_field, rbp_mrr_t, \
  342. rbp_red_get(a_type, a_field, (a_node))); \
  343. rbp_red_set(a_type, a_field, rbp_mrr_u); \
  344. rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
  345. rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
  346. rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
  347. } \
  348. rbp_red_set(a_type, a_field, (a_node)); \
  349. } else { \
  350. rbp_red_set(a_type, a_field, rbp_mrr_t); \
  351. rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t); \
  352. if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
  353. rbp_black_set(a_type, a_field, rbp_mrr_t); \
  354. rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
  355. rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
  356. rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
  357. } else { \
  358. rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
  359. } \
  360. } \
  361. } while (0)
  362. #define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do { \
  363. a_type rbp_i_s; \
  364. a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u; \
  365. int rbp_i_cmp = 0; \
  366. rbp_i_g = &(a_tree)->rbt_nil; \
  367. rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root); \
  368. rbp_right_set(a_type, a_field, &rbp_i_s, &(a_tree)->rbt_nil); \
  369. rbp_black_set(a_type, a_field, &rbp_i_s); \
  370. rbp_i_p = &rbp_i_s; \
  371. rbp_i_c = (a_tree)->rbt_root; \
  372. /* Iteratively search down the tree for the insertion point, */\
  373. /* splitting 4-nodes as they are encountered. At the end of each */\
  374. /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down */\
  375. /* the tree, assuming a sufficiently deep tree. */\
  376. while (rbp_i_c != &(a_tree)->rbt_nil) { \
  377. rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c); \
  378. rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
  379. if (rbp_red_get(a_type, a_field, rbp_i_t) \
  380. && rbp_red_get(a_type, a_field, rbp_i_u)) { \
  381. /* rbp_i_c is the top of a logical 4-node, so split it. */\
  382. /* This iteration does not move down the tree, due to the */\
  383. /* disruptiveness of node splitting. */\
  384. /* */\
  385. /* Rotate right. */\
  386. rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t); \
  387. /* Pass red links up one level. */\
  388. rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
  389. rbp_black_set(a_type, a_field, rbp_i_u); \
  390. if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) { \
  391. rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t); \
  392. rbp_i_c = rbp_i_t; \
  393. } else { \
  394. /* rbp_i_c was the right child of rbp_i_p, so rotate */\
  395. /* left in order to maintain the left-leaning */\
  396. /* invariant. */\
  397. assert(rbp_right_get(a_type, a_field, rbp_i_p) \
  398. == rbp_i_c); \
  399. rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t); \
  400. rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u); \
  401. if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
  402. rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u); \
  403. } else { \
  404. assert(rbp_right_get(a_type, a_field, rbp_i_g) \
  405. == rbp_i_p); \
  406. rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u); \
  407. } \
  408. rbp_i_p = rbp_i_u; \
  409. rbp_i_cmp = (a_cmp)((a_node), rbp_i_p); \
  410. if (rbp_i_cmp < 0) { \
  411. rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p); \
  412. } else { \
  413. assert(rbp_i_cmp > 0); \
  414. rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p); \
  415. } \
  416. continue; \
  417. } \
  418. } \
  419. rbp_i_g = rbp_i_p; \
  420. rbp_i_p = rbp_i_c; \
  421. rbp_i_cmp = (a_cmp)((a_node), rbp_i_c); \
  422. if (rbp_i_cmp < 0) { \
  423. rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c); \
  424. } else { \
  425. assert(rbp_i_cmp > 0); \
  426. rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c); \
  427. } \
  428. } \
  429. /* rbp_i_p now refers to the node under which to insert. */\
  430. rbp_node_new(a_type, a_field, a_tree, (a_node)); \
  431. if (rbp_i_cmp > 0) { \
  432. rbp_right_set(a_type, a_field, rbp_i_p, (a_node)); \
  433. rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t); \
  434. if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
  435. rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t); \
  436. } else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\
  437. rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t); \
  438. } \
  439. } else { \
  440. rbp_left_set(a_type, a_field, rbp_i_p, (a_node)); \
  441. } \
  442. /* Update the root and make sure that it is black. */\
  443. (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s); \
  444. rbp_black_set(a_type, a_field, (a_tree)->rbt_root); \
  445. } while (0)
  446. #define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do { \
  447. a_type rbp_r_s; \
  448. a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u; \
  449. int rbp_r_cmp; \
  450. rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root); \
  451. rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil); \
  452. rbp_black_set(a_type, a_field, &rbp_r_s); \
  453. rbp_r_p = &rbp_r_s; \
  454. rbp_r_c = (a_tree)->rbt_root; \
  455. rbp_r_xp = &(a_tree)->rbt_nil; \
  456. /* Iterate down the tree, but always transform 2-nodes to 3- or */\
  457. /* 4-nodes in order to maintain the invariant that the current */\
  458. /* node is not a 2-node. This allows simple deletion once a leaf */\
  459. /* is reached. Handle the root specially though, since there may */\
  460. /* be no way to convert it from a 2-node to a 3-node. */\
  461. rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
  462. if (rbp_r_cmp < 0) { \
  463. rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
  464. rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
  465. if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
  466. && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
  467. /* Apply standard transform to prepare for left move. */\
  468. rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \
  469. rbp_black_set(a_type, a_field, rbp_r_t); \
  470. rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
  471. rbp_r_c = rbp_r_t; \
  472. } else { \
  473. /* Move left. */\
  474. rbp_r_p = rbp_r_c; \
  475. rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
  476. } \
  477. } else { \
  478. if (rbp_r_cmp == 0) { \
  479. assert((a_node) == rbp_r_c); \
  480. if (rbp_right_get(a_type, a_field, rbp_r_c) \
  481. == &(a_tree)->rbt_nil) { \
  482. /* Delete root node (which is also a leaf node). */\
  483. if (rbp_left_get(a_type, a_field, rbp_r_c) \
  484. != &(a_tree)->rbt_nil) { \
  485. rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \
  486. rbp_right_set(a_type, a_field, rbp_r_t, \
  487. &(a_tree)->rbt_nil); \
  488. } else { \
  489. rbp_r_t = &(a_tree)->rbt_nil; \
  490. } \
  491. rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
  492. } else { \
  493. /* This is the node we want to delete, but we will */\
  494. /* instead swap it with its successor and delete the */\
  495. /* successor. Record enough information to do the */\
  496. /* swap later. rbp_r_xp is the a_node's parent. */\
  497. rbp_r_xp = rbp_r_p; \
  498. rbp_r_cmp = 1; /* Note that deletion is incomplete. */\
  499. } \
  500. } \
  501. if (rbp_r_cmp == 1) { \
  502. if (rbp_red_get(a_type, a_field, rbp_left_get(a_type, \
  503. a_field, rbp_right_get(a_type, a_field, rbp_r_c))) \
  504. == false) { \
  505. rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
  506. if (rbp_red_get(a_type, a_field, rbp_r_t)) { \
  507. /* Standard transform. */\
  508. rbp_move_red_right(a_type, a_field, rbp_r_c, \
  509. rbp_r_t); \
  510. } else { \
  511. /* Root-specific transform. */\
  512. rbp_red_set(a_type, a_field, rbp_r_c); \
  513. rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
  514. if (rbp_red_get(a_type, a_field, rbp_r_u)) { \
  515. rbp_black_set(a_type, a_field, rbp_r_u); \
  516. rbp_rotate_right(a_type, a_field, rbp_r_c, \
  517. rbp_r_t); \
  518. rbp_rotate_left(a_type, a_field, rbp_r_c, \
  519. rbp_r_u); \
  520. rbp_right_set(a_type, a_field, rbp_r_t, \
  521. rbp_r_u); \
  522. } else { \
  523. rbp_red_set(a_type, a_field, rbp_r_t); \
  524. rbp_rotate_left(a_type, a_field, rbp_r_c, \
  525. rbp_r_t); \
  526. } \
  527. } \
  528. rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
  529. rbp_r_c = rbp_r_t; \
  530. } else { \
  531. /* Move right. */\
  532. rbp_r_p = rbp_r_c; \
  533. rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
  534. } \
  535. } \
  536. } \
  537. if (rbp_r_cmp != 0) { \
  538. while (true) { \
  539. assert(rbp_r_p != &(a_tree)->rbt_nil); \
  540. rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
  541. if (rbp_r_cmp < 0) { \
  542. rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
  543. if (rbp_r_t == &(a_tree)->rbt_nil) { \
  544. /* rbp_r_c now refers to the successor node to */\
  545. /* relocate, and rbp_r_xp/a_node refer to the */\
  546. /* context for the relocation. */\
  547. if (rbp_left_get(a_type, a_field, rbp_r_xp) \
  548. == (a_node)) { \
  549. rbp_left_set(a_type, a_field, rbp_r_xp, \
  550. rbp_r_c); \
  551. } else { \
  552. assert(rbp_right_get(a_type, a_field, \
  553. rbp_r_xp) == (a_node)); \
  554. rbp_right_set(a_type, a_field, rbp_r_xp, \
  555. rbp_r_c); \
  556. } \
  557. rbp_left_set(a_type, a_field, rbp_r_c, \
  558. rbp_left_get(a_type, a_field, (a_node))); \
  559. rbp_right_set(a_type, a_field, rbp_r_c, \
  560. rbp_right_get(a_type, a_field, (a_node))); \
  561. rbp_color_set(a_type, a_field, rbp_r_c, \
  562. rbp_red_get(a_type, a_field, (a_node))); \
  563. if (rbp_left_get(a_type, a_field, rbp_r_p) \
  564. == rbp_r_c) { \
  565. rbp_left_set(a_type, a_field, rbp_r_p, \
  566. &(a_tree)->rbt_nil); \
  567. } else { \
  568. assert(rbp_right_get(a_type, a_field, rbp_r_p) \
  569. == rbp_r_c); \
  570. rbp_right_set(a_type, a_field, rbp_r_p, \
  571. &(a_tree)->rbt_nil); \
  572. } \
  573. break; \
  574. } \
  575. rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
  576. if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
  577. && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
  578. rbp_move_red_left(a_type, a_field, rbp_r_c, \
  579. rbp_r_t); \
  580. if (rbp_left_get(a_type, a_field, rbp_r_p) \
  581. == rbp_r_c) { \
  582. rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
  583. } else { \
  584. rbp_right_set(a_type, a_field, rbp_r_p, \
  585. rbp_r_t); \
  586. } \
  587. rbp_r_c = rbp_r_t; \
  588. } else { \
  589. rbp_r_p = rbp_r_c; \
  590. rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
  591. } \
  592. } else { \
  593. /* Check whether to delete this node (it has to be */\
  594. /* the correct node and a leaf node). */\
  595. if (rbp_r_cmp == 0) { \
  596. assert((a_node) == rbp_r_c); \
  597. if (rbp_right_get(a_type, a_field, rbp_r_c) \
  598. == &(a_tree)->rbt_nil) { \
  599. /* Delete leaf node. */\
  600. if (rbp_left_get(a_type, a_field, rbp_r_c) \
  601. != &(a_tree)->rbt_nil) { \
  602. rbp_lean_right(a_type, a_field, rbp_r_c, \
  603. rbp_r_t); \
  604. rbp_right_set(a_type, a_field, rbp_r_t, \
  605. &(a_tree)->rbt_nil); \
  606. } else { \
  607. rbp_r_t = &(a_tree)->rbt_nil; \
  608. } \
  609. if (rbp_left_get(a_type, a_field, rbp_r_p) \
  610. == rbp_r_c) { \
  611. rbp_left_set(a_type, a_field, rbp_r_p, \
  612. rbp_r_t); \
  613. } else { \
  614. rbp_right_set(a_type, a_field, rbp_r_p, \
  615. rbp_r_t); \
  616. } \
  617. break; \
  618. } else { \
  619. /* This is the node we want to delete, but we */\
  620. /* will instead swap it with its successor */\
  621. /* and delete the successor. Record enough */\
  622. /* information to do the swap later. */\
  623. /* rbp_r_xp is a_node's parent. */\
  624. rbp_r_xp = rbp_r_p; \
  625. } \
  626. } \
  627. rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c); \
  628. rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
  629. if (rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
  630. rbp_move_red_right(a_type, a_field, rbp_r_c, \
  631. rbp_r_t); \
  632. if (rbp_left_get(a_type, a_field, rbp_r_p) \
  633. == rbp_r_c) { \
  634. rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\
  635. } else { \
  636. rbp_right_set(a_type, a_field, rbp_r_p, \
  637. rbp_r_t); \
  638. } \
  639. rbp_r_c = rbp_r_t; \
  640. } else { \
  641. rbp_r_p = rbp_r_c; \
  642. rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
  643. } \
  644. } \
  645. } \
  646. } \
  647. /* Update root. */\
  648. (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s); \
  649. } while (0)
  650. /*
  651. * The rb_wrap() macro provides a convenient way to wrap functions around the
  652. * cpp macros. The main benefits of wrapping are that 1) repeated macro
  653. * expansion can cause code bloat, especially for rb_{insert,remove)(), and
  654. * 2) type, linkage, comparison functions, etc. need not be specified at every
  655. * call point.
  656. */
/*
 * rb_wrap() generates a complete set of typed wrapper functions for one
 * tree type.  Each generated function forwards to the corresponding
 * rb_*() macro, so each macro body is expanded exactly once per tree
 * type instead of at every call site.
 *
 *   a_attr      : linkage/attribute tokens for the generated functions
 *                 (e.g. static).
 *   a_prefix    : token prefix pasted onto each generated function name.
 *   a_tree_type : struct type of the tree header.
 *   a_type      : struct type of the tree nodes.
 *   a_field     : name of the rb_node field within a_type.
 *   a_cmp       : comparison function, int (*)(a_type *, a_type *).
 *
 * Generated API: <prefix>new, first, last, next, prev, search, nsearch,
 * psearch, insert, remove.  The search variants return NULL-or-node per
 * the underlying rb_search()/rb_nsearch()/rb_psearch() macros
 * (presumably exact match vs. nearest successor/predecessor — confirm
 * against their definitions earlier in this file).
 */
#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp) \
a_attr void \
a_prefix##new(a_tree_type *tree) { \
rb_new(a_type, a_field, tree); \
} \
a_attr a_type * \
a_prefix##first(a_tree_type *tree) { \
a_type *ret; \
rb_first(a_type, a_field, tree, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##last(a_tree_type *tree) { \
a_type *ret; \
rb_last(a_type, a_field, tree, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##next(a_tree_type *tree, a_type *node) { \
a_type *ret; \
rb_next(a_type, a_field, a_cmp, tree, node, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##prev(a_tree_type *tree, a_type *node) { \
a_type *ret; \
rb_prev(a_type, a_field, a_cmp, tree, node, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##search(a_tree_type *tree, a_type *key) { \
a_type *ret; \
rb_search(a_type, a_field, a_cmp, tree, key, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##nsearch(a_tree_type *tree, a_type *key) { \
a_type *ret; \
rb_nsearch(a_type, a_field, a_cmp, tree, key, ret); \
return (ret); \
} \
a_attr a_type * \
a_prefix##psearch(a_tree_type *tree, a_type *key) { \
a_type *ret; \
rb_psearch(a_type, a_field, a_cmp, tree, key, ret); \
return (ret); \
} \
a_attr void \
a_prefix##insert(a_tree_type *tree, a_type *node) { \
rb_insert(a_type, a_field, a_cmp, tree, node); \
} \
a_attr void \
a_prefix##remove(a_tree_type *tree, a_type *node) { \
rb_remove(a_type, a_field, a_cmp, tree, node); \
}
  712. /*
  713. * The iterators simulate recursion via an array of pointers that store the
  714. * current path. This is critical to performance, since a series of calls to
  715. * rb_{next,prev}() would require time proportional to (n lg n), whereas this
  716. * implementation only requires time proportional to (n).
  717. *
  718. * Since the iterators cache a path down the tree, any tree modification may
  719. * cause the cached path to become invalid. In order to continue iteration,
  720. * use something like the following sequence:
  721. *
  722. * {
  723. * a_type *node, *tnode;
  724. *
  725. * rb_foreach_begin(a_type, a_field, a_tree, node) {
  726. * ...
  727. * rb_next(a_type, a_field, a_cmp, a_tree, node, tnode);
  728. * rb_remove(a_type, a_field, a_cmp, a_tree, node);
  729. * rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode);
  730. * ...
  731. * } rb_foreach_end(a_type, a_field, a_tree, node)
  732. * }
  733. *
  734. * Note that this idiom is not advised if every iteration modifies the tree,
  735. * since in that case there is no algorithmic complexity improvement over a
  736. * series of rb_{next,prev}() calls, thus making the setup overhead wasted
  737. * effort.
  738. */
  739. /*
  740. * Avoid using variable-length arrays.
  741. * Size the path arrays such that they are always large enough, even if a
  742. * tree consumes all of memory. Since each node must contain a minimum of
  743. * two pointers, there can never be more nodes than:
  744. *
  745. * 1 << ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))
  746. *
  747. * Since the depth of a tree is limited to 3*lg(#nodes), the maximum depth
  748. * is:
  749. *
  750. * (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
  751. *
  752. * This works out to a maximum depth of 87 and 180 for 32- and 64-bit
* systems, respectively (approximately 348 and 1440 bytes, respectively).
  754. */
/*
 * The rbp_compute_*_height() hooks intentionally expand to nothing, and
 * the *_height macros are compile-time constants: the iterator path
 * arrays are statically sized to the worst-case depth derived above
 * rather than computed per tree.  (The empty compute hooks presumably
 * exist so an alternate, dynamically-sized configuration could share the
 * foreach macros' call shape — confirm against the rest of this header.)
 */
#define rbp_compute_f_height(a_type, a_field, a_tree)
#define rbp_f_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
#define rbp_compute_fr_height(a_type, a_field, a_tree)
#define rbp_fr_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
/*
 * rb_foreach_begin() opens an in-order (ascending) traversal of a_tree,
 * binding each node in turn to a_var.  Recursion is simulated with an
 * explicit path array (sized per the discussion above), so a full
 * traversal is O(n) rather than the O(n lg n) of repeated rb_next()
 * calls.  The user's loop body follows this macro and must be closed by
 * a matching rb_foreach_end() with the same arguments; together they
 * form one statement block.
 */
#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { \
rbp_compute_f_height(a_type, a_field, a_tree) \
{ \
/* Initialize the path to contain the left spine. */\
a_type *rbp_f_path[rbp_f_height]; \
a_type *rbp_f_node; \
bool rbp_f_synced = false; \
unsigned rbp_f_depth = 0; \
if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
rbp_f_depth++; \
while ((rbp_f_node = rbp_left_get(a_type, a_field, \
rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
rbp_f_path[rbp_f_depth] = rbp_f_node; \
rbp_f_depth++; \
} \
} \
/* While the path is non-empty, iterate. */\
while (rbp_f_depth > 0) { \
(a_var) = rbp_f_path[rbp_f_depth-1];
  779. /* Only use if modifying the tree during iteration. */
  780. #define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node) \
  781. /* Re-initialize the path to contain the path to a_node. */\
  782. rbp_f_depth = 0; \
  783. if (a_node != NULL) { \
  784. if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
  785. rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
  786. rbp_f_depth++; \
  787. rbp_f_node = rbp_f_path[0]; \
  788. while (true) { \
  789. int rbp_f_cmp = (a_cmp)((a_node), \
  790. rbp_f_path[rbp_f_depth-1]); \
  791. if (rbp_f_cmp < 0) { \
  792. rbp_f_node = rbp_left_get(a_type, a_field, \
  793. rbp_f_path[rbp_f_depth-1]); \
  794. } else if (rbp_f_cmp > 0) { \
  795. rbp_f_node = rbp_right_get(a_type, a_field, \
  796. rbp_f_path[rbp_f_depth-1]); \
  797. } else { \
  798. break; \
  799. } \
  800. assert(rbp_f_node != &(a_tree)->rbt_nil); \
  801. rbp_f_path[rbp_f_depth] = rbp_f_node; \
  802. rbp_f_depth++; \
  803. } \
  804. } \
  805. } \
  806. rbp_f_synced = true;
/*
 * rb_foreach_end() closes a traversal opened by rb_foreach_begin().  On
 * each pass it advances the cached path to the in-order successor of the
 * current node — unless rb_foreach_next() just rebuilt the path
 * (rbp_f_synced), in which case the rebuilt path is used unchanged.
 * NOTE(review): unlike rb_foreach_reverse_end(), there is no explicit
 * rbp_f_depth == 0 guard here; if rb_foreach_next() was given NULL, the
 * synced `continue` reaches the while test with depth 0 and iteration
 * terminates, so the guard appears unnecessary — confirm intent.
 */
#define rb_foreach_end(a_type, a_field, a_tree, a_var) \
if (rbp_f_synced) { \
rbp_f_synced = false; \
continue; \
} \
/* Find the successor. */\
if ((rbp_f_node = rbp_right_get(a_type, a_field, \
rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
/* The successor is the left-most node in the right */\
/* subtree. */\
rbp_f_path[rbp_f_depth] = rbp_f_node; \
rbp_f_depth++; \
while ((rbp_f_node = rbp_left_get(a_type, a_field, \
rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
rbp_f_path[rbp_f_depth] = rbp_f_node; \
rbp_f_depth++; \
} \
} else { \
/* The successor is above the current node. Unwind */\
/* until a left-leaning edge is removed from the */\
/* path, or the path is empty. */\
for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \
if (rbp_left_get(a_type, a_field, \
rbp_f_path[rbp_f_depth-1]) \
== rbp_f_path[rbp_f_depth]) { \
break; \
} \
} \
} \
} \
} \
}
/*
 * rb_foreach_reverse_begin() opens a reverse in-order (descending)
 * traversal of a_tree, binding each node in turn to a_var.  It mirrors
 * rb_foreach_begin(): an explicit path array seeded with the right spine
 * simulates recursion, making a full traversal O(n).  The user's loop
 * body follows this macro and must be closed by a matching
 * rb_foreach_reverse_end() with the same arguments.
 */
#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { \
rbp_compute_fr_height(a_type, a_field, a_tree) \
{ \
/* Initialize the path to contain the right spine. */\
a_type *rbp_fr_path[rbp_fr_height]; \
a_type *rbp_fr_node; \
bool rbp_fr_synced = false; \
unsigned rbp_fr_depth = 0; \
if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
rbp_fr_depth++; \
while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
rbp_fr_depth++; \
} \
} \
/* While the path is non-empty, iterate. */\
while (rbp_fr_depth > 0) { \
(a_var) = rbp_fr_path[rbp_fr_depth-1];
  859. /* Only use if modifying the tree during iteration. */
  860. #define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \
  861. /* Re-initialize the path to contain the path to a_node. */\
  862. rbp_fr_depth = 0; \
  863. if (a_node != NULL) { \
  864. if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
  865. rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
  866. rbp_fr_depth++; \
  867. rbp_fr_node = rbp_fr_path[0]; \
  868. while (true) { \
  869. int rbp_fr_cmp = (a_cmp)((a_node), \
  870. rbp_fr_path[rbp_fr_depth-1]); \
  871. if (rbp_fr_cmp < 0) { \
  872. rbp_fr_node = rbp_left_get(a_type, a_field, \
  873. rbp_fr_path[rbp_fr_depth-1]); \
  874. } else if (rbp_fr_cmp > 0) { \
  875. rbp_fr_node = rbp_right_get(a_type, a_field,\
  876. rbp_fr_path[rbp_fr_depth-1]); \
  877. } else { \
  878. break; \
  879. } \
  880. assert(rbp_fr_node != &(a_tree)->rbt_nil); \
  881. rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
  882. rbp_fr_depth++; \
  883. } \
  884. } \
  885. } \
  886. rbp_fr_synced = true;
/*
 * rb_foreach_reverse_end() closes a traversal opened by
 * rb_foreach_reverse_begin().  On each pass it advances the cached path
 * to the in-order predecessor of the current node — unless
 * rb_foreach_reverse_prev() just rebuilt the path (rbp_fr_synced), in
 * which case the rebuilt path is used unchanged.
 */
#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \
if (rbp_fr_synced) { \
rbp_fr_synced = false; \
continue; \
} \
if (rbp_fr_depth == 0) { \
/* rb_foreach_reverse_prev() was called with a NULL */\
/* a_node. */\
break; \
} \
/* Find the predecessor. */\
if ((rbp_fr_node = rbp_left_get(a_type, a_field, \
rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
/* The predecessor is the right-most node in the left */\
/* subtree. */\
rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
rbp_fr_depth++; \
while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) {\
rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
rbp_fr_depth++; \
} \
} else { \
/* The predecessor is above the current node. Unwind */\
/* until a right-leaning edge is removed from the */\
/* path, or the path is empty. */\
for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) {\
if (rbp_right_get(a_type, a_field, \
rbp_fr_path[rbp_fr_depth-1]) \
== rbp_fr_path[rbp_fr_depth]) { \
break; \
} \
} \
} \
} \
} \
}
  924. #endif /* RB_H_ */