1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/memory/mozjemalloc/rb.h Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,982 @@ 1.4 +/****************************************************************************** 1.5 + * 1.6 + * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>. 1.7 + * All rights reserved. 1.8 + * 1.9 + * Redistribution and use in source and binary forms, with or without 1.10 + * modification, are permitted provided that the following conditions 1.11 + * are met: 1.12 + * 1. Redistributions of source code must retain the above copyright 1.13 + * notice(s), this list of conditions and the following disclaimer 1.14 + * unmodified other than the allowable addition of one or more 1.15 + * copyright notices. 1.16 + * 2. Redistributions in binary form must reproduce the above copyright 1.17 + * notice(s), this list of conditions and the following disclaimer in 1.18 + * the documentation and/or other materials provided with the 1.19 + * distribution. 1.20 + * 1.21 + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY 1.22 + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 1.23 + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 1.24 + * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE 1.25 + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 1.26 + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 1.27 + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 1.28 + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 1.29 + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 1.30 + * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, 1.31 + * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 1.32 + * 1.33 + ****************************************************************************** 1.34 + * 1.35 + * cpp macro implementation of left-leaning red-black trees. 1.36 + * 1.37 + * Usage: 1.38 + * 1.39 + * (Optional.) 1.40 + * #define SIZEOF_PTR ... 1.41 + * #define SIZEOF_PTR_2POW ... 1.42 + * #define RB_NO_C99_VARARRAYS 1.43 + * 1.44 + * (Optional, see assert(3).) 1.45 + * #define NDEBUG 1.46 + * 1.47 + * (Required.) 1.48 + * #include <assert.h> 1.49 + * #include <rb.h> 1.50 + * ... 1.51 + * 1.52 + * All operations are done non-recursively. Parent pointers are not used, and 1.53 + * color bits are stored in the least significant bit of right-child pointers, 1.54 + * thus making node linkage as compact as is possible for red-black trees. 1.55 + * 1.56 + * Some macros use a comparison function pointer, which is expected to have the 1.57 + * following prototype: 1.58 + * 1.59 + * int a_cmp(a_type *a_node, a_type *a_other); 1.60 + * 1.61 + * (For the key-based macros, an a_key is passed in place of a_node.) 1.62 + * 1.63 + * Interpretation of comparison function return values: 1.64 + * 1.65 + * -1 : a_node < a_other 1.66 + * 0 : a_node == a_other 1.67 + * 1 : a_node > a_other 1.68 + * 1.69 + * In all cases, the a_node or a_key macro argument is the first argument to the 1.70 + * comparison function, which makes it possible to write comparison functions 1.71 + * that treat the first argument specially. 1.72 + * 1.73 + ******************************************************************************/ 1.74 + 1.75 +#ifndef RB_H_ 1.76 +#define RB_H_ 1.77 + 1.78 +#if 0 1.79 +#include <sys/cdefs.h> 1.80 +__FBSDID("$FreeBSD: head/lib/libc/stdlib/rb.h 178995 2008-05-14 18:33:13Z jasone $"); 1.81 +#endif 1.82 + 1.83 +/* Node structure.
*/ 1.84 +#define rb_node(a_type) \ 1.85 +struct { \ 1.86 + a_type *rbn_left; \ 1.87 + a_type *rbn_right_red; \ 1.88 +} 1.89 + 1.90 +/* Root structure. */ 1.91 +#define rb_tree(a_type) \ 1.92 +struct { \ 1.93 + a_type *rbt_root; \ 1.94 + a_type rbt_nil; \ 1.95 +} 1.96 + 1.97 +/* Left accessors. */ 1.98 +#define rbp_left_get(a_type, a_field, a_node) \ 1.99 + ((a_node)->a_field.rbn_left) 1.100 +#define rbp_left_set(a_type, a_field, a_node, a_left) do { \ 1.101 + (a_node)->a_field.rbn_left = a_left; \ 1.102 +} while (0) 1.103 + 1.104 +/* Right accessors. */ 1.105 +#define rbp_right_get(a_type, a_field, a_node) \ 1.106 + ((a_type *) (((intptr_t) (a_node)->a_field.rbn_right_red) \ 1.107 + & ((ssize_t)-2))) 1.108 +#define rbp_right_set(a_type, a_field, a_node, a_right) do { \ 1.109 + (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right) \ 1.110 + | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((size_t)1))); \ 1.111 +} while (0) 1.112 + 1.113 +/* Color accessors. */ 1.114 +#define rbp_red_get(a_type, a_field, a_node) \ 1.115 + ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red) \ 1.116 + & ((size_t)1))) 1.117 +#define rbp_color_set(a_type, a_field, a_node, a_red) do { \ 1.118 + (a_node)->a_field.rbn_right_red = (a_type *) ((((intptr_t) \ 1.119 + (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)) \ 1.120 + | ((ssize_t)a_red)); \ 1.121 +} while (0) 1.122 +#define rbp_red_set(a_type, a_field, a_node) do { \ 1.123 + (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \ 1.124 + (a_node)->a_field.rbn_right_red) | ((size_t)1)); \ 1.125 +} while (0) 1.126 +#define rbp_black_set(a_type, a_field, a_node) do { \ 1.127 + (a_node)->a_field.rbn_right_red = (a_type *) (((intptr_t) \ 1.128 + (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)); \ 1.129 +} while (0) 1.130 + 1.131 +/* Node initializer. */ 1.132 +#define rbp_node_new(a_type, a_field, a_tree, a_node) do { \ 1.133 + rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \ 1.134 + rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \ 1.135 + rbp_red_set(a_type, a_field, (a_node)); \ 1.136 +} while (0) 1.137 + 1.138 +/* Tree initializer. */ 1.139 +#define rb_new(a_type, a_field, a_tree) do { \ 1.140 + (a_tree)->rbt_root = &(a_tree)->rbt_nil; \ 1.141 + rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil); \ 1.142 + rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil); \ 1.143 +} while (0) 1.144 + 1.145 +/* Tree operations. 
*/ 1.146 +#define rbp_black_height(a_type, a_field, a_tree, r_height) do { \ 1.147 + a_type *rbp_bh_t; \ 1.148 + for (rbp_bh_t = (a_tree)->rbt_root, (r_height) = 0; \ 1.149 + rbp_bh_t != &(a_tree)->rbt_nil; \ 1.150 + rbp_bh_t = rbp_left_get(a_type, a_field, rbp_bh_t)) { \ 1.151 + if (rbp_red_get(a_type, a_field, rbp_bh_t) == false) { \ 1.152 + (r_height)++; \ 1.153 + } \ 1.154 + } \ 1.155 +} while (0) 1.156 + 1.157 +#define rbp_first(a_type, a_field, a_tree, a_root, r_node) do { \ 1.158 + for ((r_node) = (a_root); \ 1.159 + rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \ 1.160 + (r_node) = rbp_left_get(a_type, a_field, (r_node))) { \ 1.161 + } \ 1.162 +} while (0) 1.163 + 1.164 +#define rbp_last(a_type, a_field, a_tree, a_root, r_node) do { \ 1.165 + for ((r_node) = (a_root); \ 1.166 + rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \ 1.167 + (r_node) = rbp_right_get(a_type, a_field, (r_node))) { \ 1.168 + } \ 1.169 +} while (0) 1.170 + 1.171 +#define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \ 1.172 + if (rbp_right_get(a_type, a_field, (a_node)) \ 1.173 + != &(a_tree)->rbt_nil) { \ 1.174 + rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type, \ 1.175 + a_field, (a_node)), (r_node)); \ 1.176 + } else { \ 1.177 + a_type *rbp_n_t = (a_tree)->rbt_root; \ 1.178 + assert(rbp_n_t != &(a_tree)->rbt_nil); \ 1.179 + (r_node) = &(a_tree)->rbt_nil; \ 1.180 + while (true) { \ 1.181 + int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t); \ 1.182 + if (rbp_n_cmp < 0) { \ 1.183 + (r_node) = rbp_n_t; \ 1.184 + rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t); \ 1.185 + } else if (rbp_n_cmp > 0) { \ 1.186 + rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t); \ 1.187 + } else { \ 1.188 + break; \ 1.189 + } \ 1.190 + assert(rbp_n_t != &(a_tree)->rbt_nil); \ 1.191 + } \ 1.192 + } \ 1.193 +} while (0) 1.194 + 1.195 +#define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \ 1.196 + if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) {\ 1.197 + rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type, \ 1.198 + a_field, (a_node)), (r_node)); \ 1.199 + } else { \ 1.200 + a_type *rbp_p_t = (a_tree)->rbt_root; \ 1.201 + assert(rbp_p_t != &(a_tree)->rbt_nil); \ 1.202 + (r_node) = &(a_tree)->rbt_nil; \ 1.203 + while (true) { \ 1.204 + int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t); \ 1.205 + if (rbp_p_cmp < 0) { \ 1.206 + rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t); \ 1.207 + } else if (rbp_p_cmp > 0) { \ 1.208 + (r_node) = rbp_p_t; \ 1.209 + rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t); \ 1.210 + } else { \ 1.211 + break; \ 1.212 + } \ 1.213 + assert(rbp_p_t != &(a_tree)->rbt_nil); \ 1.214 + } \ 1.215 + } \ 1.216 +} while (0) 1.217 + 1.218 +#define rb_first(a_type, a_field, a_tree, r_node) do { \ 1.219 + rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \ 1.220 + if ((r_node) == &(a_tree)->rbt_nil) { \ 1.221 + (r_node) = NULL; \ 1.222 + } \ 1.223 +} while (0) 1.224 + 1.225 +#define rb_last(a_type, a_field, a_tree, r_node) do { \ 1.226 + rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, r_node); \ 1.227 + if ((r_node) == &(a_tree)->rbt_nil) { \ 1.228 + (r_node) = NULL; \ 1.229 + } \ 1.230 +} while (0) 1.231 + 1.232 +#define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \ 1.233 + rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \ 1.234 + if ((r_node) == &(a_tree)->rbt_nil) { \ 1.235 + (r_node) = NULL; \ 1.236 + } \ 1.237 +} while (0) 1.238 + 1.239 +#define rb_prev(a_type, a_field, a_cmp, a_tree, 
a_node, r_node) do { \ 1.240 + rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \ 1.241 + if ((r_node) == &(a_tree)->rbt_nil) { \ 1.242 + (r_node) = NULL; \ 1.243 + } \ 1.244 +} while (0) 1.245 + 1.246 +#define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \ 1.247 + int rbp_se_cmp; \ 1.248 + (r_node) = (a_tree)->rbt_root; \ 1.249 + while ((r_node) != &(a_tree)->rbt_nil \ 1.250 + && (rbp_se_cmp = (a_cmp)((a_key), (r_node))) != 0) { \ 1.251 + if (rbp_se_cmp < 0) { \ 1.252 + (r_node) = rbp_left_get(a_type, a_field, (r_node)); \ 1.253 + } else { \ 1.254 + (r_node) = rbp_right_get(a_type, a_field, (r_node)); \ 1.255 + } \ 1.256 + } \ 1.257 + if ((r_node) == &(a_tree)->rbt_nil) { \ 1.258 + (r_node) = NULL; \ 1.259 + } \ 1.260 +} while (0) 1.261 + 1.262 +/* 1.263 + * Find a match if it exists. Otherwise, find the next greater node, if one 1.264 + * exists. 1.265 + */ 1.266 +#define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \ 1.267 + a_type *rbp_ns_t = (a_tree)->rbt_root; \ 1.268 + (r_node) = NULL; \ 1.269 + while (rbp_ns_t != &(a_tree)->rbt_nil) { \ 1.270 + int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_t); \ 1.271 + if (rbp_ns_cmp < 0) { \ 1.272 + (r_node) = rbp_ns_t; \ 1.273 + rbp_ns_t = rbp_left_get(a_type, a_field, rbp_ns_t); \ 1.274 + } else if (rbp_ns_cmp > 0) { \ 1.275 + rbp_ns_t = rbp_right_get(a_type, a_field, rbp_ns_t); \ 1.276 + } else { \ 1.277 + (r_node) = rbp_ns_t; \ 1.278 + break; \ 1.279 + } \ 1.280 + } \ 1.281 +} while (0) 1.282 + 1.283 +/* 1.284 + * Find a match if it exists. Otherwise, find the previous lesser node, if one 1.285 + * exists. 1.286 + */ 1.287 +#define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \ 1.288 + a_type *rbp_ps_t = (a_tree)->rbt_root; \ 1.289 + (r_node) = NULL; \ 1.290 + while (rbp_ps_t != &(a_tree)->rbt_nil) { \ 1.291 + int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_t); \ 1.292 + if (rbp_ps_cmp < 0) { \ 1.293 + rbp_ps_t = rbp_left_get(a_type, a_field, rbp_ps_t); \ 1.294 + } else if (rbp_ps_cmp > 0) { \ 1.295 + (r_node) = rbp_ps_t; \ 1.296 + rbp_ps_t = rbp_right_get(a_type, a_field, rbp_ps_t); \ 1.297 + } else { \ 1.298 + (r_node) = rbp_ps_t; \ 1.299 + break; \ 1.300 + } \ 1.301 + } \ 1.302 +} while (0) 1.303 + 1.304 +#define rbp_rotate_left(a_type, a_field, a_node, r_node) do { \ 1.305 + (r_node) = rbp_right_get(a_type, a_field, (a_node)); \ 1.306 + rbp_right_set(a_type, a_field, (a_node), \ 1.307 + rbp_left_get(a_type, a_field, (r_node))); \ 1.308 + rbp_left_set(a_type, a_field, (r_node), (a_node)); \ 1.309 +} while (0) 1.310 + 1.311 +#define rbp_rotate_right(a_type, a_field, a_node, r_node) do { \ 1.312 + (r_node) = rbp_left_get(a_type, a_field, (a_node)); \ 1.313 + rbp_left_set(a_type, a_field, (a_node), \ 1.314 + rbp_right_get(a_type, a_field, (r_node))); \ 1.315 + rbp_right_set(a_type, a_field, (r_node), (a_node)); \ 1.316 +} while (0) 1.317 + 1.318 +#define rbp_lean_left(a_type, a_field, a_node, r_node) do { \ 1.319 + bool rbp_ll_red; \ 1.320 + rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \ 1.321 + rbp_ll_red = rbp_red_get(a_type, a_field, (a_node)); \ 1.322 + rbp_color_set(a_type, a_field, (r_node), rbp_ll_red); \ 1.323 + rbp_red_set(a_type, a_field, (a_node)); \ 1.324 +} while (0) 1.325 + 1.326 +#define rbp_lean_right(a_type, a_field, a_node, r_node) do { \ 1.327 + bool rbp_lr_red; \ 1.328 + rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \ 1.329 + rbp_lr_red = rbp_red_get(a_type, a_field, (a_node)); \ 1.330 + rbp_color_set(a_type, a_field, (r_node), rbp_lr_red); \ 1.331 + 
rbp_red_set(a_type, a_field, (a_node)); \ 1.332 +} while (0) 1.333 + 1.334 +#define rbp_move_red_left(a_type, a_field, a_node, r_node) do { \ 1.335 + a_type *rbp_mrl_t, *rbp_mrl_u; \ 1.336 + rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node)); \ 1.337 + rbp_red_set(a_type, a_field, rbp_mrl_t); \ 1.338 + rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \ 1.339 + rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t); \ 1.340 + if (rbp_red_get(a_type, a_field, rbp_mrl_u)) { \ 1.341 + rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u); \ 1.342 + rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u); \ 1.343 + rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \ 1.344 + rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \ 1.345 + if (rbp_red_get(a_type, a_field, rbp_mrl_t)) { \ 1.346 + rbp_black_set(a_type, a_field, rbp_mrl_t); \ 1.347 + rbp_red_set(a_type, a_field, (a_node)); \ 1.348 + rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t); \ 1.349 + rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t); \ 1.350 + } else { \ 1.351 + rbp_black_set(a_type, a_field, (a_node)); \ 1.352 + } \ 1.353 + } else { \ 1.354 + rbp_red_set(a_type, a_field, (a_node)); \ 1.355 + rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \ 1.356 + } \ 1.357 +} while (0) 1.358 + 1.359 +#define rbp_move_red_right(a_type, a_field, a_node, r_node) do { \ 1.360 + a_type *rbp_mrr_t; \ 1.361 + rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node)); \ 1.362 + if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \ 1.363 + a_type *rbp_mrr_u, *rbp_mrr_v; \ 1.364 + rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t); \ 1.365 + rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u); \ 1.366 + if (rbp_red_get(a_type, a_field, rbp_mrr_v)) { \ 1.367 + rbp_color_set(a_type, a_field, rbp_mrr_u, \ 1.368 + rbp_red_get(a_type, a_field, (a_node))); \ 1.369 + rbp_black_set(a_type, a_field, rbp_mrr_v); \ 1.370 + rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u); \ 1.371 + rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u); \ 1.372 + rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \ 1.373 + rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \ 1.374 + rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \ 1.375 + } else { \ 1.376 + rbp_color_set(a_type, a_field, rbp_mrr_t, \ 1.377 + rbp_red_get(a_type, a_field, (a_node))); \ 1.378 + rbp_red_set(a_type, a_field, rbp_mrr_u); \ 1.379 + rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \ 1.380 + rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \ 1.381 + rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \ 1.382 + } \ 1.383 + rbp_red_set(a_type, a_field, (a_node)); \ 1.384 + } else { \ 1.385 + rbp_red_set(a_type, a_field, rbp_mrr_t); \ 1.386 + rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t); \ 1.387 + if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \ 1.388 + rbp_black_set(a_type, a_field, rbp_mrr_t); \ 1.389 + rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \ 1.390 + rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \ 1.391 + rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \ 1.392 + } else { \ 1.393 + rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \ 1.394 + } \ 1.395 + } \ 1.396 +} while (0) 1.397 + 1.398 +#define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do { \ 1.399 + a_type rbp_i_s; \ 1.400 + a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u; \ 1.401 + int rbp_i_cmp = 0; \ 1.402 + rbp_i_g = &(a_tree)->rbt_nil; \ 1.403 + rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root); \ 1.404 + rbp_right_set(a_type, a_field, 
&rbp_i_s, &(a_tree)->rbt_nil); \ 1.405 + rbp_black_set(a_type, a_field, &rbp_i_s); \ 1.406 + rbp_i_p = &rbp_i_s; \ 1.407 + rbp_i_c = (a_tree)->rbt_root; \ 1.408 + /* Iteratively search down the tree for the insertion point, */\ 1.409 + /* splitting 4-nodes as they are encountered. At the end of each */\ 1.410 + /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down */\ 1.411 + /* the tree, assuming a sufficiently deep tree. */\ 1.412 + while (rbp_i_c != &(a_tree)->rbt_nil) { \ 1.413 + rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c); \ 1.414 + rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \ 1.415 + if (rbp_red_get(a_type, a_field, rbp_i_t) \ 1.416 + && rbp_red_get(a_type, a_field, rbp_i_u)) { \ 1.417 + /* rbp_i_c is the top of a logical 4-node, so split it. */\ 1.418 + /* This iteration does not move down the tree, due to the */\ 1.419 + /* disruptiveness of node splitting. */\ 1.420 + /* */\ 1.421 + /* Rotate right. */\ 1.422 + rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t); \ 1.423 + /* Pass red links up one level. */\ 1.424 + rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \ 1.425 + rbp_black_set(a_type, a_field, rbp_i_u); \ 1.426 + if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) { \ 1.427 + rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t); \ 1.428 + rbp_i_c = rbp_i_t; \ 1.429 + } else { \ 1.430 + /* rbp_i_c was the right child of rbp_i_p, so rotate */\ 1.431 + /* left in order to maintain the left-leaning */\ 1.432 + /* invariant. */\ 1.433 + assert(rbp_right_get(a_type, a_field, rbp_i_p) \ 1.434 + == rbp_i_c); \ 1.435 + rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t); \ 1.436 + rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u); \ 1.437 + if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\ 1.438 + rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u); \ 1.439 + } else { \ 1.440 + assert(rbp_right_get(a_type, a_field, rbp_i_g) \ 1.441 + == rbp_i_p); \ 1.442 + rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u); \ 1.443 + } \ 1.444 + rbp_i_p = rbp_i_u; \ 1.445 + rbp_i_cmp = (a_cmp)((a_node), rbp_i_p); \ 1.446 + if (rbp_i_cmp < 0) { \ 1.447 + rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p); \ 1.448 + } else { \ 1.449 + assert(rbp_i_cmp > 0); \ 1.450 + rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p); \ 1.451 + } \ 1.452 + continue; \ 1.453 + } \ 1.454 + } \ 1.455 + rbp_i_g = rbp_i_p; \ 1.456 + rbp_i_p = rbp_i_c; \ 1.457 + rbp_i_cmp = (a_cmp)((a_node), rbp_i_c); \ 1.458 + if (rbp_i_cmp < 0) { \ 1.459 + rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c); \ 1.460 + } else { \ 1.461 + assert(rbp_i_cmp > 0); \ 1.462 + rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c); \ 1.463 + } \ 1.464 + } \ 1.465 + /* rbp_i_p now refers to the node under which to insert. */\ 1.466 + rbp_node_new(a_type, a_field, a_tree, (a_node)); \ 1.467 + if (rbp_i_cmp > 0) { \ 1.468 + rbp_right_set(a_type, a_field, rbp_i_p, (a_node)); \ 1.469 + rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t); \ 1.470 + if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \ 1.471 + rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t); \ 1.472 + } else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) {\ 1.473 + rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t); \ 1.474 + } \ 1.475 + } else { \ 1.476 + rbp_left_set(a_type, a_field, rbp_i_p, (a_node)); \ 1.477 + } \ 1.478 + /* Update the root and make sure that it is black. 
*/\ 1.479 + (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s); \ 1.480 + rbp_black_set(a_type, a_field, (a_tree)->rbt_root); \ 1.481 +} while (0) 1.482 + 1.483 +#define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do { \ 1.484 + a_type rbp_r_s; \ 1.485 + a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u; \ 1.486 + int rbp_r_cmp; \ 1.487 + rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root); \ 1.488 + rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil); \ 1.489 + rbp_black_set(a_type, a_field, &rbp_r_s); \ 1.490 + rbp_r_p = &rbp_r_s; \ 1.491 + rbp_r_c = (a_tree)->rbt_root; \ 1.492 + rbp_r_xp = &(a_tree)->rbt_nil; \ 1.493 + /* Iterate down the tree, but always transform 2-nodes to 3- or */\ 1.494 + /* 4-nodes in order to maintain the invariant that the current */\ 1.495 + /* node is not a 2-node. This allows simple deletion once a leaf */\ 1.496 + /* is reached. Handle the root specially though, since there may */\ 1.497 + /* be no way to convert it from a 2-node to a 3-node. */\ 1.498 + rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \ 1.499 + if (rbp_r_cmp < 0) { \ 1.500 + rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \ 1.501 + rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \ 1.502 + if (rbp_red_get(a_type, a_field, rbp_r_t) == false \ 1.503 + && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \ 1.504 + /* Apply standard transform to prepare for left move. */\ 1.505 + rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \ 1.506 + rbp_black_set(a_type, a_field, rbp_r_t); \ 1.507 + rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \ 1.508 + rbp_r_c = rbp_r_t; \ 1.509 + } else { \ 1.510 + /* Move left. */\ 1.511 + rbp_r_p = rbp_r_c; \ 1.512 + rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \ 1.513 + } \ 1.514 + } else { \ 1.515 + if (rbp_r_cmp == 0) { \ 1.516 + assert((a_node) == rbp_r_c); \ 1.517 + if (rbp_right_get(a_type, a_field, rbp_r_c) \ 1.518 + == &(a_tree)->rbt_nil) { \ 1.519 + /* Delete root node (which is also a leaf node). */\ 1.520 + if (rbp_left_get(a_type, a_field, rbp_r_c) \ 1.521 + != &(a_tree)->rbt_nil) { \ 1.522 + rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \ 1.523 + rbp_right_set(a_type, a_field, rbp_r_t, \ 1.524 + &(a_tree)->rbt_nil); \ 1.525 + } else { \ 1.526 + rbp_r_t = &(a_tree)->rbt_nil; \ 1.527 + } \ 1.528 + rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \ 1.529 + } else { \ 1.530 + /* This is the node we want to delete, but we will */\ 1.531 + /* instead swap it with its successor and delete the */\ 1.532 + /* successor. Record enough information to do the */\ 1.533 + /* swap later. rbp_r_xp is the a_node's parent. */\ 1.534 + rbp_r_xp = rbp_r_p; \ 1.535 + rbp_r_cmp = 1; /* Note that deletion is incomplete. */\ 1.536 + } \ 1.537 + } \ 1.538 + if (rbp_r_cmp == 1) { \ 1.539 + if (rbp_red_get(a_type, a_field, rbp_left_get(a_type, \ 1.540 + a_field, rbp_right_get(a_type, a_field, rbp_r_c))) \ 1.541 + == false) { \ 1.542 + rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \ 1.543 + if (rbp_red_get(a_type, a_field, rbp_r_t)) { \ 1.544 + /* Standard transform. */\ 1.545 + rbp_move_red_right(a_type, a_field, rbp_r_c, \ 1.546 + rbp_r_t); \ 1.547 + } else { \ 1.548 + /* Root-specific transform. 
*/\ 1.549 + rbp_red_set(a_type, a_field, rbp_r_c); \ 1.550 + rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \ 1.551 + if (rbp_red_get(a_type, a_field, rbp_r_u)) { \ 1.552 + rbp_black_set(a_type, a_field, rbp_r_u); \ 1.553 + rbp_rotate_right(a_type, a_field, rbp_r_c, \ 1.554 + rbp_r_t); \ 1.555 + rbp_rotate_left(a_type, a_field, rbp_r_c, \ 1.556 + rbp_r_u); \ 1.557 + rbp_right_set(a_type, a_field, rbp_r_t, \ 1.558 + rbp_r_u); \ 1.559 + } else { \ 1.560 + rbp_red_set(a_type, a_field, rbp_r_t); \ 1.561 + rbp_rotate_left(a_type, a_field, rbp_r_c, \ 1.562 + rbp_r_t); \ 1.563 + } \ 1.564 + } \ 1.565 + rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \ 1.566 + rbp_r_c = rbp_r_t; \ 1.567 + } else { \ 1.568 + /* Move right. */\ 1.569 + rbp_r_p = rbp_r_c; \ 1.570 + rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \ 1.571 + } \ 1.572 + } \ 1.573 + } \ 1.574 + if (rbp_r_cmp != 0) { \ 1.575 + while (true) { \ 1.576 + assert(rbp_r_p != &(a_tree)->rbt_nil); \ 1.577 + rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \ 1.578 + if (rbp_r_cmp < 0) { \ 1.579 + rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \ 1.580 + if (rbp_r_t == &(a_tree)->rbt_nil) { \ 1.581 + /* rbp_r_c now refers to the successor node to */\ 1.582 + /* relocate, and rbp_r_xp/a_node refer to the */\ 1.583 + /* context for the relocation. */\ 1.584 + if (rbp_left_get(a_type, a_field, rbp_r_xp) \ 1.585 + == (a_node)) { \ 1.586 + rbp_left_set(a_type, a_field, rbp_r_xp, \ 1.587 + rbp_r_c); \ 1.588 + } else { \ 1.589 + assert(rbp_right_get(a_type, a_field, \ 1.590 + rbp_r_xp) == (a_node)); \ 1.591 + rbp_right_set(a_type, a_field, rbp_r_xp, \ 1.592 + rbp_r_c); \ 1.593 + } \ 1.594 + rbp_left_set(a_type, a_field, rbp_r_c, \ 1.595 + rbp_left_get(a_type, a_field, (a_node))); \ 1.596 + rbp_right_set(a_type, a_field, rbp_r_c, \ 1.597 + rbp_right_get(a_type, a_field, (a_node))); \ 1.598 + rbp_color_set(a_type, a_field, rbp_r_c, \ 1.599 + rbp_red_get(a_type, a_field, (a_node))); \ 1.600 + if (rbp_left_get(a_type, a_field, rbp_r_p) \ 1.601 + == rbp_r_c) { \ 1.602 + rbp_left_set(a_type, a_field, rbp_r_p, \ 1.603 + &(a_tree)->rbt_nil); \ 1.604 + } else { \ 1.605 + assert(rbp_right_get(a_type, a_field, rbp_r_p) \ 1.606 + == rbp_r_c); \ 1.607 + rbp_right_set(a_type, a_field, rbp_r_p, \ 1.608 + &(a_tree)->rbt_nil); \ 1.609 + } \ 1.610 + break; \ 1.611 + } \ 1.612 + rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \ 1.613 + if (rbp_red_get(a_type, a_field, rbp_r_t) == false \ 1.614 + && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \ 1.615 + rbp_move_red_left(a_type, a_field, rbp_r_c, \ 1.616 + rbp_r_t); \ 1.617 + if (rbp_left_get(a_type, a_field, rbp_r_p) \ 1.618 + == rbp_r_c) { \ 1.619 + rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\ 1.620 + } else { \ 1.621 + rbp_right_set(a_type, a_field, rbp_r_p, \ 1.622 + rbp_r_t); \ 1.623 + } \ 1.624 + rbp_r_c = rbp_r_t; \ 1.625 + } else { \ 1.626 + rbp_r_p = rbp_r_c; \ 1.627 + rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \ 1.628 + } \ 1.629 + } else { \ 1.630 + /* Check whether to delete this node (it has to be */\ 1.631 + /* the correct node and a leaf node). */\ 1.632 + if (rbp_r_cmp == 0) { \ 1.633 + assert((a_node) == rbp_r_c); \ 1.634 + if (rbp_right_get(a_type, a_field, rbp_r_c) \ 1.635 + == &(a_tree)->rbt_nil) { \ 1.636 + /* Delete leaf node. 
*/\ 1.637 + if (rbp_left_get(a_type, a_field, rbp_r_c) \ 1.638 + != &(a_tree)->rbt_nil) { \ 1.639 + rbp_lean_right(a_type, a_field, rbp_r_c, \ 1.640 + rbp_r_t); \ 1.641 + rbp_right_set(a_type, a_field, rbp_r_t, \ 1.642 + &(a_tree)->rbt_nil); \ 1.643 + } else { \ 1.644 + rbp_r_t = &(a_tree)->rbt_nil; \ 1.645 + } \ 1.646 + if (rbp_left_get(a_type, a_field, rbp_r_p) \ 1.647 + == rbp_r_c) { \ 1.648 + rbp_left_set(a_type, a_field, rbp_r_p, \ 1.649 + rbp_r_t); \ 1.650 + } else { \ 1.651 + rbp_right_set(a_type, a_field, rbp_r_p, \ 1.652 + rbp_r_t); \ 1.653 + } \ 1.654 + break; \ 1.655 + } else { \ 1.656 + /* This is the node we want to delete, but we */\ 1.657 + /* will instead swap it with its successor */\ 1.658 + /* and delete the successor. Record enough */\ 1.659 + /* information to do the swap later. */\ 1.660 + /* rbp_r_xp is a_node's parent. */\ 1.661 + rbp_r_xp = rbp_r_p; \ 1.662 + } \ 1.663 + } \ 1.664 + rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c); \ 1.665 + rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \ 1.666 + if (rbp_red_get(a_type, a_field, rbp_r_u) == false) { \ 1.667 + rbp_move_red_right(a_type, a_field, rbp_r_c, \ 1.668 + rbp_r_t); \ 1.669 + if (rbp_left_get(a_type, a_field, rbp_r_p) \ 1.670 + == rbp_r_c) { \ 1.671 + rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t);\ 1.672 + } else { \ 1.673 + rbp_right_set(a_type, a_field, rbp_r_p, \ 1.674 + rbp_r_t); \ 1.675 + } \ 1.676 + rbp_r_c = rbp_r_t; \ 1.677 + } else { \ 1.678 + rbp_r_p = rbp_r_c; \ 1.679 + rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \ 1.680 + } \ 1.681 + } \ 1.682 + } \ 1.683 + } \ 1.684 + /* Update root. */\ 1.685 + (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s); \ 1.686 +} while (0) 1.687 + 1.688 +/* 1.689 + * The rb_wrap() macro provides a convenient way to wrap functions around the 1.690 + * cpp macros. The main benefits of wrapping are that 1) repeated macro 1.691 + * expansion can cause code bloat, especially for rb_{insert,remove)(), and 1.692 + * 2) type, linkage, comparison functions, etc. need not be specified at every 1.693 + * call point. 
1.694 + */ 1.695 + 1.696 +#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp) \ 1.697 +a_attr void \ 1.698 +a_prefix##new(a_tree_type *tree) { \ 1.699 + rb_new(a_type, a_field, tree); \ 1.700 +} \ 1.701 +a_attr a_type * \ 1.702 +a_prefix##first(a_tree_type *tree) { \ 1.703 + a_type *ret; \ 1.704 + rb_first(a_type, a_field, tree, ret); \ 1.705 + return (ret); \ 1.706 +} \ 1.707 +a_attr a_type * \ 1.708 +a_prefix##last(a_tree_type *tree) { \ 1.709 + a_type *ret; \ 1.710 + rb_last(a_type, a_field, tree, ret); \ 1.711 + return (ret); \ 1.712 +} \ 1.713 +a_attr a_type * \ 1.714 +a_prefix##next(a_tree_type *tree, a_type *node) { \ 1.715 + a_type *ret; \ 1.716 + rb_next(a_type, a_field, a_cmp, tree, node, ret); \ 1.717 + return (ret); \ 1.718 +} \ 1.719 +a_attr a_type * \ 1.720 +a_prefix##prev(a_tree_type *tree, a_type *node) { \ 1.721 + a_type *ret; \ 1.722 + rb_prev(a_type, a_field, a_cmp, tree, node, ret); \ 1.723 + return (ret); \ 1.724 +} \ 1.725 +a_attr a_type * \ 1.726 +a_prefix##search(a_tree_type *tree, a_type *key) { \ 1.727 + a_type *ret; \ 1.728 + rb_search(a_type, a_field, a_cmp, tree, key, ret); \ 1.729 + return (ret); \ 1.730 +} \ 1.731 +a_attr a_type * \ 1.732 +a_prefix##nsearch(a_tree_type *tree, a_type *key) { \ 1.733 + a_type *ret; \ 1.734 + rb_nsearch(a_type, a_field, a_cmp, tree, key, ret); \ 1.735 + return (ret); \ 1.736 +} \ 1.737 +a_attr a_type * \ 1.738 +a_prefix##psearch(a_tree_type *tree, a_type *key) { \ 1.739 + a_type *ret; \ 1.740 + rb_psearch(a_type, a_field, a_cmp, tree, key, ret); \ 1.741 + return (ret); \ 1.742 +} \ 1.743 +a_attr void \ 1.744 +a_prefix##insert(a_tree_type *tree, a_type *node) { \ 1.745 + rb_insert(a_type, a_field, a_cmp, tree, node); \ 1.746 +} \ 1.747 +a_attr void \ 1.748 +a_prefix##remove(a_tree_type *tree, a_type *node) { \ 1.749 + rb_remove(a_type, a_field, a_cmp, tree, node); \ 1.750 +} 1.751 + 1.752 +/* 1.753 + * The iterators simulate recursion via an array of pointers that store the 1.754 + * current path. This is critical to performance, since a series of calls to 1.755 + * rb_{next,prev}() would require time proportional to (n lg n), whereas this 1.756 + * implementation only requires time proportional to (n). 1.757 + * 1.758 + * Since the iterators cache a path down the tree, any tree modification may 1.759 + * cause the cached path to become invalid. In order to continue iteration, 1.760 + * use something like the following sequence: 1.761 + * 1.762 + * { 1.763 + * a_type *node, *tnode; 1.764 + * 1.765 + * rb_foreach_begin(a_type, a_field, a_tree, node) { 1.766 + * ... 1.767 + * rb_next(a_type, a_field, a_cmp, a_tree, node, tnode); 1.768 + * rb_remove(a_type, a_field, a_cmp, a_tree, node); 1.769 + * rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode); 1.770 + * ... 1.771 + * } rb_foreach_end(a_type, a_field, a_tree, node) 1.772 + * } 1.773 + * 1.774 + * Note that this idiom is not advised if every iteration modifies the tree, 1.775 + * since in that case there is no algorithmic complexity improvement over a 1.776 + * series of rb_{next,prev}() calls, thus making the setup overhead wasted 1.777 + * effort. 1.778 + */ 1.779 + 1.780 +#ifdef RB_NO_C99_VARARRAYS 1.781 + /* 1.782 + * Avoid using variable-length arrays, at the cost of using more stack space. 1.783 + * Size the path arrays such that they are always large enough, even if a 1.784 + * tree consumes all of memory. 
Since each node must contain a minimum of 1.785 + * two pointers, there can never be more nodes than: 1.786 + * 1.787 + * 1 << ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)) 1.788 + * 1.789 + * Since the depth of a tree is limited to 3*lg(#nodes), the maximum depth 1.790 + * is: 1.791 + * 1.792 + * (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))) 1.793 + * 1.794 + * This works out to a maximum depth of 87 and 180 for 32- and 64-bit 1.795 + * systems, respectively (approximatly 348 and 1440 bytes, respectively). 1.796 + */ 1.797 +# define rbp_compute_f_height(a_type, a_field, a_tree) 1.798 +# define rbp_f_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))) 1.799 +# define rbp_compute_fr_height(a_type, a_field, a_tree) 1.800 +# define rbp_fr_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))) 1.801 +#else 1.802 +# define rbp_compute_f_height(a_type, a_field, a_tree) \ 1.803 + /* Compute the maximum possible tree depth (3X the black height). */\ 1.804 + unsigned rbp_f_height; \ 1.805 + rbp_black_height(a_type, a_field, a_tree, rbp_f_height); \ 1.806 + rbp_f_height *= 3; 1.807 +# define rbp_compute_fr_height(a_type, a_field, a_tree) \ 1.808 + /* Compute the maximum possible tree depth (3X the black height). */\ 1.809 + unsigned rbp_fr_height; \ 1.810 + rbp_black_height(a_type, a_field, a_tree, rbp_fr_height); \ 1.811 + rbp_fr_height *= 3; 1.812 +#endif 1.813 + 1.814 +#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { \ 1.815 + rbp_compute_f_height(a_type, a_field, a_tree) \ 1.816 + { \ 1.817 + /* Initialize the path to contain the left spine. */\ 1.818 + a_type *rbp_f_path[rbp_f_height]; \ 1.819 + a_type *rbp_f_node; \ 1.820 + bool rbp_f_synced = false; \ 1.821 + unsigned rbp_f_depth = 0; \ 1.822 + if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \ 1.823 + rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \ 1.824 + rbp_f_depth++; \ 1.825 + while ((rbp_f_node = rbp_left_get(a_type, a_field, \ 1.826 + rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \ 1.827 + rbp_f_path[rbp_f_depth] = rbp_f_node; \ 1.828 + rbp_f_depth++; \ 1.829 + } \ 1.830 + } \ 1.831 + /* While the path is non-empty, iterate. */\ 1.832 + while (rbp_f_depth > 0) { \ 1.833 + (a_var) = rbp_f_path[rbp_f_depth-1]; 1.834 + 1.835 +/* Only use if modifying the tree during iteration. */ 1.836 +#define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node) \ 1.837 + /* Re-initialize the path to contain the path to a_node. */\ 1.838 + rbp_f_depth = 0; \ 1.839 + if (a_node != NULL) { \ 1.840 + if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \ 1.841 + rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \ 1.842 + rbp_f_depth++; \ 1.843 + rbp_f_node = rbp_f_path[0]; \ 1.844 + while (true) { \ 1.845 + int rbp_f_cmp = (a_cmp)((a_node), \ 1.846 + rbp_f_path[rbp_f_depth-1]); \ 1.847 + if (rbp_f_cmp < 0) { \ 1.848 + rbp_f_node = rbp_left_get(a_type, a_field, \ 1.849 + rbp_f_path[rbp_f_depth-1]); \ 1.850 + } else if (rbp_f_cmp > 0) { \ 1.851 + rbp_f_node = rbp_right_get(a_type, a_field, \ 1.852 + rbp_f_path[rbp_f_depth-1]); \ 1.853 + } else { \ 1.854 + break; \ 1.855 + } \ 1.856 + assert(rbp_f_node != &(a_tree)->rbt_nil); \ 1.857 + rbp_f_path[rbp_f_depth] = rbp_f_node; \ 1.858 + rbp_f_depth++; \ 1.859 + } \ 1.860 + } \ 1.861 + } \ 1.862 + rbp_f_synced = true; 1.863 + 1.864 +#define rb_foreach_end(a_type, a_field, a_tree, a_var) \ 1.865 + if (rbp_f_synced) { \ 1.866 + rbp_f_synced = false; \ 1.867 + continue; \ 1.868 + } \ 1.869 + /* Find the successor. 
*/\ 1.870 + if ((rbp_f_node = rbp_right_get(a_type, a_field, \ 1.871 + rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \ 1.872 + /* The successor is the left-most node in the right */\ 1.873 + /* subtree. */\ 1.874 + rbp_f_path[rbp_f_depth] = rbp_f_node; \ 1.875 + rbp_f_depth++; \ 1.876 + while ((rbp_f_node = rbp_left_get(a_type, a_field, \ 1.877 + rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \ 1.878 + rbp_f_path[rbp_f_depth] = rbp_f_node; \ 1.879 + rbp_f_depth++; \ 1.880 + } \ 1.881 + } else { \ 1.882 + /* The successor is above the current node. Unwind */\ 1.883 + /* until a left-leaning edge is removed from the */\ 1.884 + /* path, or the path is empty. */\ 1.885 + for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \ 1.886 + if (rbp_left_get(a_type, a_field, \ 1.887 + rbp_f_path[rbp_f_depth-1]) \ 1.888 + == rbp_f_path[rbp_f_depth]) { \ 1.889 + break; \ 1.890 + } \ 1.891 + } \ 1.892 + } \ 1.893 + } \ 1.894 + } \ 1.895 +} 1.896 + 1.897 +#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { \ 1.898 + rbp_compute_fr_height(a_type, a_field, a_tree) \ 1.899 + { \ 1.900 + /* Initialize the path to contain the right spine. */\ 1.901 + a_type *rbp_fr_path[rbp_fr_height]; \ 1.902 + a_type *rbp_fr_node; \ 1.903 + bool rbp_fr_synced = false; \ 1.904 + unsigned rbp_fr_depth = 0; \ 1.905 + if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \ 1.906 + rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \ 1.907 + rbp_fr_depth++; \ 1.908 + while ((rbp_fr_node = rbp_right_get(a_type, a_field, \ 1.909 + rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \ 1.910 + rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \ 1.911 + rbp_fr_depth++; \ 1.912 + } \ 1.913 + } \ 1.914 + /* While the path is non-empty, iterate. */\ 1.915 + while (rbp_fr_depth > 0) { \ 1.916 + (a_var) = rbp_fr_path[rbp_fr_depth-1]; 1.917 + 1.918 +/* Only use if modifying the tree during iteration. */ 1.919 +#define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \ 1.920 + /* Re-initialize the path to contain the path to a_node. */\ 1.921 + rbp_fr_depth = 0; \ 1.922 + if (a_node != NULL) { \ 1.923 + if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \ 1.924 + rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \ 1.925 + rbp_fr_depth++; \ 1.926 + rbp_fr_node = rbp_fr_path[0]; \ 1.927 + while (true) { \ 1.928 + int rbp_fr_cmp = (a_cmp)((a_node), \ 1.929 + rbp_fr_path[rbp_fr_depth-1]); \ 1.930 + if (rbp_fr_cmp < 0) { \ 1.931 + rbp_fr_node = rbp_left_get(a_type, a_field, \ 1.932 + rbp_fr_path[rbp_fr_depth-1]); \ 1.933 + } else if (rbp_fr_cmp > 0) { \ 1.934 + rbp_fr_node = rbp_right_get(a_type, a_field,\ 1.935 + rbp_fr_path[rbp_fr_depth-1]); \ 1.936 + } else { \ 1.937 + break; \ 1.938 + } \ 1.939 + assert(rbp_fr_node != &(a_tree)->rbt_nil); \ 1.940 + rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \ 1.941 + rbp_fr_depth++; \ 1.942 + } \ 1.943 + } \ 1.944 + } \ 1.945 + rbp_fr_synced = true; 1.946 + 1.947 +#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \ 1.948 + if (rbp_fr_synced) { \ 1.949 + rbp_fr_synced = false; \ 1.950 + continue; \ 1.951 + } \ 1.952 + if (rbp_fr_depth == 0) { \ 1.953 + /* rb_foreach_reverse_sync() was called with a NULL */\ 1.954 + /* a_node. */\ 1.955 + break; \ 1.956 + } \ 1.957 + /* Find the predecessor. */\ 1.958 + if ((rbp_fr_node = rbp_left_get(a_type, a_field, \ 1.959 + rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \ 1.960 + /* The predecessor is the right-most node in the left */\ 1.961 + /* subtree. 
*/\ 1.962 + rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \ 1.963 + rbp_fr_depth++; \ 1.964 + while ((rbp_fr_node = rbp_right_get(a_type, a_field, \ 1.965 + rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) {\ 1.966 + rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \ 1.967 + rbp_fr_depth++; \ 1.968 + } \ 1.969 + } else { \ 1.970 + /* The predecessor is above the current node. Unwind */\ 1.971 + /* until a right-leaning edge is removed from the */\ 1.972 + /* path, or the path is empty. */\ 1.973 + for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) {\ 1.974 + if (rbp_right_get(a_type, a_field, \ 1.975 + rbp_fr_path[rbp_fr_depth-1]) \ 1.976 + == rbp_fr_path[rbp_fr_depth]) { \ 1.977 + break; \ 1.978 + } \ 1.979 + } \ 1.980 + } \ 1.981 + } \ 1.982 + } \ 1.983 +} 1.984 + 1.985 +#endif /* RB_H_ */
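The usage notes at the top of rb.h describe the required defines and the comparison-function contract, but the patch itself contains no example of instantiating the macros. Below is an illustrative, stand-alone sketch (not part of the patch) of how a consumer might use this header; the names my_node_t, my_tree_t, my_cmp and the example_ prefix are hypothetical, chosen only for this illustration.

#include <assert.h>
#include <stdbool.h>
#include <stdio.h>
#include "rb.h"

typedef struct my_node_s my_node_t;
struct my_node_s {
    int key;
    rb_node(my_node_t) link;    /* embedded left/right-with-color-bit linkage */
};
typedef rb_tree(my_node_t) my_tree_t;

/* Returns -1/0/1; the rb_* macros always pass the node (or key) that was
 * given to the macro as the first argument. */
static int
my_cmp(my_node_t *a, my_node_t *b)
{
    return ((a->key > b->key) - (a->key < b->key));
}

/* Generates static example_new(), example_insert(), example_remove(),
 * example_search(), example_first(), example_next(), etc. */
rb_wrap(static, example_, my_tree_t, my_node_t, link, my_cmp)

int
main(void)
{
    my_tree_t tree;
    my_node_t nodes[8], key, *node;
    unsigned i;

    example_new(&tree);
    for (i = 0; i < 8; i++) {
        nodes[i].key = (int)((i * 3) % 8);  /* distinct keys; equal keys would
                                             * need a tie-breaking comparator */
        example_insert(&tree, &nodes[i]);
    }

    key.key = 5;
    node = example_search(&tree, &key);     /* exact match, or NULL */
    assert(node != NULL && node->key == 5);

    /* In-order walk using the iterator macros, which cache the current path
     * and therefore take time proportional to n rather than n lg n. */
    rb_foreach_begin(my_node_t, link, &tree, node) {
        printf("%d\n", node->key);
    } rb_foreach_end(my_node_t, link, &tree, node)

    example_remove(&tree, &nodes[3]);
    return (0);
}

Because rb_wrap() always emits the full set of wrapper functions, declaring them static as above can produce unused-function warnings for the wrappers a caller never uses; this mirrors the way the allocator's own callers wrap their trees.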