Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
/******************************************************************************
 *
 * Copyright (C) 2008 Jason Evans <jasone@FreeBSD.org>.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice(s), this list of conditions and the following disclaimer
 *    unmodified other than the allowable addition of one or more
 *    copyright notices.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice(s), this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
 * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 ******************************************************************************
 *
 * cpp macro implementation of left-leaning red-black trees.
 *
 * Usage:
 *
 *   (Optional.)
 *   #define SIZEOF_PTR ...
 *   #define SIZEOF_PTR_2POW ...
 *   #define RB_NO_C99_VARARRAYS
 *
 *   (Optional, see assert(3).)
 *   #define NDEBUG
 *
 *   (Required.)
 *   #include <assert.h>
 *   #include <rb.h>
 *   ...
 *
 * All operations are done non-recursively.  Parent pointers are not used, and
 * color bits are stored in the least significant bit of right-child pointers,
 * thus making node linkage as compact as is possible for red-black trees.
 *
 * Some macros use a comparison function pointer, which is expected to have the
 * following prototype:
 *
 *   int (a_cmp *)(a_type *a_node, a_type *a_other);
 *                         ^^^^^^
 *                      or a_key
 * Interpretation of comparison function return values:
 *
 *   -1 : a_node <  a_other
 *    0 : a_node == a_other
 *    1 : a_node >  a_other
 *
 * In all cases, the a_node or a_key macro argument is the first argument to
 * the comparison function, which makes it possible to write comparison
 * functions that treat the first argument specially.
 *
 ******************************************************************************/

#ifndef RB_H_
#define RB_H_

#if 0
#include <sys/cdefs.h>
__FBSDID("$FreeBSD: head/lib/libc/stdlib/rb.h 178995 2008-05-14 18:33:13Z jasone $");
#endif

/* Node structure. */
#define rb_node(a_type) \
struct { \
    a_type *rbn_left; \
    a_type *rbn_right_red; \
}

/* Root structure. */
#define rb_tree(a_type) \
struct { \
    a_type *rbt_root; \
    a_type rbt_nil; \
}
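
/*
 * Illustrative sketch (editorial, not part of the original header): one way
 * to declare a node type that embeds its linkage via rb_node(), a matching
 * tree type via rb_tree(), and a comparison function that follows the
 * -1/0/1 contract described above.  The names ex_node_t, ex_tree_t and
 * ex_cmp are hypothetical examples only.
 */
#if 0
typedef struct ex_node_s ex_node_t;
struct ex_node_s {
    int key;                    /* Payload used for ordering. */
    rb_node(ex_node_t) link;    /* Embedded left/right+color linkage. */
};
typedef rb_tree(ex_node_t) ex_tree_t;

static int
ex_cmp(ex_node_t *a_node, ex_node_t *a_other)
{
    /* The first argument is always a_node/a_key, so it may be a */
    /* stack-allocated dummy with only its key fields initialized. */
    if (a_node->key < a_other->key)
        return (-1);
    if (a_node->key > a_other->key)
        return (1);
    return (0);
}
#endif
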
/* Left accessors. */
#define rbp_left_get(a_type, a_field, a_node) \
    ((a_node)->a_field.rbn_left)
#define rbp_left_set(a_type, a_field, a_node, a_left) do { \
    (a_node)->a_field.rbn_left = a_left; \
} while (0)

/* Right accessors. */
#define rbp_right_get(a_type, a_field, a_node) \
    ((a_type *) (((intptr_t) (a_node)->a_field.rbn_right_red) \
      & ((ssize_t)-2)))
#define rbp_right_set(a_type, a_field, a_node, a_right) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) a_right) \
      | (((uintptr_t) (a_node)->a_field.rbn_right_red) & ((size_t)1))); \
} while (0)

/* Color accessors. */
#define rbp_red_get(a_type, a_field, a_node) \
    ((bool) (((uintptr_t) (a_node)->a_field.rbn_right_red) \
      & ((size_t)1)))
#define rbp_color_set(a_type, a_field, a_node, a_red) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) ((((intptr_t) \
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)) \
      | ((ssize_t)a_red)); \
} while (0)
#define rbp_red_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((uintptr_t) \
      (a_node)->a_field.rbn_right_red) | ((size_t)1)); \
} while (0)
#define rbp_black_set(a_type, a_field, a_node) do { \
    (a_node)->a_field.rbn_right_red = (a_type *) (((intptr_t) \
      (a_node)->a_field.rbn_right_red) & ((ssize_t)-2)); \
} while (0)
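
/*
 * Editorial note: storing the color in the low bit of rbn_right_red relies on
 * node pointers being at least 2-byte aligned, so that the least significant
 * bit of a valid pointer is always zero and can be reused as the red/black
 * flag.  The accessors above mask that bit back out before the pointer is
 * used.
 */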

/* Node initializer. */
#define rbp_node_new(a_type, a_field, a_tree, a_node) do { \
    rbp_left_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_right_set(a_type, a_field, (a_node), &(a_tree)->rbt_nil); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

/* Tree initializer. */
#define rb_new(a_type, a_field, a_tree) do { \
    (a_tree)->rbt_root = &(a_tree)->rbt_nil; \
    rbp_node_new(a_type, a_field, a_tree, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &(a_tree)->rbt_nil); \
} while (0)
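
/*
 * Editorial note: rbt_nil is a sentinel node embedded directly in the tree
 * structure, so rb_new() fully initializes a tree without any dynamic
 * allocation; an empty tree simply points its root at its own sentinel.
 */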

/* Tree operations. */
#define rbp_black_height(a_type, a_field, a_tree, r_height) do { \
    a_type *rbp_bh_t; \
    for (rbp_bh_t = (a_tree)->rbt_root, (r_height) = 0; \
      rbp_bh_t != &(a_tree)->rbt_nil; \
      rbp_bh_t = rbp_left_get(a_type, a_field, rbp_bh_t)) { \
        if (rbp_red_get(a_type, a_field, rbp_bh_t) == false) { \
            (r_height)++; \
        } \
    } \
} while (0)

#define rbp_first(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_left_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_left_get(a_type, a_field, (r_node))) { \
    } \
} while (0)

#define rbp_last(a_type, a_field, a_tree, a_root, r_node) do { \
    for ((r_node) = (a_root); \
      rbp_right_get(a_type, a_field, (r_node)) != &(a_tree)->rbt_nil; \
      (r_node) = rbp_right_get(a_type, a_field, (r_node))) { \
    } \
} while (0)

#define rbp_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_right_get(a_type, a_field, (a_node)) \
      != &(a_tree)->rbt_nil) { \
        rbp_first(a_type, a_field, a_tree, rbp_right_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_n_t = (a_tree)->rbt_root; \
        assert(rbp_n_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_n_cmp = (a_cmp)((a_node), rbp_n_t); \
            if (rbp_n_cmp < 0) { \
                (r_node) = rbp_n_t; \
                rbp_n_t = rbp_left_get(a_type, a_field, rbp_n_t); \
            } else if (rbp_n_cmp > 0) { \
                rbp_n_t = rbp_right_get(a_type, a_field, rbp_n_t); \
            } else { \
                break; \
            } \
            assert(rbp_n_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)

#define rbp_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    if (rbp_left_get(a_type, a_field, (a_node)) != &(a_tree)->rbt_nil) { \
        rbp_last(a_type, a_field, a_tree, rbp_left_get(a_type, \
          a_field, (a_node)), (r_node)); \
    } else { \
        a_type *rbp_p_t = (a_tree)->rbt_root; \
        assert(rbp_p_t != &(a_tree)->rbt_nil); \
        (r_node) = &(a_tree)->rbt_nil; \
        while (true) { \
            int rbp_p_cmp = (a_cmp)((a_node), rbp_p_t); \
            if (rbp_p_cmp < 0) { \
                rbp_p_t = rbp_left_get(a_type, a_field, rbp_p_t); \
            } else if (rbp_p_cmp > 0) { \
                (r_node) = rbp_p_t; \
                rbp_p_t = rbp_right_get(a_type, a_field, rbp_p_t); \
            } else { \
                break; \
            } \
            assert(rbp_p_t != &(a_tree)->rbt_nil); \
        } \
    } \
} while (0)

#define rb_first(a_type, a_field, a_tree, r_node) do { \
    rbp_first(a_type, a_field, a_tree, (a_tree)->rbt_root, (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_last(a_type, a_field, a_tree, r_node) do { \
    rbp_last(a_type, a_field, a_tree, (a_tree)->rbt_root, r_node); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_next(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_next(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

#define rb_prev(a_type, a_field, a_cmp, a_tree, a_node, r_node) do { \
    rbp_prev(a_type, a_field, a_cmp, a_tree, (a_node), (r_node)); \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)
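
/*
 * Illustrative sketch (editorial): in-order traversal using the
 * rb_first()/rb_next() macros directly.  Assumes the hypothetical
 * ex_node_t/ex_tree_t/ex_cmp declarations sketched earlier; note that the
 * a_node and r_node arguments of rb_next() must be distinct variables.
 */
#if 0
static void
ex_walk(ex_tree_t *tree)
{
    ex_node_t *node, *next;

    rb_first(ex_node_t, link, tree, node);
    while (node != NULL) {
        /* Visit node here. */
        rb_next(ex_node_t, link, ex_cmp, tree, node, next);
        node = next;
    }
}
#endif
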
#define rb_search(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    int rbp_se_cmp; \
    (r_node) = (a_tree)->rbt_root; \
    while ((r_node) != &(a_tree)->rbt_nil \
      && (rbp_se_cmp = (a_cmp)((a_key), (r_node))) != 0) { \
        if (rbp_se_cmp < 0) { \
            (r_node) = rbp_left_get(a_type, a_field, (r_node)); \
        } else { \
            (r_node) = rbp_right_get(a_type, a_field, (r_node)); \
        } \
    } \
    if ((r_node) == &(a_tree)->rbt_nil) { \
        (r_node) = NULL; \
    } \
} while (0)

/*
 * Find a match if it exists.  Otherwise, find the next greater node, if one
 * exists.
 */
#define rb_nsearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ns_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ns_t != &(a_tree)->rbt_nil) { \
        int rbp_ns_cmp = (a_cmp)((a_key), rbp_ns_t); \
        if (rbp_ns_cmp < 0) { \
            (r_node) = rbp_ns_t; \
            rbp_ns_t = rbp_left_get(a_type, a_field, rbp_ns_t); \
        } else if (rbp_ns_cmp > 0) { \
            rbp_ns_t = rbp_right_get(a_type, a_field, rbp_ns_t); \
        } else { \
            (r_node) = rbp_ns_t; \
            break; \
        } \
    } \
} while (0)

/*
 * Find a match if it exists.  Otherwise, find the previous lesser node, if one
 * exists.
 */
#define rb_psearch(a_type, a_field, a_cmp, a_tree, a_key, r_node) do { \
    a_type *rbp_ps_t = (a_tree)->rbt_root; \
    (r_node) = NULL; \
    while (rbp_ps_t != &(a_tree)->rbt_nil) { \
        int rbp_ps_cmp = (a_cmp)((a_key), rbp_ps_t); \
        if (rbp_ps_cmp < 0) { \
            rbp_ps_t = rbp_left_get(a_type, a_field, rbp_ps_t); \
        } else if (rbp_ps_cmp > 0) { \
            (r_node) = rbp_ps_t; \
            rbp_ps_t = rbp_right_get(a_type, a_field, rbp_ps_t); \
        } else { \
            (r_node) = rbp_ps_t; \
            break; \
        } \
    } \
} while (0)
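
/*
 * Illustrative sketch (editorial): the a_key argument is passed as the first
 * argument to a_cmp, so a stack-allocated "key" node with only its ordering
 * fields initialized is sufficient.  Reuses the hypothetical ex_* example
 * from above.
 */
#if 0
static ex_node_t *
ex_find_at_least(ex_tree_t *tree, int key)
{
    ex_node_t key_node, *result;

    key_node.key = key;
    /* rb_search() would return an exact match or NULL; rb_nsearch() */
    /* returns an exact match, or the smallest node greater than the key. */
    rb_nsearch(ex_node_t, link, ex_cmp, tree, &key_node, result);
    return (result);
}
#endif
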
#define rbp_rotate_left(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_right_set(a_type, a_field, (a_node), \
      rbp_left_get(a_type, a_field, (r_node))); \
    rbp_left_set(a_type, a_field, (r_node), (a_node)); \
} while (0)

#define rbp_rotate_right(a_type, a_field, a_node, r_node) do { \
    (r_node) = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_left_set(a_type, a_field, (a_node), \
      rbp_right_get(a_type, a_field, (r_node))); \
    rbp_right_set(a_type, a_field, (r_node), (a_node)); \
} while (0)

#define rbp_lean_left(a_type, a_field, a_node, r_node) do { \
    bool rbp_ll_red; \
    rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    rbp_ll_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_ll_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

#define rbp_lean_right(a_type, a_field, a_node, r_node) do { \
    bool rbp_lr_red; \
    rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
    rbp_lr_red = rbp_red_get(a_type, a_field, (a_node)); \
    rbp_color_set(a_type, a_field, (r_node), rbp_lr_red); \
    rbp_red_set(a_type, a_field, (a_node)); \
} while (0)

#define rbp_move_red_left(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrl_t, *rbp_mrl_u; \
    rbp_mrl_t = rbp_left_get(a_type, a_field, (a_node)); \
    rbp_red_set(a_type, a_field, rbp_mrl_t); \
    rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
    rbp_mrl_u = rbp_left_get(a_type, a_field, rbp_mrl_t); \
    if (rbp_red_get(a_type, a_field, rbp_mrl_u)) { \
        rbp_rotate_right(a_type, a_field, rbp_mrl_t, rbp_mrl_u); \
        rbp_right_set(a_type, a_field, (a_node), rbp_mrl_u); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        rbp_mrl_t = rbp_right_get(a_type, a_field, (a_node)); \
        if (rbp_red_get(a_type, a_field, rbp_mrl_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrl_t); \
            rbp_red_set(a_type, a_field, (a_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrl_t); \
            rbp_left_set(a_type, a_field, (r_node), rbp_mrl_t); \
        } else { \
            rbp_black_set(a_type, a_field, (a_node)); \
        } \
    } else { \
        rbp_red_set(a_type, a_field, (a_node)); \
        rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
    } \
} while (0)

#define rbp_move_red_right(a_type, a_field, a_node, r_node) do { \
    a_type *rbp_mrr_t; \
    rbp_mrr_t = rbp_left_get(a_type, a_field, (a_node)); \
    if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
        a_type *rbp_mrr_u, *rbp_mrr_v; \
        rbp_mrr_u = rbp_right_get(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_v = rbp_left_get(a_type, a_field, rbp_mrr_u); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_v)) { \
            rbp_color_set(a_type, a_field, rbp_mrr_u, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_black_set(a_type, a_field, rbp_mrr_v); \
            rbp_rotate_left(a_type, a_field, rbp_mrr_t, rbp_mrr_u); \
            rbp_left_set(a_type, a_field, (a_node), rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_color_set(a_type, a_field, rbp_mrr_t, \
              rbp_red_get(a_type, a_field, (a_node))); \
            rbp_red_set(a_type, a_field, rbp_mrr_u); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } \
        rbp_red_set(a_type, a_field, (a_node)); \
    } else { \
        rbp_red_set(a_type, a_field, rbp_mrr_t); \
        rbp_mrr_t = rbp_left_get(a_type, a_field, rbp_mrr_t); \
        if (rbp_red_get(a_type, a_field, rbp_mrr_t)) { \
            rbp_black_set(a_type, a_field, rbp_mrr_t); \
            rbp_rotate_right(a_type, a_field, (a_node), (r_node)); \
            rbp_rotate_left(a_type, a_field, (a_node), rbp_mrr_t); \
            rbp_right_set(a_type, a_field, (r_node), rbp_mrr_t); \
        } else { \
            rbp_rotate_left(a_type, a_field, (a_node), (r_node)); \
        } \
    } \
} while (0)
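
/*
 * Insert a_node into a_tree.  Editorial summary of the inline comments
 * below: insertion is performed top-down in a single pass, splitting 4-nodes
 * on the way down so that no rebalancing pass back up the tree is needed.
 * a_node must not compare equal to any node already in the tree (the macro
 * asserts this in debug builds).
 */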
#define rb_insert(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_i_s; \
    a_type *rbp_i_g, *rbp_i_p, *rbp_i_c, *rbp_i_t, *rbp_i_u; \
    int rbp_i_cmp = 0; \
    rbp_i_g = &(a_tree)->rbt_nil; \
    rbp_left_set(a_type, a_field, &rbp_i_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_i_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_i_s); \
    rbp_i_p = &rbp_i_s; \
    rbp_i_c = (a_tree)->rbt_root; \
    /* Iteratively search down the tree for the insertion point, */ \
    /* splitting 4-nodes as they are encountered.  At the end of each */ \
    /* iteration, rbp_i_g->rbp_i_p->rbp_i_c is a 3-level path down */ \
    /* the tree, assuming a sufficiently deep tree. */ \
    while (rbp_i_c != &(a_tree)->rbt_nil) { \
        rbp_i_t = rbp_left_get(a_type, a_field, rbp_i_c); \
        rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
        if (rbp_red_get(a_type, a_field, rbp_i_t) \
          && rbp_red_get(a_type, a_field, rbp_i_u)) { \
            /* rbp_i_c is the top of a logical 4-node, so split it. */ \
            /* This iteration does not move down the tree, due to the */ \
            /* disruptiveness of node splitting. */ \
            /* */ \
            /* Rotate right. */ \
            rbp_rotate_right(a_type, a_field, rbp_i_c, rbp_i_t); \
            /* Pass red links up one level. */ \
            rbp_i_u = rbp_left_get(a_type, a_field, rbp_i_t); \
            rbp_black_set(a_type, a_field, rbp_i_u); \
            if (rbp_left_get(a_type, a_field, rbp_i_p) == rbp_i_c) { \
                rbp_left_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_i_c = rbp_i_t; \
            } else { \
                /* rbp_i_c was the right child of rbp_i_p, so rotate */ \
                /* left in order to maintain the left-leaning */ \
                /* invariant. */ \
                assert(rbp_right_get(a_type, a_field, rbp_i_p) \
                  == rbp_i_c); \
                rbp_right_set(a_type, a_field, rbp_i_p, rbp_i_t); \
                rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_u); \
                if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
                    rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } else { \
                    assert(rbp_right_get(a_type, a_field, rbp_i_g) \
                      == rbp_i_p); \
                    rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_u); \
                } \
                rbp_i_p = rbp_i_u; \
                rbp_i_cmp = (a_cmp)((a_node), rbp_i_p); \
                if (rbp_i_cmp < 0) { \
                    rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_p); \
                } else { \
                    assert(rbp_i_cmp > 0); \
                    rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_p); \
                } \
                continue; \
            } \
        } \
        rbp_i_g = rbp_i_p; \
        rbp_i_p = rbp_i_c; \
        rbp_i_cmp = (a_cmp)((a_node), rbp_i_c); \
        if (rbp_i_cmp < 0) { \
            rbp_i_c = rbp_left_get(a_type, a_field, rbp_i_c); \
        } else { \
            assert(rbp_i_cmp > 0); \
            rbp_i_c = rbp_right_get(a_type, a_field, rbp_i_c); \
        } \
    } \
    /* rbp_i_p now refers to the node under which to insert. */ \
    rbp_node_new(a_type, a_field, a_tree, (a_node)); \
    if (rbp_i_cmp > 0) { \
        rbp_right_set(a_type, a_field, rbp_i_p, (a_node)); \
        rbp_lean_left(a_type, a_field, rbp_i_p, rbp_i_t); \
        if (rbp_left_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
            rbp_left_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } else if (rbp_right_get(a_type, a_field, rbp_i_g) == rbp_i_p) { \
            rbp_right_set(a_type, a_field, rbp_i_g, rbp_i_t); \
        } \
    } else { \
        rbp_left_set(a_type, a_field, rbp_i_p, (a_node)); \
    } \
    /* Update the root and make sure that it is black. */ \
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_i_s); \
    rbp_black_set(a_type, a_field, (a_tree)->rbt_root); \
} while (0)
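
/*
 * Remove a_node from a_tree.  Editorial summary of the inline comments
 * below: like insertion, removal is performed in a single top-down pass;
 * 2-nodes are converted to 3- or 4-nodes on the way down, the target is
 * swapped with its in-order successor if it is not a leaf, and the successor
 * is unlinked at the bottom.  a_node must be present in the tree (a_cmp must
 * find it).
 */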
#define rb_remove(a_type, a_field, a_cmp, a_tree, a_node) do { \
    a_type rbp_r_s; \
    a_type *rbp_r_p, *rbp_r_c, *rbp_r_xp, *rbp_r_t, *rbp_r_u; \
    int rbp_r_cmp; \
    rbp_left_set(a_type, a_field, &rbp_r_s, (a_tree)->rbt_root); \
    rbp_right_set(a_type, a_field, &rbp_r_s, &(a_tree)->rbt_nil); \
    rbp_black_set(a_type, a_field, &rbp_r_s); \
    rbp_r_p = &rbp_r_s; \
    rbp_r_c = (a_tree)->rbt_root; \
    rbp_r_xp = &(a_tree)->rbt_nil; \
    /* Iterate down the tree, but always transform 2-nodes to 3- or */ \
    /* 4-nodes in order to maintain the invariant that the current */ \
    /* node is not a 2-node.  This allows simple deletion once a leaf */ \
    /* is reached.  Handle the root specially though, since there may */ \
    /* be no way to convert it from a 2-node to a 3-node. */ \
    rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
    if (rbp_r_cmp < 0) { \
        rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
        rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
        if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
          && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
            /* Apply standard transform to prepare for left move. */ \
            rbp_move_red_left(a_type, a_field, rbp_r_c, rbp_r_t); \
            rbp_black_set(a_type, a_field, rbp_r_t); \
            rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            rbp_r_c = rbp_r_t; \
        } else { \
            /* Move left. */ \
            rbp_r_p = rbp_r_c; \
            rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
        } \
    } else { \
        if (rbp_r_cmp == 0) { \
            assert((a_node) == rbp_r_c); \
            if (rbp_right_get(a_type, a_field, rbp_r_c) \
              == &(a_tree)->rbt_nil) { \
                /* Delete root node (which is also a leaf node). */ \
                if (rbp_left_get(a_type, a_field, rbp_r_c) \
                  != &(a_tree)->rbt_nil) { \
                    rbp_lean_right(a_type, a_field, rbp_r_c, rbp_r_t); \
                    rbp_right_set(a_type, a_field, rbp_r_t, \
                      &(a_tree)->rbt_nil); \
                } else { \
                    rbp_r_t = &(a_tree)->rbt_nil; \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
            } else { \
                /* This is the node we want to delete, but we will */ \
                /* instead swap it with its successor and delete the */ \
                /* successor.  Record enough information to do the */ \
                /* swap later.  rbp_r_xp is the a_node's parent. */ \
                rbp_r_xp = rbp_r_p; \
                rbp_r_cmp = 1; /* Note that deletion is incomplete. */ \
            } \
        } \
        if (rbp_r_cmp == 1) { \
            if (rbp_red_get(a_type, a_field, rbp_left_get(a_type, \
              a_field, rbp_right_get(a_type, a_field, rbp_r_c))) \
              == false) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_red_get(a_type, a_field, rbp_r_t)) { \
                    /* Standard transform. */ \
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                } else { \
                    /* Root-specific transform. */ \
                    rbp_red_set(a_type, a_field, rbp_r_c); \
                    rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                    if (rbp_red_get(a_type, a_field, rbp_r_u)) { \
                        rbp_black_set(a_type, a_field, rbp_r_u); \
                        rbp_rotate_right(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_u); \
                        rbp_right_set(a_type, a_field, rbp_r_t, \
                          rbp_r_u); \
                    } else { \
                        rbp_red_set(a_type, a_field, rbp_r_t); \
                        rbp_rotate_left(a_type, a_field, rbp_r_c, \
                          rbp_r_t); \
                    } \
                } \
                rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                rbp_r_c = rbp_r_t; \
            } else { \
                /* Move right. */ \
                rbp_r_p = rbp_r_c; \
                rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
            } \
        } \
    } \
    if (rbp_r_cmp != 0) { \
        while (true) { \
            assert(rbp_r_p != &(a_tree)->rbt_nil); \
            rbp_r_cmp = (a_cmp)((a_node), rbp_r_c); \
            if (rbp_r_cmp < 0) { \
                rbp_r_t = rbp_left_get(a_type, a_field, rbp_r_c); \
                if (rbp_r_t == &(a_tree)->rbt_nil) { \
                    /* rbp_r_c now refers to the successor node to */ \
                    /* relocate, and rbp_r_xp/a_node refer to the */ \
                    /* context for the relocation. */ \
                    if (rbp_left_get(a_type, a_field, rbp_r_xp) \
                      == (a_node)) { \
                        rbp_left_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, \
                          rbp_r_xp) == (a_node)); \
                        rbp_right_set(a_type, a_field, rbp_r_xp, \
                          rbp_r_c); \
                    } \
                    rbp_left_set(a_type, a_field, rbp_r_c, \
                      rbp_left_get(a_type, a_field, (a_node))); \
                    rbp_right_set(a_type, a_field, rbp_r_c, \
                      rbp_right_get(a_type, a_field, (a_node))); \
                    rbp_color_set(a_type, a_field, rbp_r_c, \
                      rbp_red_get(a_type, a_field, (a_node))); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } else { \
                        assert(rbp_right_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c); \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          &(a_tree)->rbt_nil); \
                    } \
                    break; \
                } \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_t) == false \
                  && rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_left(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_left_get(a_type, a_field, rbp_r_c); \
                } \
            } else { \
                /* Check whether to delete this node (it has to be */ \
                /* the correct node and a leaf node). */ \
                if (rbp_r_cmp == 0) { \
                    assert((a_node) == rbp_r_c); \
                    if (rbp_right_get(a_type, a_field, rbp_r_c) \
                      == &(a_tree)->rbt_nil) { \
                        /* Delete leaf node. */ \
                        if (rbp_left_get(a_type, a_field, rbp_r_c) \
                          != &(a_tree)->rbt_nil) { \
                            rbp_lean_right(a_type, a_field, rbp_r_c, \
                              rbp_r_t); \
                            rbp_right_set(a_type, a_field, rbp_r_t, \
                              &(a_tree)->rbt_nil); \
                        } else { \
                            rbp_r_t = &(a_tree)->rbt_nil; \
                        } \
                        if (rbp_left_get(a_type, a_field, rbp_r_p) \
                          == rbp_r_c) { \
                            rbp_left_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } else { \
                            rbp_right_set(a_type, a_field, rbp_r_p, \
                              rbp_r_t); \
                        } \
                        break; \
                    } else { \
                        /* This is the node we want to delete, but we */ \
                        /* will instead swap it with its successor */ \
                        /* and delete the successor.  Record enough */ \
                        /* information to do the swap later. */ \
                        /* rbp_r_xp is a_node's parent. */ \
                        rbp_r_xp = rbp_r_p; \
                    } \
                } \
                rbp_r_t = rbp_right_get(a_type, a_field, rbp_r_c); \
                rbp_r_u = rbp_left_get(a_type, a_field, rbp_r_t); \
                if (rbp_red_get(a_type, a_field, rbp_r_u) == false) { \
                    rbp_move_red_right(a_type, a_field, rbp_r_c, \
                      rbp_r_t); \
                    if (rbp_left_get(a_type, a_field, rbp_r_p) \
                      == rbp_r_c) { \
                        rbp_left_set(a_type, a_field, rbp_r_p, rbp_r_t); \
                    } else { \
                        rbp_right_set(a_type, a_field, rbp_r_p, \
                          rbp_r_t); \
                    } \
                    rbp_r_c = rbp_r_t; \
                } else { \
                    rbp_r_p = rbp_r_c; \
                    rbp_r_c = rbp_right_get(a_type, a_field, rbp_r_c); \
                } \
            } \
        } \
    } \
    /* Update root. */ \
    (a_tree)->rbt_root = rbp_left_get(a_type, a_field, &rbp_r_s); \
} while (0)

/*
 * The rb_wrap() macro provides a convenient way to wrap functions around the
 * cpp macros.  The main benefits of wrapping are that 1) repeated macro
 * expansion can cause code bloat, especially for rb_{insert,remove}(), and
 * 2) type, linkage, comparison functions, etc. need not be specified at every
 * call point.
 */

#define rb_wrap(a_attr, a_prefix, a_tree_type, a_type, a_field, a_cmp) \
a_attr void \
a_prefix##new(a_tree_type *tree) { \
    rb_new(a_type, a_field, tree); \
} \
a_attr a_type * \
a_prefix##first(a_tree_type *tree) { \
    a_type *ret; \
    rb_first(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##last(a_tree_type *tree) { \
    a_type *ret; \
    rb_last(a_type, a_field, tree, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##next(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_next(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##prev(a_tree_type *tree, a_type *node) { \
    a_type *ret; \
    rb_prev(a_type, a_field, a_cmp, tree, node, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##search(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_search(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##nsearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_nsearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr a_type * \
a_prefix##psearch(a_tree_type *tree, a_type *key) { \
    a_type *ret; \
    rb_psearch(a_type, a_field, a_cmp, tree, key, ret); \
    return (ret); \
} \
a_attr void \
a_prefix##insert(a_tree_type *tree, a_type *node) { \
    rb_insert(a_type, a_field, a_cmp, tree, node); \
} \
a_attr void \
a_prefix##remove(a_tree_type *tree, a_type *node) { \
    rb_remove(a_type, a_field, a_cmp, tree, node); \
}
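
/*
 * Illustrative sketch (editorial): generating a typed function interface
 * with rb_wrap() and using it.  The ex_* names continue the hypothetical
 * example from above; the generated functions are ex_new(), ex_insert(),
 * ex_search(), ex_remove(), etc.
 */
#if 0
rb_wrap(static, ex_, ex_tree_t, ex_node_t, link, ex_cmp)

static void
ex_demo(void)
{
    ex_tree_t tree;
    ex_node_t a, b, key, *found;

    ex_new(&tree);
    a.key = 1;
    b.key = 2;
    ex_insert(&tree, &a);
    ex_insert(&tree, &b);
    key.key = 2;
    found = ex_search(&tree, &key);    /* Returns &b. */
    ex_remove(&tree, found);
}
#endif
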
/*
 * The iterators simulate recursion via an array of pointers that store the
 * current path.  This is critical to performance, since a series of calls to
 * rb_{next,prev}() would require time proportional to (n lg n), whereas this
 * implementation only requires time proportional to (n).
 *
 * Since the iterators cache a path down the tree, any tree modification may
 * cause the cached path to become invalid.  In order to continue iteration,
 * use something like the following sequence:
 *
 *   {
 *       a_type *node, *tnode;
 *
 *       rb_foreach_begin(a_type, a_field, a_tree, node) {
 *           ...
 *           rb_next(a_type, a_field, a_cmp, a_tree, node, tnode);
 *           rb_remove(a_type, a_field, a_cmp, a_tree, node);
 *           rb_foreach_next(a_type, a_field, a_cmp, a_tree, tnode);
 *           ...
 *       } rb_foreach_end(a_type, a_field, a_tree, node)
 *   }
 *
 * Note that this idiom is not advised if every iteration modifies the tree,
 * since in that case there is no algorithmic complexity improvement over a
 * series of rb_{next,prev}() calls, thus making the setup overhead wasted
 * effort.
 */
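
/*
 * Illustrative sketch (editorial): read-only iteration, which needs none of
 * the re-synchronization shown above.  Continues the hypothetical ex_*
 * example.
 */
#if 0
static int
ex_count(ex_tree_t *tree)
{
    ex_node_t *node;
    int count = 0;

    rb_foreach_begin(ex_node_t, link, tree, node) {
        count++;
    } rb_foreach_end(ex_node_t, link, tree, node)
    return (count);
}
#endif
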
#ifdef RB_NO_C99_VARARRAYS
/*
 * Avoid using variable-length arrays, at the cost of using more stack space.
 * Size the path arrays such that they are always large enough, even if a
 * tree consumes all of memory.  Since each node must contain a minimum of
 * two pointers, there can never be more nodes than:
 *
 *   1 << ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1))
 *
 * Since the depth of a tree is limited to 3*lg(#nodes), the maximum depth
 * is:
 *
 *   (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
 *
 * This works out to a maximum depth of 87 and 180 for 32- and 64-bit
 * systems, respectively (approximately 348 and 1440 bytes, respectively).
 */
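/*
 * Editorial note, spelling out the arithmetic above: on a 64-bit system
 * SIZEOF_PTR is 8 and SIZEOF_PTR_2POW is 3, so the maximum depth is
 * 3 * ((8<<3) - (3+1)) = 3 * 60 = 180 path entries, i.e. 180 * 8 = 1440
 * bytes; on a 32-bit system it is 3 * ((4<<3) - (2+1)) = 3 * 29 = 87
 * entries, i.e. 87 * 4 = 348 bytes.
 */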
# define rbp_compute_f_height(a_type, a_field, a_tree)
# define rbp_f_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
# define rbp_compute_fr_height(a_type, a_field, a_tree)
# define rbp_fr_height (3 * ((SIZEOF_PTR<<3) - (SIZEOF_PTR_2POW+1)))
#else
# define rbp_compute_f_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */ \
    unsigned rbp_f_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_f_height); \
    rbp_f_height *= 3;
# define rbp_compute_fr_height(a_type, a_field, a_tree) \
    /* Compute the maximum possible tree depth (3X the black height). */ \
    unsigned rbp_fr_height; \
    rbp_black_height(a_type, a_field, a_tree, rbp_fr_height); \
    rbp_fr_height *= 3;
#endif

#define rb_foreach_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_f_height(a_type, a_field, a_tree) \
    { \
        /* Initialize the path to contain the left spine. */ \
        a_type *rbp_f_path[rbp_f_height]; \
        a_type *rbp_f_node; \
        bool rbp_f_synced = false; \
        unsigned rbp_f_depth = 0; \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
            rbp_f_depth++; \
            while ((rbp_f_node = rbp_left_get(a_type, a_field, \
              rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                rbp_f_path[rbp_f_depth] = rbp_f_node; \
                rbp_f_depth++; \
            } \
        } \
        /* While the path is non-empty, iterate. */ \
        while (rbp_f_depth > 0) { \
            (a_var) = rbp_f_path[rbp_f_depth-1];

/* Only use if modifying the tree during iteration. */
#define rb_foreach_next(a_type, a_field, a_cmp, a_tree, a_node) \
            /* Re-initialize the path to contain the path to a_node. */ \
            rbp_f_depth = 0; \
            if (a_node != NULL) { \
                if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
                    rbp_f_path[rbp_f_depth] = (a_tree)->rbt_root; \
                    rbp_f_depth++; \
                    rbp_f_node = rbp_f_path[0]; \
                    while (true) { \
                        int rbp_f_cmp = (a_cmp)((a_node), \
                          rbp_f_path[rbp_f_depth-1]); \
                        if (rbp_f_cmp < 0) { \
                            rbp_f_node = rbp_left_get(a_type, a_field, \
                              rbp_f_path[rbp_f_depth-1]); \
                        } else if (rbp_f_cmp > 0) { \
                            rbp_f_node = rbp_right_get(a_type, a_field, \
                              rbp_f_path[rbp_f_depth-1]); \
                        } else { \
                            break; \
                        } \
                        assert(rbp_f_node != &(a_tree)->rbt_nil); \
                        rbp_f_path[rbp_f_depth] = rbp_f_node; \
                        rbp_f_depth++; \
                    } \
                } \
            } \
            rbp_f_synced = true;

#define rb_foreach_end(a_type, a_field, a_tree, a_var) \
            if (rbp_f_synced) { \
                rbp_f_synced = false; \
                continue; \
            } \
            /* Find the successor. */ \
            if ((rbp_f_node = rbp_right_get(a_type, a_field, \
              rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                /* The successor is the left-most node in the right */ \
                /* subtree. */ \
                rbp_f_path[rbp_f_depth] = rbp_f_node; \
                rbp_f_depth++; \
                while ((rbp_f_node = rbp_left_get(a_type, a_field, \
                  rbp_f_path[rbp_f_depth-1])) != &(a_tree)->rbt_nil) { \
                    rbp_f_path[rbp_f_depth] = rbp_f_node; \
                    rbp_f_depth++; \
                } \
            } else { \
                /* The successor is above the current node.  Unwind */ \
                /* until a left-leaning edge is removed from the */ \
                /* path, or the path is empty. */ \
                for (rbp_f_depth--; rbp_f_depth > 0; rbp_f_depth--) { \
                    if (rbp_left_get(a_type, a_field, \
                      rbp_f_path[rbp_f_depth-1]) \
                      == rbp_f_path[rbp_f_depth]) { \
                        break; \
                    } \
                } \
            } \
        } \
    } \
}

#define rb_foreach_reverse_begin(a_type, a_field, a_tree, a_var) { \
    rbp_compute_fr_height(a_type, a_field, a_tree) \
    { \
        /* Initialize the path to contain the right spine. */ \
        a_type *rbp_fr_path[rbp_fr_height]; \
        a_type *rbp_fr_node; \
        bool rbp_fr_synced = false; \
        unsigned rbp_fr_depth = 0; \
        if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
            rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
            rbp_fr_depth++; \
            while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
              rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
            } \
        } \
        /* While the path is non-empty, iterate. */ \
        while (rbp_fr_depth > 0) { \
            (a_var) = rbp_fr_path[rbp_fr_depth-1];

/* Only use if modifying the tree during iteration. */
#define rb_foreach_reverse_prev(a_type, a_field, a_cmp, a_tree, a_node) \
            /* Re-initialize the path to contain the path to a_node. */ \
            rbp_fr_depth = 0; \
            if (a_node != NULL) { \
                if ((a_tree)->rbt_root != &(a_tree)->rbt_nil) { \
                    rbp_fr_path[rbp_fr_depth] = (a_tree)->rbt_root; \
                    rbp_fr_depth++; \
                    rbp_fr_node = rbp_fr_path[0]; \
                    while (true) { \
                        int rbp_fr_cmp = (a_cmp)((a_node), \
                          rbp_fr_path[rbp_fr_depth-1]); \
                        if (rbp_fr_cmp < 0) { \
                            rbp_fr_node = rbp_left_get(a_type, a_field, \
                              rbp_fr_path[rbp_fr_depth-1]); \
                        } else if (rbp_fr_cmp > 0) { \
                            rbp_fr_node = rbp_right_get(a_type, a_field, \
                              rbp_fr_path[rbp_fr_depth-1]); \
                        } else { \
                            break; \
                        } \
                        assert(rbp_fr_node != &(a_tree)->rbt_nil); \
                        rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                        rbp_fr_depth++; \
                    } \
                } \
            } \
            rbp_fr_synced = true;

#define rb_foreach_reverse_end(a_type, a_field, a_tree, a_var) \
            if (rbp_fr_synced) { \
                rbp_fr_synced = false; \
                continue; \
            } \
            if (rbp_fr_depth == 0) { \
                /* rb_foreach_reverse_sync() was called with a NULL */ \
                /* a_node. */ \
                break; \
            } \
            /* Find the predecessor. */ \
            if ((rbp_fr_node = rbp_left_get(a_type, a_field, \
              rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                /* The predecessor is the right-most node in the left */ \
                /* subtree. */ \
                rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                rbp_fr_depth++; \
                while ((rbp_fr_node = rbp_right_get(a_type, a_field, \
                  rbp_fr_path[rbp_fr_depth-1])) != &(a_tree)->rbt_nil) { \
                    rbp_fr_path[rbp_fr_depth] = rbp_fr_node; \
                    rbp_fr_depth++; \
                } \
            } else { \
                /* The predecessor is above the current node.  Unwind */ \
                /* until a right-leaning edge is removed from the */ \
                /* path, or the path is empty. */ \
                for (rbp_fr_depth--; rbp_fr_depth > 0; rbp_fr_depth--) { \
                    if (rbp_right_get(a_type, a_field, \
                      rbp_fr_path[rbp_fr_depth-1]) \
                      == rbp_fr_path[rbp_fr_depth]) { \
                        break; \
                    } \
                } \
            } \
        } \
    } \
}

#endif /* RB_H_ */