#include <linux/module.h>
#include <linux/rbtree_augmented.h>
#include <linux/random.h>
#include <asm/timex.h>

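/*
 * Test tunables: each tree is built from NODES nodes; the timed runs do
 * PERF_LOOPS rounds of insert/erase, and the correctness runs do
 * CHECK_LOOPS rounds with the invariants verified at every step.
 */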
#define NODES       100
#define PERF_LOOPS  100000
#define CHECK_LOOPS 100

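/* One test element: a sort key plus a value and its cached subtree maximum. */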
struct test_node {
	u32 key;
	struct rb_node rb;

	/* following fields used for testing augmented rbtree functionality */
	u32 val;
	u32 augmented;
};

static struct rb_root root = RB_ROOT;
static struct test_node nodes[NODES];

static struct rnd_state rnd;

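/*
 * Plain insertion: walk down to the leaf position for the node's key,
 * link the node there, then let rb_insert_color() rebalance the tree.
 */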
static void insert(struct test_node *node, struct rb_root *root)
{
	struct rb_node **new = &root->rb_node, *parent = NULL;
	u32 key = node->key;

	while (*new) {
		parent = *new;
		if (key < rb_entry(parent, struct test_node, rb)->key)
			new = &parent->rb_left;
		else
			new = &parent->rb_right;
	}

	rb_link_node(&node->rb, parent, new);
	rb_insert_color(&node->rb, root);
}

static inline void erase(struct test_node *node, struct rb_root *root)
{
	rb_erase(&node->rb, root);
}

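/*
 * Recompute the augmented value for one node: the maximum ->val over its
 * subtree, taken from the node itself and the cached maxima of its children.
 */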
static inline u32 augment_recompute(struct test_node *node)
{
	u32 max = node->val, child_augmented;
	if (node->rb.rb_left) {
		child_augmented = rb_entry(node->rb.rb_left, struct test_node,
					   rb)->augmented;
		if (max < child_augmented)
			max = child_augmented;
	}
	if (node->rb.rb_right) {
		child_augmented = rb_entry(node->rb.rb_right, struct test_node,
					   rb)->augmented;
		if (max < child_augmented)
			max = child_augmented;
	}
	return max;
}

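/*
 * Generate the propagate/copy/rotate callbacks that keep ->augmented
 * up to date across tree rotations and erase operations.
 */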
RB_DECLARE_CALLBACKS(static, augment_callbacks, struct test_node, rb,
		     u32, augmented, augment_recompute)

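/*
 * Augmented insertion: push the new value into each ancestor's cached
 * maximum while descending, then insert with the augmented callbacks so
 * the rebalancing rotations preserve the subtree maxima.
 */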
static void insert_augmented(struct test_node *node, struct rb_root *root)
{
	struct rb_node **new = &root->rb_node, *rb_parent = NULL;
	u32 key = node->key;
	u32 val = node->val;
	struct test_node *parent;

	while (*new) {
		rb_parent = *new;
		parent = rb_entry(rb_parent, struct test_node, rb);
		if (parent->augmented < val)
			parent->augmented = val;
		if (key < parent->key)
			new = &parent->rb.rb_left;
		else
			new = &parent->rb.rb_right;
	}

	node->augmented = val;
	rb_link_node(&node->rb, rb_parent, new);
	rb_insert_augmented(&node->rb, root, &augment_callbacks);
}

static void erase_augmented(struct test_node *node, struct rb_root *root)
{
	rb_erase_augmented(&node->rb, root, &augment_callbacks);
}

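/* Refill the node array with pseudo-random keys and values. */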
static void init(void)
{
	int i;
	for (i = 0; i < NODES; i++) {
		nodes[i].key = prandom_u32_state(&rnd);
		nodes[i].val = prandom_u32_state(&rnd);
	}
}

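/* The node colour lives in the low bit of __rb_parent_color; RB_RED is 0. */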
static bool is_red(struct rb_node *rb)
{
	return !(rb->__rb_parent_color & 1);
}

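/* Count the black nodes on the path from rb up to the root. */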
static int black_path_count(struct rb_node *rb)
{
	int count;
	for (count = 0; rb; rb = rb_parent(rb))
		count += !is_red(rb);
	return count;
}

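/* The postorder iteration macro must visit exactly nr_nodes entries. */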
static void check_postorder_foreach(int nr_nodes)
{
	struct test_node *cur, *n;
	int count = 0;
	rbtree_postorder_for_each_entry_safe(cur, n, &root, rb)
		count++;

	WARN_ON_ONCE(count != nr_nodes);
}

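/* Same check using the low-level postorder iterators directly. */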
static void check_postorder(int nr_nodes)
{
	struct rb_node *rb;
	int count = 0;
	for (rb = rb_first_postorder(&root); rb; rb = rb_next_postorder(rb))
		count++;

	WARN_ON_ONCE(count != nr_nodes);
}

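/*
 * Verify the red-black invariants over an in-order walk: keys are sorted,
 * the root is black and no red node has a red parent, every node with a
 * missing child sees the same number of black nodes on its path to the
 * root, and the node count matches nr_nodes as well as the minimum count
 * implied by the tree's black height.
 */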
static void check(int nr_nodes)
{
	struct rb_node *rb;
	int count = 0, blacks = 0;
	u32 prev_key = 0;

	for (rb = rb_first(&root); rb; rb = rb_next(rb)) {
		struct test_node *node = rb_entry(rb, struct test_node, rb);
		WARN_ON_ONCE(node->key < prev_key);
		WARN_ON_ONCE(is_red(rb) &&
			     (!rb_parent(rb) || is_red(rb_parent(rb))));
		if (!count)
			blacks = black_path_count(rb);
		else
			WARN_ON_ONCE((!rb->rb_left || !rb->rb_right) &&
				     blacks != black_path_count(rb));
		prev_key = node->key;
		count++;
	}

	WARN_ON_ONCE(count != nr_nodes);
	WARN_ON_ONCE(count < (1 << black_path_count(rb_last(&root))) - 1);

	check_postorder(nr_nodes);
	check_postorder_foreach(nr_nodes);
}

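/*
 * On top of the plain rbtree checks, verify that each node's cached
 * subtree maximum matches a fresh recomputation.
 */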
static void check_augmented(int nr_nodes)
{
	struct rb_node *rb;

	check(nr_nodes);
	for (rb = rb_first(&root); rb; rb = rb_next(rb)) {
		struct test_node *node = rb_entry(rb, struct test_node, rb);
		WARN_ON_ONCE(node->augmented != augment_recompute(node));
	}
}

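/*
 * Module entry point: time PERF_LOOPS rounds of building and tearing down
 * the tree for both the plain and the augmented variant, then run
 * CHECK_LOOPS rounds with the invariants verified around every operation.
 */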
static int __init rbtree_test_init(void)
{
	int i, j;
	cycles_t time1, time2, time;

	printk(KERN_ALERT "rbtree testing");

	prandom_seed_state(&rnd, 3141592653589793238ULL);
	init();

	time1 = get_cycles();

	for (i = 0; i < PERF_LOOPS; i++) {
		for (j = 0; j < NODES; j++)
			insert(nodes + j, &root);
		for (j = 0; j < NODES; j++)
			erase(nodes + j, &root);
	}

	time2 = get_cycles();
	time = time2 - time1;

	time = div_u64(time, PERF_LOOPS);
	printk(" -> %llu cycles\n", (unsigned long long)time);

	for (i = 0; i < CHECK_LOOPS; i++) {
		init();
		for (j = 0; j < NODES; j++) {
			check(j);
			insert(nodes + j, &root);
		}
		for (j = 0; j < NODES; j++) {
			check(NODES - j);
			erase(nodes + j, &root);
		}
		check(0);
	}

	printk(KERN_ALERT "augmented rbtree testing");

	init();

	time1 = get_cycles();

	for (i = 0; i < PERF_LOOPS; i++) {
		for (j = 0; j < NODES; j++)
			insert_augmented(nodes + j, &root);
		for (j = 0; j < NODES; j++)
			erase_augmented(nodes + j, &root);
	}

	time2 = get_cycles();
	time = time2 - time1;

	time = div_u64(time, PERF_LOOPS);
	printk(" -> %llu cycles\n", (unsigned long long)time);

	for (i = 0; i < CHECK_LOOPS; i++) {
		init();
		for (j = 0; j < NODES; j++) {
			check_augmented(j);
			insert_augmented(nodes + j, &root);
		}
		for (j = 0; j < NODES; j++) {
			check_augmented(NODES - j);
			erase_augmented(nodes + j, &root);
		}
		check_augmented(0);
	}

	return -EAGAIN; /* failing init unloads the module after the test */
}

static void __exit rbtree_test_exit(void)
{
	printk(KERN_ALERT "test exit\n");
}

module_init(rbtree_test_init)
module_exit(rbtree_test_exit)

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Michel Lespinasse");
MODULE_DESCRIPTION("Red Black Tree test");