#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <time.h>
#include <assert.h>

#include <linux/slab.h>
#include <linux/radix-tree.h>

#include "test.h"
#include "regression.h"

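/*
 * __gang_check(): populate a contiguous run of indices around 'middle'
 * and verify single lookups, gang lookups and a full scan over that run.
 * Note that 'middle' is immediately overridden with 1 << 30 below, so
 * every caller currently exercises the same region of the index space.
 */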
void __gang_check(unsigned long middle, long down, long up, int chunk, int hop)
{
	long idx;
	RADIX_TREE(tree, GFP_KERNEL);

	middle = 1 << 30;

	for (idx = -down; idx < up; idx++)
		item_insert(&tree, middle + idx);

	item_check_absent(&tree, middle - down - 1);
	for (idx = -down; idx < up; idx++)
		item_check_present(&tree, middle + idx);
	item_check_absent(&tree, middle + up);

	item_gang_check_present(&tree, middle - down,
			up + down, chunk, hop);
	item_full_scan(&tree, middle - down, down + up, chunk);
	item_kill_tree(&tree);
}

void gang_check(void)
{
	__gang_check(1 << 30, 128, 128, 35, 2);
	__gang_check(1 << 31, 128, 128, 32, 32);
	__gang_check(1 << 31, 128, 128, 32, 100);
	__gang_check(1 << 31, 128, 128, 17, 7);
	__gang_check(0xffff0000, 0, 65536, 17, 7);
	__gang_check(0xfffffffe, 1, 1, 17, 7);
}

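/*
 * __big_gang_check(): step through the 33-bit index space in random
 * increments of under a million, running __gang_check() with randomized
 * range and chunk/hop parameters at each step, until 'start' wraps.
 */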
void __big_gang_check(void)
{
	unsigned long start;
	int wrapped = 0;

	start = 0;
	do {
		unsigned long old_start;

//		printf("0x%08lx\n", start);
		__gang_check(start, rand() % 113 + 1, rand() % 71,
				rand() % 157, rand() % 91 + 1);
		old_start = start;
		start += rand() % 1000000;
		start %= 1ULL << 33;
		if (start < old_start)
			wrapped = 1;
	} while (!wrapped);
}

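/* Repeat __big_gang_check(): 1000 passes when long_run is set, 3 otherwise. */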
void big_gang_check(bool long_run)
{
	int i;

	for (i = 0; i < (long_run ? 1000 : 3); i++) {
		__big_gang_check();
		printf("%d ", i);
		fflush(stdout);
	}
}

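/* Smoke test: a single inserted index is present and its neighbour is not. */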
void add_and_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 44);
	item_check_present(&tree, 44);
	item_check_absent(&tree, 43);
	item_kill_tree(&tree);
}

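/*
 * dynamic_height_check(): verify (via tree_verify_min_height()) that the
 * tree grows to the minimum height needed for the largest index present
 * and shrinks back as items are deleted, both for a pair of hand-picked
 * indices and for a run over the first 1000 indices.
 */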
void dynamic_height_check(void)
{
	int i;
	RADIX_TREE(tree, GFP_KERNEL);
	tree_verify_min_height(&tree, 0);

	item_insert(&tree, 42);
	tree_verify_min_height(&tree, 42);

	item_insert(&tree, 1000000);
	tree_verify_min_height(&tree, 1000000);

	assert(item_delete(&tree, 1000000));
	tree_verify_min_height(&tree, 42);

	assert(item_delete(&tree, 42));
	tree_verify_min_height(&tree, 0);

	for (i = 0; i < 1000; i++) {
		item_insert(&tree, i);
		tree_verify_min_height(&tree, i);
	}

	i--;
	for (;;) {
		assert(item_delete(&tree, i));
		if (i == 0) {
			tree_verify_min_height(&tree, 0);
			break;
		}
		i--;
		tree_verify_min_height(&tree, i);
	}

	item_kill_tree(&tree);
}

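/*
 * check_copied_tags(): for each recorded index, items inside [start, end]
 * must have 'totag' set iff 'fromtag' is set; items outside the range
 * must not have picked up 'totag' at all.
 */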
void check_copied_tags(struct radix_tree_root *tree, unsigned long start,
		unsigned long end, unsigned long *idx, int count,
		int fromtag, int totag)
{
	int i;

	for (i = 0; i < count; i++) {
/*		if (i % 1000 == 0)
			putchar('.'); */
		if (idx[i] < start || idx[i] > end) {
			if (item_tag_get(tree, idx[i], totag)) {
				printf("%lu-%lu: %lu, tags %d-%d\n", start,
					end, idx[i],
					item_tag_get(tree, idx[i], fromtag),
					item_tag_get(tree, idx[i], totag));
			}
			assert(!item_tag_get(tree, idx[i], totag));
			continue;
		}
		if (item_tag_get(tree, idx[i], fromtag) ^
			item_tag_get(tree, idx[i], totag)) {
			printf("%lu-%lu: %lu, tags %d-%d\n", start, end,
				idx[i], item_tag_get(tree, idx[i], fromtag),
				item_tag_get(tree, idx[i], totag));
		}
		assert(!(item_tag_get(tree, idx[i], fromtag) ^
			item_tag_get(tree, idx[i], totag)));
	}
}

#define ITEMS 50000

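/*
 * copy_tag_check(): insert ITEMS random indices (plus a few around the
 * ends of a random [start, end] range), tag roughly half of them with
 * tag 0, then use radix_tree_range_tag_if_tagged() to copy tag 0 to
 * tag 1 in one pass and to tag 2 in several bounded rounds, verifying
 * the returned counts and the resulting tag state each time.
 */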
void copy_tag_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long idx[ITEMS];
	unsigned long start, end, count = 0, tagged, cur, tmp;
	int i;

//	printf("generating radix tree indices...\n");
	start = rand();
	end = rand();
	if (start > end && (rand() % 10)) {
		cur = start;
		start = end;
		end = cur;
	}
	/* Specifically create items around the start and the end of the range
	 * with high probability to check for off-by-one errors */
	cur = rand();
	if (cur & 1) {
		item_insert(&tree, start);
		if (cur & 2) {
			if (start <= end)
				count++;
			item_tag_set(&tree, start, 0);
		}
	}
	if (cur & 4) {
		item_insert(&tree, start-1);
		if (cur & 8)
			item_tag_set(&tree, start-1, 0);
	}
	if (cur & 16) {
		item_insert(&tree, end);
		if (cur & 32) {
			if (start <= end)
				count++;
			item_tag_set(&tree, end, 0);
		}
	}
	if (cur & 64) {
		item_insert(&tree, end+1);
		if (cur & 128)
			item_tag_set(&tree, end+1, 0);
	}

	for (i = 0; i < ITEMS; i++) {
		do {
			idx[i] = rand();
		} while (item_lookup(&tree, idx[i]));

		item_insert(&tree, idx[i]);
		if (rand() & 1) {
			item_tag_set(&tree, idx[i], 0);
			if (idx[i] >= start && idx[i] <= end)
				count++;
		}
/*		if (i % 1000 == 0)
			putchar('.'); */
	}

//	printf("\ncopying tags...\n");
	cur = start;
	tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, ITEMS, 0, 1);

//	printf("checking copied tags\n");
	assert(tagged == count);
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 1);

	/* Copy tags in several rounds */
//	printf("\ncopying tags...\n");
	cur = start;
	do {
		tmp = rand() % (count/10+2);
		tagged = radix_tree_range_tag_if_tagged(&tree, &cur, end, tmp, 0, 2);
	} while (tmp == tagged);

//	printf("%lu %lu %lu\n", tagged, tmp, count);
//	printf("checking copied tags\n");
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 2);
	assert(tagged < tmp);
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	verify_tag_consistency(&tree, 2);
//	printf("\n");
	item_kill_tree(&tree);
}

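/*
 * __locate_check(): insert an item of the given order at 'index' and
 * check that radix_tree_locate_item() maps the item pointer back to the
 * index it was inserted at.
 */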
static void __locate_check(struct radix_tree_root *tree, unsigned long index,
			unsigned order)
{
	struct item *item;
	unsigned long index2;

	item_insert_order(tree, index, order);
	item = item_lookup(tree, index);
	index2 = radix_tree_locate_item(tree, item);
	if (index != index2) {
		printf("index %ld order %d inserted; found %ld\n",
			index, order, index2);
		abort();
	}
}

static void __order_0_locate_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int i;

	for (i = 0; i < 50; i++)
		__locate_check(&tree, rand() % INT_MAX, 0);

	item_kill_tree(&tree);
}

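/*
 * locate_check(): exercise radix_tree_locate_item() for order-0 entries
 * and for multi-order entries up to order 19, including an entry at the
 * maximum possible index; locating a pointer that is not in the tree
 * (the root itself) must return -1.
 */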
static void locate_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned order;
	unsigned long offset, index;

	__order_0_locate_check();

	for (order = 0; order < 20; order++) {
		for (offset = 0; offset < (1 << (order + 3));
		     offset += (1UL << order)) {
			for (index = 0; index < (1UL << (order + 5));
			     index += (1UL << order)) {
				__locate_check(&tree, index + offset, order);
			}
			if (radix_tree_locate_item(&tree, &tree) != -1)
				abort();

			item_kill_tree(&tree);
		}
	}

	if (radix_tree_locate_item(&tree, &tree) != -1)
		abort();
	__locate_check(&tree, -1, 0);
	if (radix_tree_locate_item(&tree, &tree) != -1)
		abort();
	item_kill_tree(&tree);
}

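/*
 * single_thread_tests(): run each single-threaded check in turn, printing
 * the allocation and preempt counters after every stage so that a leak or
 * counter imbalance can be traced to the stage that introduced it.
 */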
static void single_thread_tests(bool long_run)
{
	int i;

	printf("starting single_thread_tests: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	multiorder_checks();
	printf("after multiorder_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	locate_check();
	printf("after locate_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	tag_check();
	printf("after tag_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	gang_check();
	printf("after gang_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	add_and_check();
	printf("after add_and_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	dynamic_height_check();
	printf("after dynamic_height_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	big_gang_check(long_run);
	printf("after big_gang_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	for (i = 0; i < (long_run ? 2000 : 3); i++) {
		copy_tag_check();
		printf("%d ", i);
		fflush(stdout);
	}
	printf("after copy_tag_check: %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
}

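/*
 * main(): parse the command line (-l selects the long-running variant,
 * -s <seed> makes the random sequence reproducible), seed the RNG,
 * register with RCU, run the regression, iteration and single-threaded
 * tests, then free the preallocated nodes and report what remains
 * allocated before exiting.
 */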
int main(int argc, char **argv)
{
	bool long_run = false;
	int opt;
	unsigned int seed = time(NULL);

	while ((opt = getopt(argc, argv, "ls:")) != -1) {
		if (opt == 'l')
			long_run = true;
		else if (opt == 's')
			seed = strtoul(optarg, NULL, 0);
	}

	printf("random seed %u\n", seed);
	srand(seed);

	rcu_register_thread();
	radix_tree_init();

	regression1_test();
	regression2_test();
	regression3_test();
	iteration_test();
	single_thread_tests(long_run);

	/* Free any remaining preallocated nodes */
	radix_tree_cpu_dead(0);

	benchmark();

	sleep(1);
	printf("after sleep(1): %d allocated, preempt %d\n",
		nr_allocated, preempt_count);
	rcu_unregister_thread();

	exit(0);
}