// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2013 Fusion IO.  All rights reserved.
 */

#include <linux/pagemap.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/sizes.h>
#include "btrfs-tests.h"
#include "../ctree.h"
#include "../extent_io.h"
#include "../btrfs_inode.h"

#define PROCESS_UNLOCK		(1 << 0)
#define PROCESS_RELEASE		(1 << 1)
#define PROCESS_TEST_LOCKED	(1 << 2)

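/*
 * Walk the pages covering [start, end] and apply the PROCESS_* actions in
 * @flags: PROCESS_TEST_LOCKED counts pages that are unexpectedly unlocked,
 * PROCESS_UNLOCK unlocks still-locked pages and PROCESS_RELEASE drops an
 * extra page reference.  Returns the number of pages that failed the locked
 * check.
 */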
static noinline int process_page_range(struct inode *inode, u64 start, u64 end,
					unsigned long flags)
{
	int ret;
	struct page *pages[16];
	unsigned long index = start >> PAGE_SHIFT;
	unsigned long end_index = end >> PAGE_SHIFT;
	unsigned long nr_pages = end_index - index + 1;
	int i;
	int count = 0;
	int loops = 0;

	while (nr_pages > 0) {
		ret = find_get_pages_contig(inode->i_mapping, index,
				min_t(unsigned long, nr_pages,
				      ARRAY_SIZE(pages)), pages);
		for (i = 0; i < ret; i++) {
			if (flags & PROCESS_TEST_LOCKED &&
			    !PageLocked(pages[i]))
				count++;
			if (flags & PROCESS_UNLOCK && PageLocked(pages[i]))
				unlock_page(pages[i]);
			put_page(pages[i]);
			if (flags & PROCESS_RELEASE)
				put_page(pages[i]);
		}
		nr_pages -= ret;
		index += ret;
		cond_resched();
		loops++;
		if (loops > 100000) {
			printk(KERN_ERR
		"stuck in a loop, start %llu, end %llu, nr_pages %lu, ret %d\n",
				start, end, nr_pages, ret);
			break;
		}
	}
	return count;
}

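/*
 * Dirty two maximum-sized (BTRFS_MAX_EXTENT_SIZE) file extents worth of pages,
 * mark various subranges as delalloc and check that find_lock_delalloc_range()
 * returns the expected range and leaves the pages in it locked.
 */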
static int test_find_delalloc(u32 sectorsize)
{
	struct inode *inode;
	struct extent_io_tree *tmp;
	struct page *page;
	struct page *locked_page = NULL;
	unsigned long index = 0;
	/* In this test we need at least 2 file extents at their maximum size */
	u64 max_bytes = BTRFS_MAX_EXTENT_SIZE;
	u64 total_dirty = 2 * max_bytes;
	u64 start, end, test_start;
	bool found;
	int ret = -EINVAL;

	test_msg("running find delalloc tests");

	inode = btrfs_new_test_inode();
	if (!inode) {
		test_std_err(TEST_ALLOC_INODE);
		return -ENOMEM;
	}
	tmp = &BTRFS_I(inode)->io_tree;

	/*
	 * Passing NULL as we don't have fs_info but tracepoints are not used
	 * at this point.
	 */
	extent_io_tree_init(NULL, tmp, IO_TREE_SELFTEST, NULL);

	/*
	 * First go through and create and mark all of our pages dirty.  We pin
	 * everything to make sure our pages don't get evicted and screw up our
	 * test.
	 */
	for (index = 0; index < (total_dirty >> PAGE_SHIFT); index++) {
		page = find_or_create_page(inode->i_mapping, index, GFP_KERNEL);
		if (!page) {
			test_err("failed to allocate test page");
			ret = -ENOMEM;
			goto out;
		}
		SetPageDirty(page);
		if (index) {
			unlock_page(page);
		} else {
			get_page(page);
			locked_page = page;
		}
	}

	/*
	 * Test this scenario
	 * |--- delalloc ---|
	 * |---  search  ---|
	 */
	set_extent_delalloc(tmp, 0, sectorsize - 1, 0, NULL);
	start = 0;
	end = 0;
	found = find_lock_delalloc_range(inode, locked_page, &start,
					 &end);
	if (!found) {
		test_err("should have found at least one delalloc");
		goto out_bits;
	}
	if (start != 0 || end != (sectorsize - 1)) {
		test_err("expected start 0 end %u, got start %llu end %llu",
			sectorsize - 1, start, end);
		goto out_bits;
	}
	unlock_extent(tmp, start, end);
	unlock_page(locked_page);
	put_page(locked_page);

	/*
	 * Test this scenario
	 *
	 * |--- delalloc ---|
	 *           |--- search ---|
	 */
	test_start = SZ_64M;
	locked_page = find_lock_page(inode->i_mapping,
				     test_start >> PAGE_SHIFT);
	if (!locked_page) {
		test_err("couldn't find the locked page");
		goto out_bits;
	}
	set_extent_delalloc(tmp, sectorsize, max_bytes - 1, 0, NULL);
	start = test_start;
	end = 0;
	found = find_lock_delalloc_range(inode, locked_page, &start,
					 &end);
	if (!found) {
		test_err("couldn't find delalloc in our range");
		goto out_bits;
	}
	if (start != test_start || end != max_bytes - 1) {
		test_err("expected start %llu end %llu, got start %llu, end %llu",
			 test_start, max_bytes - 1, start, end);
		goto out_bits;
	}
	if (process_page_range(inode, start, end,
			       PROCESS_TEST_LOCKED | PROCESS_UNLOCK)) {
		test_err("there were unlocked pages in the range");
		goto out_bits;
	}
	unlock_extent(tmp, start, end);
	/* locked_page was unlocked above */
	put_page(locked_page);

	/*
	 * Test this scenario
	 * |--- delalloc ---|
	 *                    |--- search ---|
	 */
	test_start = max_bytes + sectorsize;
	locked_page = find_lock_page(inode->i_mapping, test_start >>
				     PAGE_SHIFT);
	if (!locked_page) {
		test_err("couldn't find the locked page");
		goto out_bits;
	}
	start = test_start;
	end = 0;
	found = find_lock_delalloc_range(inode, locked_page, &start,
					 &end);
	if (found) {
		test_err("found range when we shouldn't have");
		goto out_bits;
	}
	if (end != (u64)-1) {
		test_err("did not return the proper end offset");
		goto out_bits;
	}

	/*
	 * Test this scenario
	 * [------- delalloc -------|
	 * [max_bytes]|-- search--|
	 *
	 * We are re-using our test_start from above since it works out well.
	 */
	set_extent_delalloc(tmp, max_bytes, total_dirty - 1, 0, NULL);
	start = test_start;
	end = 0;
	found = find_lock_delalloc_range(inode, locked_page, &start,
					 &end);
	if (!found) {
		test_err("didn't find our range");
		goto out_bits;
	}
	if (start != test_start || end != total_dirty - 1) {
		test_err("expected start %llu end %llu, got start %llu end %llu",
			 test_start, total_dirty - 1, start, end);
		goto out_bits;
	}
	if (process_page_range(inode, start, end,
			       PROCESS_TEST_LOCKED | PROCESS_UNLOCK)) {
		test_err("pages in range were not all locked");
		goto out_bits;
	}
	unlock_extent(tmp, start, end);

	/*
	 * Now to test where we run into a page that is no longer dirty in the
	 * range we want to find.
	 */
	page = find_get_page(inode->i_mapping,
			     (max_bytes + SZ_1M) >> PAGE_SHIFT);
	if (!page) {
		test_err("couldn't find our page");
		goto out_bits;
	}
	ClearPageDirty(page);
	put_page(page);

	/* We unlocked it in the previous test */
	lock_page(locked_page);
	start = test_start;
	end = 0;
	/*
	 * Currently if we fail to find dirty pages in the delalloc range we
	 * will adjust max_bytes down to PAGE_SIZE and then re-search.  If
	 * this changes at any point in the future we will need to fix this
	 * test's expected behavior.
	 */
	found = find_lock_delalloc_range(inode, locked_page, &start,
					 &end);
	if (!found) {
		test_err("didn't find our range");
		goto out_bits;
	}
	if (start != test_start || end != test_start + PAGE_SIZE - 1) {
		test_err("expected start %llu end %llu, got start %llu end %llu",
			 test_start, test_start + PAGE_SIZE - 1, start, end);
		goto out_bits;
	}
	if (process_page_range(inode, start, end, PROCESS_TEST_LOCKED |
			       PROCESS_UNLOCK)) {
		test_err("pages in range were not all locked");
		goto out_bits;
	}
	ret = 0;
out_bits:
	clear_extent_bits(tmp, 0, total_dirty - 1, (unsigned)-1);
out:
	if (locked_page)
		put_page(locked_page);
	process_page_range(inode, 0, total_dirty - 1,
			   PROCESS_UNLOCK | PROCESS_RELEASE);
	iput(inode);
	return ret;
}

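/*
 * Compare an ordinary in-memory bitmap with the bitmap stored in an extent
 * buffer, bit by bit, addressing each bit both by absolute index and by
 * byte + bit offset.
 */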
static int check_eb_bitmap(unsigned long *bitmap, struct extent_buffer *eb,
			   unsigned long len)
{
	unsigned long i;

	for (i = 0; i < len * BITS_PER_BYTE; i++) {
		int bit, bit1;

		bit = !!test_bit(i, bitmap);
		bit1 = !!extent_buffer_test_bit(eb, 0, i);
		if (bit1 != bit) {
			test_err("bits do not match");
			return -EINVAL;
		}

		bit1 = !!extent_buffer_test_bit(eb, i / BITS_PER_BYTE,
						i % BITS_PER_BYTE);
		if (bit1 != bit) {
			test_err("offset bits do not match");
			return -EINVAL;
		}
	}
	return 0;
}

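/*
 * Exercise the extent buffer bitmap helpers against a plain bitmap: set all
 * bits, clear all bits, set/clear a run that straddles a page boundary (only
 * when len > PAGE_SIZE) and finally a pseudo-random pattern, verifying the two
 * bitmaps agree after each step.
 */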
static int __test_eb_bitmaps(unsigned long *bitmap, struct extent_buffer *eb,
			     unsigned long len)
{
	unsigned long i, j;
	u32 x;
	int ret;

	memset(bitmap, 0, len);
	memzero_extent_buffer(eb, 0, len);
	if (memcmp_extent_buffer(eb, bitmap, 0, len) != 0) {
		test_err("bitmap was not zeroed");
		return -EINVAL;
	}

	bitmap_set(bitmap, 0, len * BITS_PER_BYTE);
	extent_buffer_bitmap_set(eb, 0, 0, len * BITS_PER_BYTE);
	ret = check_eb_bitmap(bitmap, eb, len);
	if (ret) {
		test_err("setting all bits failed");
		return ret;
	}

	bitmap_clear(bitmap, 0, len * BITS_PER_BYTE);
	extent_buffer_bitmap_clear(eb, 0, 0, len * BITS_PER_BYTE);
	ret = check_eb_bitmap(bitmap, eb, len);
	if (ret) {
		test_err("clearing all bits failed");
		return ret;
	}

	/* Straddling pages test */
	if (len > PAGE_SIZE) {
		bitmap_set(bitmap,
			(PAGE_SIZE - sizeof(long) / 2) * BITS_PER_BYTE,
			sizeof(long) * BITS_PER_BYTE);
		extent_buffer_bitmap_set(eb, PAGE_SIZE - sizeof(long) / 2, 0,
					 sizeof(long) * BITS_PER_BYTE);
		ret = check_eb_bitmap(bitmap, eb, len);
		if (ret) {
			test_err("setting straddling pages failed");
			return ret;
		}

		bitmap_set(bitmap, 0, len * BITS_PER_BYTE);
		bitmap_clear(bitmap,
			(PAGE_SIZE - sizeof(long) / 2) * BITS_PER_BYTE,
			sizeof(long) * BITS_PER_BYTE);
		extent_buffer_bitmap_set(eb, 0, 0, len * BITS_PER_BYTE);
		extent_buffer_bitmap_clear(eb, PAGE_SIZE - sizeof(long) / 2, 0,
					   sizeof(long) * BITS_PER_BYTE);
		ret = check_eb_bitmap(bitmap, eb, len);
		if (ret) {
			test_err("clearing straddling pages failed");
			return ret;
		}
	}

	/*
	 * Generate a wonky pseudo-random bit pattern for the sake of not using
	 * something repetitive that could miss some hypothetical off-by-n bug.
	 */
	x = 0;
	bitmap_clear(bitmap, 0, len * BITS_PER_BYTE);
	extent_buffer_bitmap_clear(eb, 0, 0, len * BITS_PER_BYTE);
	for (i = 0; i < len * BITS_PER_BYTE / 32; i++) {
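		/* Advance a simple 32-bit linear congruential generator. */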
		x = (0x19660dULL * (u64)x + 0x3c6ef35fULL) & 0xffffffffU;
		for (j = 0; j < 32; j++) {
			if (x & (1U << j)) {
				bitmap_set(bitmap, i * 32 + j, 1);
				extent_buffer_bitmap_set(eb, 0, i * 32 + j, 1);
			}
		}
	}

	ret = check_eb_bitmap(bitmap, eb, len);
	if (ret) {
		test_err("random bit pattern failed");
		return ret;
	}

	return 0;
}

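/*
 * Run the bitmap tests twice: once on an extent buffer that starts at 0 (so it
 * is nodesize aligned) and once on one that starts at sectorsize (sectorsize
 * aligned but not nodesize aligned).
 */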
static int test_eb_bitmaps(u32 sectorsize, u32 nodesize)
{
	struct btrfs_fs_info *fs_info;
	unsigned long *bitmap = NULL;
	struct extent_buffer *eb = NULL;
	int ret;

	test_msg("running extent buffer bitmap tests");

	fs_info = btrfs_alloc_dummy_fs_info(nodesize, sectorsize);
	if (!fs_info) {
		test_std_err(TEST_ALLOC_FS_INFO);
		return -ENOMEM;
	}

	bitmap = kmalloc(nodesize, GFP_KERNEL);
	if (!bitmap) {
		test_err("couldn't allocate test bitmap");
		ret = -ENOMEM;
		goto out;
	}

	eb = __alloc_dummy_extent_buffer(fs_info, 0, nodesize);
	if (!eb) {
		test_std_err(TEST_ALLOC_ROOT);
		ret = -ENOMEM;
		goto out;
	}

	ret = __test_eb_bitmaps(bitmap, eb, nodesize);
	if (ret)
		goto out;

	free_extent_buffer(eb);

	/*
	 * Test again for the case where the tree block is sectorsize aligned
	 * but not nodesize aligned.
	 */
	eb = __alloc_dummy_extent_buffer(fs_info, sectorsize, nodesize);
	if (!eb) {
		test_std_err(TEST_ALLOC_ROOT);
		ret = -ENOMEM;
		goto out;
	}

	ret = __test_eb_bitmaps(bitmap, eb, nodesize);
out:
	free_extent_buffer(eb);
	kfree(bitmap);
	btrfs_free_dummy_fs_info(fs_info);
	return ret;
}

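/*
 * Check find_first_clear_extent_bit() on a standalone io tree: an empty tree,
 * holes before/between/after ranges with CHUNK_TRIMMED/CHUNK_ALLOCATED set,
 * searches starting inside a set range, and a search beyond the last set
 * range.
 */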
static int test_find_first_clear_extent_bit(void)
{
	struct extent_io_tree tree;
	u64 start, end;
	int ret = -EINVAL;

	test_msg("running find_first_clear_extent_bit test");

	extent_io_tree_init(NULL, &tree, IO_TREE_SELFTEST, NULL);

	/* Test correct handling of empty tree */
	find_first_clear_extent_bit(&tree, 0, &start, &end, CHUNK_TRIMMED);
	if (start != 0 || end != -1) {
		test_err(
	"error getting a range from completely empty tree: start %llu end %llu",
			start, end);
		goto out;
	}
	/*
	 * Set 1M-4M alloc/discard; together with the 32M-64M range added
	 * below this leaves a hole between 4M-32M.
	 */
	set_extent_bits(&tree, SZ_1M, SZ_4M - 1,
			CHUNK_TRIMMED | CHUNK_ALLOCATED);

	find_first_clear_extent_bit(&tree, SZ_512K, &start, &end,
				    CHUNK_TRIMMED | CHUNK_ALLOCATED);

	if (start != 0 || end != SZ_1M - 1) {
		test_err("error finding beginning range: start %llu end %llu",
			 start, end);
		goto out;
	}

	/* Now add 32M-64M so that we have a hole between 4M-32M */
	set_extent_bits(&tree, SZ_32M, SZ_64M - 1,
			CHUNK_TRIMMED | CHUNK_ALLOCATED);

	/*
	 * Request first hole starting at 12M, we should get 4M-32M
	 */
	find_first_clear_extent_bit(&tree, 12 * SZ_1M, &start, &end,
				    CHUNK_TRIMMED | CHUNK_ALLOCATED);

	if (start != SZ_4M || end != SZ_32M - 1) {
		test_err("error finding trimmed range: start %llu end %llu",
			 start, end);
		goto out;
	}

	/*
	 * Search in the middle of the allocated range, should get the next
	 * available one, which happens to be unallocated -> 4M-32M
	 */
	find_first_clear_extent_bit(&tree, SZ_2M, &start, &end,
				    CHUNK_TRIMMED | CHUNK_ALLOCATED);

	if (start != SZ_4M || end != SZ_32M - 1) {
		test_err("error finding next unalloc range: start %llu end %llu",
			 start, end);
		goto out;
	}

| 496 | /* |
| 497 | * Set 64M-72M with CHUNK_ALLOC flag, then search for CHUNK_TRIMMED flag |
| 498 | * being unset in this range, we should get the entry in range 64M-72M |
| 499 | */ |
| 500 | set_extent_bits(&tree, SZ_64M, SZ_64M + SZ_8M - 1, CHUNK_ALLOCATED); |
| 501 | find_first_clear_extent_bit(&tree, SZ_64M + SZ_1M, &start, &end, |
| 502 | CHUNK_TRIMMED); |
| 503 | |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 504 | if (start != SZ_64M || end != SZ_64M + SZ_8M - 1) { |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 505 | test_err("error finding exact range: start %llu end %llu", |
| 506 | start, end); |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 507 | goto out; |
| 508 | } |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 509 | |
| 510 | find_first_clear_extent_bit(&tree, SZ_64M - SZ_8M, &start, &end, |
| 511 | CHUNK_TRIMMED); |
| 512 | |
| 513 | /* |
| 514 | * Search in the middle of set range whose immediate neighbour doesn't |
| 515 | * have the bits set so it must be returned |
| 516 | */ |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 517 | if (start != SZ_64M || end != SZ_64M + SZ_8M - 1) { |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 518 | test_err("error finding next alloc range: start %llu end %llu", |
| 519 | start, end); |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 520 | goto out; |
| 521 | } |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 522 | |
| 523 | /* |
| 524 | * Search beyond any known range, shall return after last known range |
| 525 | * and end should be -1 |
| 526 | */ |
| 527 | find_first_clear_extent_bit(&tree, -1, &start, &end, CHUNK_TRIMMED); |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 528 | if (start != SZ_64M + SZ_8M || end != -1) { |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 529 | test_err( |
| 530 | "error handling beyond end of range search: start %llu end %llu", |
| 531 | start, end); |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 532 | goto out; |
| 533 | } |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 534 | |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 535 | ret = 0; |
| 536 | out: |
Filipe Manana | cdf52bd | 2019-08-03 09:53:16 +0100 | [diff] [blame] | 537 | clear_extent_bits(&tree, 0, (u64)-1, CHUNK_TRIMMED | CHUNK_ALLOCATED); |
| 538 | |
Filipe Manana | 202f64e | 2019-08-05 10:57:41 +0100 | [diff] [blame] | 539 | return ret; |
Nikolay Borisov | 1eaebb3 | 2019-06-03 13:06:02 +0300 | [diff] [blame] | 540 | } |
| 541 | |
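/*
 * Run the extent I/O selftests above for the given sectorsize/nodesize
 * combination.
 */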
int btrfs_test_extent_io(u32 sectorsize, u32 nodesize)
{
	int ret;

	test_msg("running extent I/O tests");

	ret = test_find_delalloc(sectorsize);
	if (ret)
		goto out;

	ret = test_find_first_clear_extent_bit();
	if (ret)
		goto out;

	ret = test_eb_bitmaps(sectorsize, nodesize);
out:
	return ret;
}