Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2006-2008 Nokia Corporation |
| 3 | * |
| 4 | * This program is free software; you can redistribute it and/or modify it |
| 5 | * under the terms of the GNU General Public License version 2 as published by |
| 6 | * the Free Software Foundation. |
| 7 | * |
| 8 | * This program is distributed in the hope that it will be useful, but WITHOUT |
| 9 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| 10 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for |
| 11 | * more details. |
| 12 | * |
| 13 | * You should have received a copy of the GNU General Public License along with |
| 14 | * this program; see the file COPYING. If not, write to the Free Software |
| 15 | * Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. |
| 16 | * |
| 17 | * Test OOB read and write on MTD device. |
| 18 | * |
| 19 | * Author: Adrian Hunter <ext-adrian.hunter@nokia.com> |
| 20 | */ |
| 21 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 22 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
| 23 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 24 | #include <asm/div64.h> |
| 25 | #include <linux/init.h> |
| 26 | #include <linux/module.h> |
| 27 | #include <linux/moduleparam.h> |
| 28 | #include <linux/err.h> |
| 29 | #include <linux/mtd/mtd.h> |
Tejun Heo | 5a0e3ad | 2010-03-24 17:04:11 +0900 | [diff] [blame] | 30 | #include <linux/slab.h> |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 31 | #include <linux/sched.h> |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 32 | #include <linux/random.h> |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 33 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 34 | #include "mtd_test.h" |
| 35 | |
Wolfram Sang | 7406060 | 2011-10-30 00:11:53 +0200 | [diff] [blame] | 36 | static int dev = -EINVAL; |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 37 | static int bitflip_limit; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 38 | module_param(dev, int, S_IRUGO); |
| 39 | MODULE_PARM_DESC(dev, "MTD device number to use"); |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 40 | module_param(bitflip_limit, int, S_IRUGO); |
| 41 | MODULE_PARM_DESC(bitflip_limit, "Max. allowed bitflips per page"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 42 | |
| 43 | static struct mtd_info *mtd; |
| 44 | static unsigned char *readbuf; |
| 45 | static unsigned char *writebuf; |
| 46 | static unsigned char *bbt; |
| 47 | |
| 48 | static int ebcnt; |
| 49 | static int pgcnt; |
| 50 | static int errcnt; |
| 51 | static int use_offset; |
| 52 | static int use_len; |
| 53 | static int use_len_max; |
| 54 | static int vary_offset; |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 55 | static struct rnd_state rnd_state; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 56 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 57 | static void do_vary_offset(void) |
| 58 | { |
| 59 | use_len -= 1; |
| 60 | if (use_len < 1) { |
| 61 | use_offset += 1; |
| 62 | if (use_offset >= use_len_max) |
| 63 | use_offset = 0; |
| 64 | use_len = use_len_max - use_offset; |
| 65 | } |
| 66 | } |
| 67 | |
| 68 | static int write_eraseblock(int ebnum) |
| 69 | { |
| 70 | int i; |
| 71 | struct mtd_oob_ops ops; |
| 72 | int err = 0; |
Brian Norris | b9da8ba | 2015-02-28 02:02:26 -0800 | [diff] [blame] | 73 | loff_t addr = (loff_t)ebnum * mtd->erasesize; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 74 | |
Akinobu Mita | be54f8f | 2014-03-08 00:24:10 +0900 | [diff] [blame] | 75 | prandom_bytes_state(&rnd_state, writebuf, use_len_max * pgcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 76 | for (i = 0; i < pgcnt; ++i, addr += mtd->writesize) { |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 77 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 78 | ops.len = 0; |
| 79 | ops.retlen = 0; |
| 80 | ops.ooblen = use_len; |
| 81 | ops.oobretlen = 0; |
| 82 | ops.ooboffs = use_offset; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 83 | ops.datbuf = NULL; |
Akinobu Mita | be54f8f | 2014-03-08 00:24:10 +0900 | [diff] [blame] | 84 | ops.oobbuf = writebuf + (use_len_max * i) + use_offset; |
Artem Bityutskiy | a2cc5ba | 2011-12-23 18:29:55 +0200 | [diff] [blame] | 85 | err = mtd_write_oob(mtd, addr, &ops); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 86 | if (err || ops.oobretlen != use_len) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 87 | pr_err("error: writeoob failed at %#llx\n", |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 88 | (long long)addr); |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 89 | pr_err("error: use_len %d, use_offset %d\n", |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 90 | use_len, use_offset); |
| 91 | errcnt += 1; |
| 92 | return err ? err : -1; |
| 93 | } |
| 94 | if (vary_offset) |
| 95 | do_vary_offset(); |
| 96 | } |
| 97 | |
| 98 | return err; |
| 99 | } |
| 100 | |
| 101 | static int write_whole_device(void) |
| 102 | { |
| 103 | int err; |
| 104 | unsigned int i; |
| 105 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 106 | pr_info("writing OOBs of whole device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 107 | for (i = 0; i < ebcnt; ++i) { |
| 108 | if (bbt[i]) |
| 109 | continue; |
| 110 | err = write_eraseblock(i); |
| 111 | if (err) |
| 112 | return err; |
| 113 | if (i % 256 == 0) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 114 | pr_info("written up to eraseblock %u\n", i); |
Richard Weinberger | 2a6a28e7 | 2015-03-29 21:52:06 +0200 | [diff] [blame] | 115 | |
| 116 | err = mtdtest_relax(); |
| 117 | if (err) |
| 118 | return err; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 119 | } |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 120 | pr_info("written %u eraseblocks\n", i); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 121 | return 0; |
| 122 | } |
| 123 | |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 124 | /* |
| 125 | * Display the address, offset and data bytes at comparison failure. |
| 126 | * Return number of bitflips encountered. |
| 127 | */ |
Roger Quadros | 718e38b | 2015-07-08 14:50:19 +0300 | [diff] [blame] | 128 | static size_t memcmpshowoffset(loff_t addr, loff_t offset, const void *cs, |
| 129 | const void *ct, size_t count) |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 130 | { |
| 131 | const unsigned char *su1, *su2; |
| 132 | int res; |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 133 | size_t i = 0; |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 134 | size_t bitflips = 0; |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 135 | |
| 136 | for (su1 = cs, su2 = ct; 0 < count; ++su1, ++su2, count--, i++) { |
| 137 | res = *su1 ^ *su2; |
| 138 | if (res) { |
Roger Quadros | 718e38b | 2015-07-08 14:50:19 +0300 | [diff] [blame] | 139 | pr_info("error @addr[0x%lx:0x%lx] 0x%x -> 0x%x diff 0x%x\n", |
| 140 | (unsigned long)addr, (unsigned long)offset + i, |
| 141 | *su1, *su2, res); |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 142 | bitflips += hweight8(res); |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 143 | } |
| 144 | } |
| 145 | |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 146 | return bitflips; |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 147 | } |
| 148 | |
Roger Quadros | 718e38b | 2015-07-08 14:50:19 +0300 | [diff] [blame] | 149 | #define memcmpshow(addr, cs, ct, count) memcmpshowoffset((addr), 0, (cs), (ct),\ |
| 150 | (count)) |
| 151 | |
Roger Quadros | d2b51c8 | 2014-12-05 17:18:39 +0200 | [diff] [blame] | 152 | /* |
| 153 | * Compare with 0xff and show the address, offset and data bytes at |
| 154 | * comparison failure. Return number of bitflips encountered. |
| 155 | */ |
| 156 | static size_t memffshow(loff_t addr, loff_t offset, const void *cs, |
| 157 | size_t count) |
| 158 | { |
| 159 | const unsigned char *su1; |
| 160 | int res; |
| 161 | size_t i = 0; |
| 162 | size_t bitflips = 0; |
| 163 | |
| 164 | for (su1 = cs; 0 < count; ++su1, count--, i++) { |
| 165 | res = *su1 ^ 0xff; |
| 166 | if (res) { |
| 167 | pr_info("error @addr[0x%lx:0x%lx] 0x%x -> 0xff diff 0x%x\n", |
| 168 | (unsigned long)addr, (unsigned long)offset + i, |
| 169 | *su1, res); |
| 170 | bitflips += hweight8(res); |
| 171 | } |
| 172 | } |
| 173 | |
| 174 | return bitflips; |
| 175 | } |
| 176 | |
/*
 * Read back the OOB area of every page in eraseblock @ebnum and compare
 * it against the expected pseudo-random data, honouring the current
 * use_offset/use_len window.  Bitflips within bitflip_limit per page are
 * reported but tolerated.
 *
 * Returns 0 on success, a negative error code on a failed OOB read, or
 * -1 on a short read or after more than 1000 accumulated verify errors.
 */
static int verify_eraseblock(int ebnum)
{
	int i;
	struct mtd_oob_ops ops;
	int err = 0;
	loff_t addr = (loff_t)ebnum * mtd->erasesize;
	size_t bitflips;

	/* Regenerate the same PRNG stream that write_eraseblock() used */
	prandom_bytes_state(&rnd_state, writebuf, use_len_max * pgcnt);
	for (i = 0; i < pgcnt; ++i, addr += mtd->writesize) {
		/* First read: only the window that was actually written */
		ops.mode = MTD_OPS_AUTO_OOB;
		ops.len = 0;
		ops.retlen = 0;
		ops.ooblen = use_len;
		ops.oobretlen = 0;
		ops.ooboffs = use_offset;
		ops.datbuf = NULL;
		ops.oobbuf = readbuf;
		err = mtd_read_oob(mtd, addr, &ops);
		/* corrected bitflips are not a failure; the limit check below decides */
		if (mtd_is_bitflip(err))
			err = 0;

		if (err || ops.oobretlen != use_len) {
			pr_err("error: readoob failed at %#llx\n",
			       (long long)addr);
			errcnt += 1;
			return err ? err : -1;
		}

		bitflips = memcmpshow(addr, readbuf,
				      writebuf + (use_len_max * i) + use_offset,
				      use_len);
		if (bitflips > bitflip_limit) {
			pr_err("error: verify failed at %#llx\n",
			       (long long)addr);
			errcnt += 1;
			if (errcnt > 1000) {
				pr_err("error: too many errors\n");
				return -1;
			}
		} else if (bitflips) {
			pr_info("ignoring error as within bitflip_limit\n");
		}

		/*
		 * If only part of the available OOB was written, re-read the
		 * whole OOB area and additionally check that the bytes
		 * outside the written window are still erased (0xff).
		 */
		if (use_offset != 0 || use_len < mtd->oobavail) {
			int k;

			ops.mode = MTD_OPS_AUTO_OOB;
			ops.len = 0;
			ops.retlen = 0;
			ops.ooblen = mtd->oobavail;
			ops.oobretlen = 0;
			ops.ooboffs = 0;
			ops.datbuf = NULL;
			ops.oobbuf = readbuf;
			err = mtd_read_oob(mtd, addr, &ops);
			if (mtd_is_bitflip(err))
				err = 0;

			if (err || ops.oobretlen != mtd->oobavail) {
				pr_err("error: readoob failed at %#llx\n",
				       (long long)addr);
				errcnt += 1;
				return err ? err : -1;
			}
			/* re-check the written window within the full read */
			bitflips = memcmpshowoffset(addr, use_offset,
						    readbuf + use_offset,
						    writebuf + (use_len_max * i) + use_offset,
						    use_len);

			/* verify pre-offset area for 0xff */
			bitflips += memffshow(addr, 0, readbuf, use_offset);

			/* verify post-(use_offset + use_len) area for 0xff */
			k = use_offset + use_len;
			bitflips += memffshow(addr, k, readbuf + k,
					      mtd->oobavail - k);

			if (bitflips > bitflip_limit) {
				pr_err("error: verify failed at %#llx\n",
				       (long long)addr);
				errcnt += 1;
				if (errcnt > 1000) {
					pr_err("error: too many errors\n");
					return -1;
				}
			} else if (bitflips) {
				pr_info("ignoring errors as within bitflip limit\n");
			}
		}
		if (vary_offset)
			do_vary_offset();
	}
	return err;
}
| 272 | |
| 273 | static int verify_eraseblock_in_one_go(int ebnum) |
| 274 | { |
| 275 | struct mtd_oob_ops ops; |
| 276 | int err = 0; |
Brian Norris | 1001ff7 | 2014-07-21 19:07:12 -0700 | [diff] [blame] | 277 | loff_t addr = (loff_t)ebnum * mtd->erasesize; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 278 | size_t len = mtd->oobavail * pgcnt; |
| 279 | size_t oobavail = mtd->oobavail; |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 280 | size_t bitflips; |
| 281 | int i; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 282 | |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 283 | prandom_bytes_state(&rnd_state, writebuf, len); |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 284 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 285 | ops.len = 0; |
| 286 | ops.retlen = 0; |
| 287 | ops.ooblen = len; |
| 288 | ops.oobretlen = 0; |
| 289 | ops.ooboffs = 0; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 290 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 291 | ops.oobbuf = readbuf; |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 292 | |
| 293 | /* read entire block's OOB at one go */ |
Artem Bityutskiy | fd2819b | 2011-12-23 18:27:05 +0200 | [diff] [blame] | 294 | err = mtd_read_oob(mtd, addr, &ops); |
Miquel Raynal | 12663b4 | 2018-01-11 21:39:20 +0100 | [diff] [blame] | 295 | if (mtd_is_bitflip(err)) |
| 296 | err = 0; |
| 297 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 298 | if (err || ops.oobretlen != len) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 299 | pr_err("error: readoob failed at %#llx\n", |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 300 | (long long)addr); |
| 301 | errcnt += 1; |
| 302 | return err ? err : -1; |
| 303 | } |
Roger Quadros | afc0ea1 | 2014-10-21 16:53:28 +0300 | [diff] [blame] | 304 | |
| 305 | /* verify one page OOB at a time for bitflip per page limit check */ |
| 306 | for (i = 0; i < pgcnt; ++i, addr += mtd->writesize) { |
| 307 | bitflips = memcmpshow(addr, readbuf + (i * oobavail), |
| 308 | writebuf + (i * oobavail), oobavail); |
| 309 | if (bitflips > bitflip_limit) { |
| 310 | pr_err("error: verify failed at %#llx\n", |
| 311 | (long long)addr); |
| 312 | errcnt += 1; |
| 313 | if (errcnt > 1000) { |
| 314 | pr_err("error: too many errors\n"); |
| 315 | return -1; |
| 316 | } |
| 317 | } else if (bitflips) { |
| 318 | pr_info("ignoring error as within bitflip_limit\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 319 | } |
| 320 | } |
| 321 | |
| 322 | return err; |
| 323 | } |
| 324 | |
| 325 | static int verify_all_eraseblocks(void) |
| 326 | { |
| 327 | int err; |
| 328 | unsigned int i; |
| 329 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 330 | pr_info("verifying all eraseblocks\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 331 | for (i = 0; i < ebcnt; ++i) { |
| 332 | if (bbt[i]) |
| 333 | continue; |
| 334 | err = verify_eraseblock(i); |
| 335 | if (err) |
| 336 | return err; |
| 337 | if (i % 256 == 0) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 338 | pr_info("verified up to eraseblock %u\n", i); |
Richard Weinberger | 2a6a28e7 | 2015-03-29 21:52:06 +0200 | [diff] [blame] | 339 | |
| 340 | err = mtdtest_relax(); |
| 341 | if (err) |
| 342 | return err; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 343 | } |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 344 | pr_info("verified %u eraseblocks\n", i); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 345 | return 0; |
| 346 | } |
| 347 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 348 | static int __init mtd_oobtest_init(void) |
| 349 | { |
| 350 | int err = 0; |
| 351 | unsigned int i; |
| 352 | uint64_t tmp; |
| 353 | struct mtd_oob_ops ops; |
| 354 | loff_t addr = 0, addr0; |
| 355 | |
| 356 | printk(KERN_INFO "\n"); |
| 357 | printk(KERN_INFO "=================================================\n"); |
Wolfram Sang | 7406060 | 2011-10-30 00:11:53 +0200 | [diff] [blame] | 358 | |
| 359 | if (dev < 0) { |
Masanari Iida | 064a769 | 2012-11-09 23:20:58 +0900 | [diff] [blame] | 360 | pr_info("Please specify a valid mtd-device via module parameter\n"); |
| 361 | pr_crit("CAREFUL: This test wipes all data on the specified MTD device!\n"); |
Wolfram Sang | 7406060 | 2011-10-30 00:11:53 +0200 | [diff] [blame] | 362 | return -EINVAL; |
| 363 | } |
| 364 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 365 | pr_info("MTD device: %d\n", dev); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 366 | |
| 367 | mtd = get_mtd_device(NULL, dev); |
| 368 | if (IS_ERR(mtd)) { |
| 369 | err = PTR_ERR(mtd); |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 370 | pr_err("error: cannot get MTD device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 371 | return err; |
| 372 | } |
| 373 | |
Huang Shijie | 818b973 | 2013-09-25 14:58:17 +0800 | [diff] [blame] | 374 | if (!mtd_type_is_nand(mtd)) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 375 | pr_info("this test requires NAND flash\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 376 | goto out; |
| 377 | } |
| 378 | |
| 379 | tmp = mtd->size; |
| 380 | do_div(tmp, mtd->erasesize); |
| 381 | ebcnt = tmp; |
| 382 | pgcnt = mtd->erasesize / mtd->writesize; |
| 383 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 384 | pr_info("MTD device size %llu, eraseblock size %u, " |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 385 | "page size %u, count of eraseblocks %u, pages per " |
| 386 | "eraseblock %u, OOB size %u\n", |
| 387 | (unsigned long long)mtd->size, mtd->erasesize, |
| 388 | mtd->writesize, ebcnt, pgcnt, mtd->oobsize); |
| 389 | |
| 390 | err = -ENOMEM; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 391 | readbuf = kmalloc(mtd->erasesize, GFP_KERNEL); |
Brian Norris | 33777e6 | 2013-05-02 14:18:51 -0700 | [diff] [blame] | 392 | if (!readbuf) |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 393 | goto out; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 394 | writebuf = kmalloc(mtd->erasesize, GFP_KERNEL); |
Brian Norris | 33777e6 | 2013-05-02 14:18:51 -0700 | [diff] [blame] | 395 | if (!writebuf) |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 396 | goto out; |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 397 | bbt = kzalloc(ebcnt, GFP_KERNEL); |
| 398 | if (!bbt) |
| 399 | goto out; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 400 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 401 | err = mtdtest_scan_for_bad_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 402 | if (err) |
| 403 | goto out; |
| 404 | |
| 405 | use_offset = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 406 | use_len = mtd->oobavail; |
| 407 | use_len_max = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 408 | vary_offset = 0; |
| 409 | |
| 410 | /* First test: write all OOB, read it back and verify */ |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 411 | pr_info("test 1 of 5\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 412 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 413 | err = mtdtest_erase_good_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 414 | if (err) |
| 415 | goto out; |
| 416 | |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 417 | prandom_seed_state(&rnd_state, 1); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 418 | err = write_whole_device(); |
| 419 | if (err) |
| 420 | goto out; |
| 421 | |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 422 | prandom_seed_state(&rnd_state, 1); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 423 | err = verify_all_eraseblocks(); |
| 424 | if (err) |
| 425 | goto out; |
| 426 | |
| 427 | /* |
| 428 | * Second test: write all OOB, a block at a time, read it back and |
| 429 | * verify. |
| 430 | */ |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 431 | pr_info("test 2 of 5\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 432 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 433 | err = mtdtest_erase_good_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 434 | if (err) |
| 435 | goto out; |
| 436 | |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 437 | prandom_seed_state(&rnd_state, 3); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 438 | err = write_whole_device(); |
| 439 | if (err) |
| 440 | goto out; |
| 441 | |
| 442 | /* Check all eraseblocks */ |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 443 | prandom_seed_state(&rnd_state, 3); |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 444 | pr_info("verifying all eraseblocks\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 445 | for (i = 0; i < ebcnt; ++i) { |
| 446 | if (bbt[i]) |
| 447 | continue; |
| 448 | err = verify_eraseblock_in_one_go(i); |
| 449 | if (err) |
| 450 | goto out; |
| 451 | if (i % 256 == 0) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 452 | pr_info("verified up to eraseblock %u\n", i); |
Richard Weinberger | 2a6a28e7 | 2015-03-29 21:52:06 +0200 | [diff] [blame] | 453 | |
| 454 | err = mtdtest_relax(); |
| 455 | if (err) |
| 456 | goto out; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 457 | } |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 458 | pr_info("verified %u eraseblocks\n", i); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 459 | |
| 460 | /* |
| 461 | * Third test: write OOB at varying offsets and lengths, read it back |
| 462 | * and verify. |
| 463 | */ |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 464 | pr_info("test 3 of 5\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 465 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 466 | err = mtdtest_erase_good_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 467 | if (err) |
| 468 | goto out; |
| 469 | |
| 470 | /* Write all eraseblocks */ |
| 471 | use_offset = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 472 | use_len = mtd->oobavail; |
| 473 | use_len_max = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 474 | vary_offset = 1; |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 475 | prandom_seed_state(&rnd_state, 5); |
Akinobu Mita | f54d633 | 2009-10-09 18:43:52 +0900 | [diff] [blame] | 476 | |
| 477 | err = write_whole_device(); |
| 478 | if (err) |
| 479 | goto out; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 480 | |
| 481 | /* Check all eraseblocks */ |
| 482 | use_offset = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 483 | use_len = mtd->oobavail; |
| 484 | use_len_max = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 485 | vary_offset = 1; |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 486 | prandom_seed_state(&rnd_state, 5); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 487 | err = verify_all_eraseblocks(); |
| 488 | if (err) |
| 489 | goto out; |
| 490 | |
| 491 | use_offset = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 492 | use_len = mtd->oobavail; |
| 493 | use_len_max = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 494 | vary_offset = 0; |
| 495 | |
| 496 | /* Fourth test: try to write off end of device */ |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 497 | pr_info("test 4 of 5\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 498 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 499 | err = mtdtest_erase_good_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 500 | if (err) |
| 501 | goto out; |
| 502 | |
| 503 | addr0 = 0; |
Roel Kluin | c6f7e7b | 2009-07-31 16:21:01 +0200 | [diff] [blame] | 504 | for (i = 0; i < ebcnt && bbt[i]; ++i) |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 505 | addr0 += mtd->erasesize; |
| 506 | |
| 507 | /* Attempt to write off end of OOB */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 508 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 509 | ops.len = 0; |
| 510 | ops.retlen = 0; |
| 511 | ops.ooblen = 1; |
| 512 | ops.oobretlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 513 | ops.ooboffs = mtd->oobavail; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 514 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 515 | ops.oobbuf = writebuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 516 | pr_info("attempting to start write past end of OOB\n"); |
| 517 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | a2cc5ba | 2011-12-23 18:29:55 +0200 | [diff] [blame] | 518 | err = mtd_write_oob(mtd, addr0, &ops); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 519 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 520 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 521 | err = 0; |
| 522 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 523 | pr_err("error: can write past end of OOB\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 524 | errcnt += 1; |
| 525 | } |
| 526 | |
| 527 | /* Attempt to read off end of OOB */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 528 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 529 | ops.len = 0; |
| 530 | ops.retlen = 0; |
| 531 | ops.ooblen = 1; |
| 532 | ops.oobretlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 533 | ops.ooboffs = mtd->oobavail; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 534 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 535 | ops.oobbuf = readbuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 536 | pr_info("attempting to start read past end of OOB\n"); |
| 537 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | fd2819b | 2011-12-23 18:27:05 +0200 | [diff] [blame] | 538 | err = mtd_read_oob(mtd, addr0, &ops); |
Miquel Raynal | 12663b4 | 2018-01-11 21:39:20 +0100 | [diff] [blame] | 539 | if (mtd_is_bitflip(err)) |
| 540 | err = 0; |
| 541 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 542 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 543 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 544 | err = 0; |
| 545 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 546 | pr_err("error: can read past end of OOB\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 547 | errcnt += 1; |
| 548 | } |
| 549 | |
| 550 | if (bbt[ebcnt - 1]) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 551 | pr_info("skipping end of device tests because last " |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 552 | "block is bad\n"); |
| 553 | else { |
| 554 | /* Attempt to write off end of device */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 555 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 556 | ops.len = 0; |
| 557 | ops.retlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 558 | ops.ooblen = mtd->oobavail + 1; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 559 | ops.oobretlen = 0; |
| 560 | ops.ooboffs = 0; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 561 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 562 | ops.oobbuf = writebuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 563 | pr_info("attempting to write past end of device\n"); |
| 564 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | a2cc5ba | 2011-12-23 18:29:55 +0200 | [diff] [blame] | 565 | err = mtd_write_oob(mtd, mtd->size - mtd->writesize, &ops); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 566 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 567 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 568 | err = 0; |
| 569 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 570 | pr_err("error: wrote past end of device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 571 | errcnt += 1; |
| 572 | } |
| 573 | |
| 574 | /* Attempt to read off end of device */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 575 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 576 | ops.len = 0; |
| 577 | ops.retlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 578 | ops.ooblen = mtd->oobavail + 1; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 579 | ops.oobretlen = 0; |
| 580 | ops.ooboffs = 0; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 581 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 582 | ops.oobbuf = readbuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 583 | pr_info("attempting to read past end of device\n"); |
| 584 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | fd2819b | 2011-12-23 18:27:05 +0200 | [diff] [blame] | 585 | err = mtd_read_oob(mtd, mtd->size - mtd->writesize, &ops); |
Miquel Raynal | 12663b4 | 2018-01-11 21:39:20 +0100 | [diff] [blame] | 586 | if (mtd_is_bitflip(err)) |
| 587 | err = 0; |
| 588 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 589 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 590 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 591 | err = 0; |
| 592 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 593 | pr_err("error: read past end of device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 594 | errcnt += 1; |
| 595 | } |
| 596 | |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 597 | err = mtdtest_erase_eraseblock(mtd, ebcnt - 1); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 598 | if (err) |
| 599 | goto out; |
| 600 | |
| 601 | /* Attempt to write off end of device */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 602 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 603 | ops.len = 0; |
| 604 | ops.retlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 605 | ops.ooblen = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 606 | ops.oobretlen = 0; |
| 607 | ops.ooboffs = 1; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 608 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 609 | ops.oobbuf = writebuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 610 | pr_info("attempting to write past end of device\n"); |
| 611 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | a2cc5ba | 2011-12-23 18:29:55 +0200 | [diff] [blame] | 612 | err = mtd_write_oob(mtd, mtd->size - mtd->writesize, &ops); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 613 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 614 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 615 | err = 0; |
| 616 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 617 | pr_err("error: wrote past end of device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 618 | errcnt += 1; |
| 619 | } |
| 620 | |
| 621 | /* Attempt to read off end of device */ |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 622 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 623 | ops.len = 0; |
| 624 | ops.retlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 625 | ops.ooblen = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 626 | ops.oobretlen = 0; |
| 627 | ops.ooboffs = 1; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 628 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 629 | ops.oobbuf = readbuf; |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 630 | pr_info("attempting to read past end of device\n"); |
| 631 | pr_info("an error is expected...\n"); |
Artem Bityutskiy | fd2819b | 2011-12-23 18:27:05 +0200 | [diff] [blame] | 632 | err = mtd_read_oob(mtd, mtd->size - mtd->writesize, &ops); |
Miquel Raynal | 12663b4 | 2018-01-11 21:39:20 +0100 | [diff] [blame] | 633 | if (mtd_is_bitflip(err)) |
| 634 | err = 0; |
| 635 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 636 | if (err) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 637 | pr_info("error occurred as expected\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 638 | err = 0; |
| 639 | } else { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 640 | pr_err("error: read past end of device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 641 | errcnt += 1; |
| 642 | } |
| 643 | } |
| 644 | |
| 645 | /* Fifth test: write / read across block boundaries */ |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 646 | pr_info("test 5 of 5\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 647 | |
| 648 | /* Erase all eraseblocks */ |
Akinobu Mita | 4bf527a | 2013-08-03 18:52:09 +0900 | [diff] [blame] | 649 | err = mtdtest_erase_good_eraseblocks(mtd, bbt, 0, ebcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 650 | if (err) |
| 651 | goto out; |
| 652 | |
| 653 | /* Write all eraseblocks */ |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 654 | prandom_seed_state(&rnd_state, 11); |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 655 | pr_info("writing OOBs of whole device\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 656 | for (i = 0; i < ebcnt - 1; ++i) { |
| 657 | int cnt = 2; |
| 658 | int pg; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 659 | size_t sz = mtd->oobavail; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 660 | if (bbt[i] || bbt[i + 1]) |
| 661 | continue; |
Brian Norris | 1001ff7 | 2014-07-21 19:07:12 -0700 | [diff] [blame] | 662 | addr = (loff_t)(i + 1) * mtd->erasesize - mtd->writesize; |
Akinobu Mita | be54f8f | 2014-03-08 00:24:10 +0900 | [diff] [blame] | 663 | prandom_bytes_state(&rnd_state, writebuf, sz * cnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 664 | for (pg = 0; pg < cnt; ++pg) { |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 665 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 666 | ops.len = 0; |
| 667 | ops.retlen = 0; |
| 668 | ops.ooblen = sz; |
| 669 | ops.oobretlen = 0; |
| 670 | ops.ooboffs = 0; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 671 | ops.datbuf = NULL; |
Akinobu Mita | be54f8f | 2014-03-08 00:24:10 +0900 | [diff] [blame] | 672 | ops.oobbuf = writebuf + pg * sz; |
Artem Bityutskiy | a2cc5ba | 2011-12-23 18:29:55 +0200 | [diff] [blame] | 673 | err = mtd_write_oob(mtd, addr, &ops); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 674 | if (err) |
| 675 | goto out; |
| 676 | if (i % 256 == 0) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 677 | pr_info("written up to eraseblock %u\n", i); |
Richard Weinberger | 2a6a28e7 | 2015-03-29 21:52:06 +0200 | [diff] [blame] | 678 | |
| 679 | err = mtdtest_relax(); |
| 680 | if (err) |
| 681 | goto out; |
| 682 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 683 | addr += mtd->writesize; |
| 684 | } |
| 685 | } |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 686 | pr_info("written %u eraseblocks\n", i); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 687 | |
| 688 | /* Check all eraseblocks */ |
Akinobu Mita | 8dad049 | 2013-02-27 17:05:33 -0800 | [diff] [blame] | 689 | prandom_seed_state(&rnd_state, 11); |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 690 | pr_info("verifying all eraseblocks\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 691 | for (i = 0; i < ebcnt - 1; ++i) { |
| 692 | if (bbt[i] || bbt[i + 1]) |
| 693 | continue; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 694 | prandom_bytes_state(&rnd_state, writebuf, mtd->oobavail * 2); |
Brian Norris | 1001ff7 | 2014-07-21 19:07:12 -0700 | [diff] [blame] | 695 | addr = (loff_t)(i + 1) * mtd->erasesize - mtd->writesize; |
Brian Norris | 0612b9d | 2011-08-30 18:45:40 -0700 | [diff] [blame] | 696 | ops.mode = MTD_OPS_AUTO_OOB; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 697 | ops.len = 0; |
| 698 | ops.retlen = 0; |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 699 | ops.ooblen = mtd->oobavail * 2; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 700 | ops.oobretlen = 0; |
| 701 | ops.ooboffs = 0; |
Hannes Eder | 23d4249 | 2009-03-05 20:15:01 +0100 | [diff] [blame] | 702 | ops.datbuf = NULL; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 703 | ops.oobbuf = readbuf; |
Artem Bityutskiy | fd2819b | 2011-12-23 18:27:05 +0200 | [diff] [blame] | 704 | err = mtd_read_oob(mtd, addr, &ops); |
Miquel Raynal | 12663b4 | 2018-01-11 21:39:20 +0100 | [diff] [blame] | 705 | if (mtd_is_bitflip(err)) |
| 706 | err = 0; |
| 707 | |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 708 | if (err) |
| 709 | goto out; |
Roger Quadros | 5a66088 | 2014-10-21 16:53:27 +0300 | [diff] [blame] | 710 | if (memcmpshow(addr, readbuf, writebuf, |
Boris BREZILLON | f5b8aa7 | 2016-03-07 10:46:51 +0100 | [diff] [blame] | 711 | mtd->oobavail * 2)) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 712 | pr_err("error: verify failed at %#llx\n", |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 713 | (long long)addr); |
| 714 | errcnt += 1; |
| 715 | if (errcnt > 1000) { |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 716 | pr_err("error: too many errors\n"); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 717 | goto out; |
| 718 | } |
| 719 | } |
| 720 | if (i % 256 == 0) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 721 | pr_info("verified up to eraseblock %u\n", i); |
Richard Weinberger | 2a6a28e7 | 2015-03-29 21:52:06 +0200 | [diff] [blame] | 722 | |
| 723 | err = mtdtest_relax(); |
| 724 | if (err) |
| 725 | goto out; |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 726 | } |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 727 | pr_info("verified %u eraseblocks\n", i); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 728 | |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 729 | pr_info("finished with %d errors\n", errcnt); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 730 | out: |
| 731 | kfree(bbt); |
| 732 | kfree(writebuf); |
| 733 | kfree(readbuf); |
| 734 | put_mtd_device(mtd); |
| 735 | if (err) |
Vikram Narayanan | 0481027 | 2012-10-10 23:12:02 +0530 | [diff] [blame] | 736 | pr_info("error %d occurred\n", err); |
Artem Bityutskiy | e3644da | 2008-12-08 13:33:29 +0200 | [diff] [blame] | 737 | printk(KERN_INFO "=================================================\n"); |
| 738 | return err; |
| 739 | } |
/* Run mtd_oobtest_init() (the whole OOB test suite) on module load. */
module_init(mtd_oobtest_init);

| 742 | static void __exit mtd_oobtest_exit(void) |
| 743 | { |
| 744 | return; |
| 745 | } |
/* Register the (empty) teardown hook invoked on module unload. */
module_exit(mtd_oobtest_exit);

/* Standard module metadata; GPL license keeps all kernel symbols usable. */
MODULE_DESCRIPTION("Out-of-band test module");
MODULE_AUTHOR("Adrian Hunter");
MODULE_LICENSE("GPL");