/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Quick'n'dirty IP checksum ...
 *
 * Copyright (C) 1998, 1999 Ralf Baechle
 * Copyright (C) 1999 Silicon Graphics, Inc.
 * Copyright (C) 2007 Maciej W. Rozycki
 * Copyright (C) 2014 Imagination Technologies Ltd.
 */
#include <linux/errno.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

#ifdef CONFIG_64BIT
/*
 * As we are sharing the code base with the mips32 tree (which uses the o32
 * ABI register definitions), we need to redefine the register definitions
 * from the n64 ABI register naming to the o32 ABI register naming.
 */
#undef t0
#undef t1
#undef t2
#undef t3
#define t0	$8
#define t1	$9
#define t2	$10
#define t3	$11
#define t4	$12
#define t5	$13
#define t6	$14
#define t7	$15

#define USE_DOUBLE
#endif

#ifdef USE_DOUBLE

#define LOAD	ld
#define LOAD32	lwu
#define ADD	daddu
#define NBYTES	8

#else

#define LOAD	lw
#define LOAD32	lw
#define ADD	addu
#define NBYTES	4

#endif /* USE_DOUBLE */

#define UNIT(unit)	((unit)*NBYTES)

#define ADDC(sum,reg)						\
	.set	push;						\
	.set	noat;						\
	ADD	sum, reg;					\
	sltu	v1, sum, reg;					\
	ADD	sum, v1;					\
	.set	pop
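
/*
 * ADDC does a ones'-complement style accumulate: if the ADD wraps,
 * sltu recovers the carry into v1 and it is added back in (the
 * "end-around carry"), e.g. 0xffffffff + 0x2 wraps to 0x1, and the
 * carried 1 gives 0x2.
 */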

#define ADDC32(sum,reg)						\
	.set	push;						\
	.set	noat;						\
	addu	sum, reg;					\
	sltu	v1, sum, reg;					\
	addu	sum, v1;					\
	.set	pop

#define CSUM_BIGCHUNK1(src, offset, sum, _t0, _t1, _t2, _t3)	\
	LOAD	_t0, (offset + UNIT(0))(src);			\
	LOAD	_t1, (offset + UNIT(1))(src);			\
	LOAD	_t2, (offset + UNIT(2))(src);			\
	LOAD	_t3, (offset + UNIT(3))(src);			\
	ADDC(_t0, _t1);						\
	ADDC(_t2, _t3);						\
	ADDC(sum, _t0);						\
	ADDC(sum, _t2)

#ifdef USE_DOUBLE
#define CSUM_BIGCHUNK(src, offset, sum, _t0, _t1, _t2, _t3)	\
	CSUM_BIGCHUNK1(src, offset, sum, _t0, _t1, _t2, _t3)
#else
#define CSUM_BIGCHUNK(src, offset, sum, _t0, _t1, _t2, _t3)	\
	CSUM_BIGCHUNK1(src, offset, sum, _t0, _t1, _t2, _t3);	\
	CSUM_BIGCHUNK1(src, offset + 0x10, sum, _t0, _t1, _t2, _t3)
#endif
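
/*
 * Either flavour of CSUM_BIGCHUNK covers 0x20 bytes: one pass of
 * four doublewords with USE_DOUBLE, otherwise two CSUM_BIGCHUNK1
 * passes of four words each.  The .Lmove_128bytes loop below issues
 * four of these per iteration.
 */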

/*
 * a0: source address
 * a1: length of the area to checksum
 * a2: partial checksum
 */

#define src a0
#define sum v0

	.text
	.set	noreorder
	.align	5
LEAF(csum_partial)
EXPORT_SYMBOL(csum_partial)
	move	sum, zero
	move	t7, zero

	sltiu	t8, a1, 0x8
	bnez	t8, .Lsmall_csumcpy		/* < 8 bytes to copy */
	move	t2, a1

	andi	t7, src, 0x1			/* odd buffer? */

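/*
 * Align src step by step -- a byte, then a halfword, a word and a
 * doubleword -- so the unrolled loops below only ever run with an
 * aligned source.
 */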
.Lhword_align:
	beqz	t7, .Lword_align
	andi	t8, src, 0x2

	lbu	t0, (src)
	LONG_SUBU	a1, a1, 0x1
#ifdef __MIPSEL__
	sll	t0, t0, 8
#endif
	ADDC(sum, t0)
	PTR_ADDU	src, src, 0x1
	andi	t8, src, 0x2

.Lword_align:
	beqz	t8, .Ldword_align
	sltiu	t8, a1, 56

	lhu	t0, (src)
	LONG_SUBU	a1, a1, 0x2
	ADDC(sum, t0)
	sltiu	t8, a1, 56
	PTR_ADDU	src, src, 0x2

.Ldword_align:
	bnez	t8, .Ldo_end_words
	move	t8, a1

	andi	t8, src, 0x4
	beqz	t8, .Lqword_align
	andi	t8, src, 0x8

	LOAD32	t0, 0x00(src)
	LONG_SUBU	a1, a1, 0x4
	ADDC(sum, t0)
	PTR_ADDU	src, src, 0x4
	andi	t8, src, 0x8

.Lqword_align:
	beqz	t8, .Loword_align
	andi	t8, src, 0x10

#ifdef USE_DOUBLE
	ld	t0, 0x00(src)
	LONG_SUBU	a1, a1, 0x8
	ADDC(sum, t0)
#else
	lw	t0, 0x00(src)
	lw	t1, 0x04(src)
	LONG_SUBU	a1, a1, 0x8
	ADDC(sum, t0)
	ADDC(sum, t1)
#endif
	PTR_ADDU	src, src, 0x8
	andi	t8, src, 0x10

.Loword_align:
	beqz	t8, .Lbegin_movement
	LONG_SRL	t8, a1, 0x7

#ifdef USE_DOUBLE
	ld	t0, 0x00(src)
	ld	t1, 0x08(src)
	ADDC(sum, t0)
	ADDC(sum, t1)
#else
	CSUM_BIGCHUNK1(src, 0x00, sum, t0, t1, t3, t4)
#endif
	LONG_SUBU	a1, a1, 0x10
	PTR_ADDU	src, src, 0x10
	LONG_SRL	t8, a1, 0x7

.Lbegin_movement:
	beqz	t8, 1f
	andi	t2, a1, 0x40

.Lmove_128bytes:
	CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
	CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
	CSUM_BIGCHUNK(src, 0x40, sum, t0, t1, t3, t4)
	CSUM_BIGCHUNK(src, 0x60, sum, t0, t1, t3, t4)
	LONG_SUBU	t8, t8, 0x01
	.set	reorder				/* DADDI_WAR */
	PTR_ADDU	src, src, 0x80
	bnez	t8, .Lmove_128bytes
	.set	noreorder

1:
	beqz	t2, 1f
	andi	t2, a1, 0x20

.Lmove_64bytes:
	CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
	CSUM_BIGCHUNK(src, 0x20, sum, t0, t1, t3, t4)
	PTR_ADDU	src, src, 0x40

1:
	beqz	t2, .Ldo_end_words
	andi	t8, a1, 0x1c

.Lmove_32bytes:
	CSUM_BIGCHUNK(src, 0x00, sum, t0, t1, t3, t4)
	andi	t8, a1, 0x1c
	PTR_ADDU	src, src, 0x20

.Ldo_end_words:
	beqz	t8, .Lsmall_csumcpy
	andi	t2, a1, 0x3
	LONG_SRL	t8, t8, 0x2

.Lend_words:
	LOAD32	t0, (src)
	LONG_SUBU	t8, t8, 0x1
	ADDC(sum, t0)
	.set	reorder				/* DADDI_WAR */
	PTR_ADDU	src, src, 0x4
	bnez	t8, .Lend_words
	.set	noreorder

/* unknown src alignment and < 8 bytes to go */
.Lsmall_csumcpy:
	move	a1, t2

	andi	t0, a1, 4
	beqz	t0, 1f
	andi	t0, a1, 2

	/* Still a full word to go */
	ulw	t1, (src)
	PTR_ADDIU	src, 4
#ifdef USE_DOUBLE
	dsll	t1, t1, 32			/* clear lower 32bit */
#endif
	ADDC(sum, t1)

1:	move	t1, zero
	beqz	t0, 1f
	andi	t0, a1, 1

	/* Still a halfword to go */
	ulhu	t1, (src)
	PTR_ADDIU	src, 2

1:	beqz	t0, 1f
	sll	t1, t1, 16

	lbu	t2, (src)
	nop

#ifdef __MIPSEB__
	sll	t2, t2, 8
#endif
	or	t1, t2

1:	ADDC(sum, t1)

	/* fold checksum */
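	/*
	 * With USE_DOUBLE the accumulator is 64 bits wide; fold it to
	 * 32 bits by adding the two halves with an end-around carry:
	 * sum = sum(63:32) + sum(31:0) (+ carry).
	 */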
#ifdef USE_DOUBLE
	dsll32	v1, sum, 0
	daddu	sum, v1
	sltu	v1, sum, v1
	dsra32	sum, sum, 0
	addu	sum, v1
#endif

	/* odd buffer alignment? */
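	/*
	 * A buffer that started on an odd address put every byte in
	 * the opposite lane of its 16-bit word, so swap the bytes of
	 * each halfword of the sum: wsbh does it branchlessly on R2
	 * and later cores, the generic path rotates each halfword by
	 * 8 through the 0x00ff00ff mask instead.
	 */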
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \
    defined(CONFIG_CPU_LOONGSON64)
	.set	push
	.set	arch=mips32r2
	wsbh	v1, sum
	movn	sum, v1, t7
	.set	pop
#else
	beqz	t7, 1f			/* odd buffer alignment? */
	lui	v1, 0x00ff
	addu	v1, 0x00ff
	and	t0, sum, v1
	sll	t0, t0, 8
	srl	sum, sum, 8
	and	sum, sum, v1
	or	sum, sum, t0
1:
#endif
	.set	reorder
	/* Add the passed partial csum. */
	ADDC32(sum, a2)
	jr	ra
	.set	noreorder
	END(csum_partial)


/*
 * checksum and copy routines based on memcpy.S
 *
 *	csum_partial_copy_nocheck(src, dst, len)
 *	__csum_partial_copy_kernel(src, dst, len)
 *
 * See "Spec" in memcpy.S for details.  Unlike __copy_user, all
 * functions in this file use the standard calling convention.
 */

#define src a0
#define dst a1
#define len a2
#define sum v0
#define odd t8

/*
 * All exception handlers simply return 0.
 */

/* Instruction type */
#define LD_INSN 1
#define ST_INSN 2
#define LEGACY_MODE 1
#define EVA_MODE    2
#define USEROP   1
#define KERNELOP 2
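
/*
 * LEGACY_MODE builds the plain kernel flavour of the copy loop;
 * EVA_MODE builds the user-copy flavours, where the \from/\to
 * macro arguments select which side of the copy must use the EVA
 * user load/store variants (see the instantiations at the bottom
 * of this file).
 */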

/*
 * Wrapper to add an entry in the exception table
 * in case the insn causes a memory exception.
 * Arguments:
 *	insn	: Load/store instruction
 *	type	: Instruction type
 *	reg	: Register
 *	addr	: Address
 * Faulting accesses are diverted to .L_exc below.
 */
#define EXC(insn, type, reg, addr)			\
	.if \mode == LEGACY_MODE;			\
9:		insn reg, addr;				\
		.section __ex_table,"a";		\
		PTR	9b, .L_exc;			\
		.previous;				\
	/* This is enabled in EVA mode */		\
	.else;						\
		/* If loading from user or storing to user */	\
		.if ((\from == USEROP) && (type == LD_INSN)) || \
		    ((\to == USEROP) && (type == ST_INSN));	\
9:			__BUILD_EVA_INSN(insn##e, reg, addr);	\
			.section __ex_table,"a";	\
			PTR	9b, .L_exc;		\
			.previous;			\
		.else;					\
			/* EVA without exception */	\
			insn reg, addr;			\
		.endif;					\
	.endif
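
/*
 * The local label 9: plus the __ex_table entry let the fault fixup
 * code resume at .L_exc when the wrapped access faults, so a copy
 * that hits a bad address simply returns a checksum of 0.
 */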

#undef LOAD

#ifdef USE_DOUBLE

#define LOADK	ld /* No exception */
#define LOAD(reg, addr)		EXC(ld, LD_INSN, reg, addr)
#define LOADBU(reg, addr)	EXC(lbu, LD_INSN, reg, addr)
#define LOADL(reg, addr)	EXC(ldl, LD_INSN, reg, addr)
#define LOADR(reg, addr)	EXC(ldr, LD_INSN, reg, addr)
#define STOREB(reg, addr)	EXC(sb, ST_INSN, reg, addr)
#define STOREL(reg, addr)	EXC(sdl, ST_INSN, reg, addr)
#define STORER(reg, addr)	EXC(sdr, ST_INSN, reg, addr)
#define STORE(reg, addr)	EXC(sd, ST_INSN, reg, addr)
#define ADD    daddu
#define SUB    dsubu
#define SRL    dsrl
#define SLL    dsll
#define SLLV   dsllv
#define SRLV   dsrlv
#define NBYTES 8
#define LOG_NBYTES 3

#else

#define LOADK	lw /* No exception */
#define LOAD(reg, addr)		EXC(lw, LD_INSN, reg, addr)
#define LOADBU(reg, addr)	EXC(lbu, LD_INSN, reg, addr)
#define LOADL(reg, addr)	EXC(lwl, LD_INSN, reg, addr)
#define LOADR(reg, addr)	EXC(lwr, LD_INSN, reg, addr)
#define STOREB(reg, addr)	EXC(sb, ST_INSN, reg, addr)
#define STOREL(reg, addr)	EXC(swl, ST_INSN, reg, addr)
#define STORER(reg, addr)	EXC(swr, ST_INSN, reg, addr)
#define STORE(reg, addr)	EXC(sw, ST_INSN, reg, addr)
#define ADD    addu
#define SUB    subu
#define SRL    srl
#define SLL    sll
#define SLLV   sllv
#define SRLV   srlv
#define NBYTES 4
#define LOG_NBYTES 2

#endif /* USE_DOUBLE */

#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define LDFIRST LOADR
#define LDREST	LOADL
#define STFIRST STORER
#define STREST	STOREL
#define SHIFT_DISCARD SLLV
#define SHIFT_DISCARD_REVERT SRLV
#else
#define LDFIRST LOADL
#define LDREST	LOADR
#define STFIRST STOREL
#define STREST	STORER
#define SHIFT_DISCARD SRLV
#define SHIFT_DISCARD_REVERT SLLV
#endif
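
/*
 * LDFIRST/LDREST pair lwl/lwr (ldl/ldr with USE_DOUBLE) to read an
 * unaligned word in two accesses; whether lwl or lwr handles the
 * low-address part flips with endianness, hence the swapped
 * definitions above.
 */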

#define FIRST(unit) ((unit)*NBYTES)
#define REST(unit)  (FIRST(unit)+NBYTES-1)

#define ADDRMASK (NBYTES-1)

#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	.set	noat
#else
	.set	at=v1
#endif

	.macro __BUILD_CSUM_PARTIAL_COPY_USER mode, from, to

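	/*
	 * Seed sum with ~0 rather than 0 so that a successful copy
	 * never produces a 0 checksum; 0 in v0 is reserved for the
	 * fault path (.L_exc below).
	 */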
	li	sum, -1
	move	odd, zero
	/*
	 * Note: dst & src may be unaligned, len may be 0
	 * Temps
	 */
	/*
	 * The "issue break"s below are very approximate.
	 * Issue delays for dcache fills will perturb the schedule, as will
	 * load queue full replay traps, etc.
	 *
	 * If len < NBYTES use byte operations.
	 */
	sltu	t2, len, NBYTES
	and	t1, dst, ADDRMASK
	bnez	t2, .Lcopy_bytes_checklen\@
	and	t0, src, ADDRMASK
	andi	odd, dst, 0x1			/* odd buffer? */
	bnez	t1, .Ldst_unaligned\@
	nop
	bnez	t0, .Lsrc_unaligned_dst_aligned\@
	/*
	 * use delay slot for fall-through
	 * src and dst are aligned; need to compute rem
	 */
.Lboth_aligned\@:
	SRL	t0, len, LOG_NBYTES+3		# +3 for 8 units/iter
	beqz	t0, .Lcleanup_both_aligned\@	# len < 8*NBYTES
	nop
	SUB	len, 8*NBYTES			# subtract here for bgez loop
	.align	4
1:
	LOAD(t0, UNIT(0)(src))
	LOAD(t1, UNIT(1)(src))
	LOAD(t2, UNIT(2)(src))
	LOAD(t3, UNIT(3)(src))
	LOAD(t4, UNIT(4)(src))
	LOAD(t5, UNIT(5)(src))
	LOAD(t6, UNIT(6)(src))
	LOAD(t7, UNIT(7)(src))
	SUB	len, len, 8*NBYTES
	ADD	src, src, 8*NBYTES
	STORE(t0, UNIT(0)(dst))
	ADDC(t0, t1)
	STORE(t1, UNIT(1)(dst))
	ADDC(sum, t0)
	STORE(t2, UNIT(2)(dst))
	ADDC(t2, t3)
	STORE(t3, UNIT(3)(dst))
	ADDC(sum, t2)
	STORE(t4, UNIT(4)(dst))
	ADDC(t4, t5)
	STORE(t5, UNIT(5)(dst))
	ADDC(sum, t4)
	STORE(t6, UNIT(6)(dst))
	ADDC(t6, t7)
	STORE(t7, UNIT(7)(dst))
	ADDC(sum, t6)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 8*NBYTES
	bgez	len, 1b
	.set	noreorder
	ADD	len, 8*NBYTES			# revert len (see above)

	/*
	 * len == the number of bytes left to copy < 8*NBYTES
	 */
.Lcleanup_both_aligned\@:
#define rem t7
	beqz	len, .Ldone\@
	sltu	t0, len, 4*NBYTES
	bnez	t0, .Lless_than_4units\@
	and	rem, len, (NBYTES-1)		# rem = len % NBYTES
	/*
	 * len >= 4*NBYTES
	 */
	LOAD(t0, UNIT(0)(src))
	LOAD(t1, UNIT(1)(src))
	LOAD(t2, UNIT(2)(src))
	LOAD(t3, UNIT(3)(src))
	SUB	len, len, 4*NBYTES
	ADD	src, src, 4*NBYTES
	STORE(t0, UNIT(0)(dst))
	ADDC(t0, t1)
	STORE(t1, UNIT(1)(dst))
	ADDC(sum, t0)
	STORE(t2, UNIT(2)(dst))
	ADDC(t2, t3)
	STORE(t3, UNIT(3)(dst))
	ADDC(sum, t2)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 4*NBYTES
	beqz	len, .Ldone\@
	.set	noreorder
.Lless_than_4units\@:
	/*
	 * rem = len % NBYTES
	 */
	beq	rem, len, .Lcopy_bytes\@
	nop
1:
	LOAD(t0, 0(src))
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst))
	ADDC(sum, t0)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, NBYTES
	bne	rem, len, 1b
	.set	noreorder

	/*
	 * src and dst are aligned, need to copy rem bytes (rem < NBYTES)
	 * A loop would do only a byte at a time with possible branch
	 * mispredicts.  Can't do an explicit LOAD dst,mask,or,STORE
	 * because can't assume read-access to dst.  Instead, use
	 * STREST dst, which doesn't require read access to dst.
	 *
	 * This code should perform better than a simple loop on modern,
	 * wide-issue mips processors because the code has fewer branches and
	 * more instruction-level parallelism.
	 */
#define bits t2
	beqz	len, .Ldone\@
	ADD	t1, dst, len	# t1 is just past last byte of dst
	li	bits, 8*NBYTES
	SLL	rem, len, 3	# rem = number of bits to keep
	LOAD(t0, 0(src))
	SUB	bits, bits, rem # bits = number of bits to discard
	SHIFT_DISCARD t0, t0, bits
	STREST(t0, -1(t1))
	SHIFT_DISCARD_REVERT t0, t0, bits
	.set	reorder
	ADDC(sum, t0)
	b	.Ldone\@
	.set	noreorder
.Ldst_unaligned\@:
	/*
	 * dst is unaligned
	 * t0 = src & ADDRMASK
	 * t1 = dst & ADDRMASK; t1 > 0
	 * len >= NBYTES
	 *
	 * Copy enough bytes to align dst
	 * Set match = (src and dst have same alignment)
	 */
#define match rem
	LDFIRST(t3, FIRST(0)(src))
	ADD	t2, zero, NBYTES
	LDREST(t3, REST(0)(src))
	SUB	t2, t2, t1	# t2 = number of bytes copied
	xor	match, t0, t1
	STFIRST(t3, FIRST(0)(dst))
	SLL	t4, t1, 3	# t4 = number of bits to discard
	SHIFT_DISCARD t3, t3, t4
	/* no SHIFT_DISCARD_REVERT to handle odd buffer properly */
	ADDC(sum, t3)
	beq	len, t2, .Ldone\@
	SUB	len, len, t2
	ADD	dst, dst, t2
	beqz	match, .Lboth_aligned\@
	ADD	src, src, t2

.Lsrc_unaligned_dst_aligned\@:
	SRL	t0, len, LOG_NBYTES+2	# +2 for 4 units/iter
	beqz	t0, .Lcleanup_src_unaligned\@
	and	rem, len, (4*NBYTES-1)	# rem = len % 4*NBYTES
1:
/*
 * Avoid consecutive LD*'s to the same register since some mips
 * implementations can't issue them in the same cycle.
 * It's OK to load FIRST(N+1) before REST(N) because the two addresses
 * are to the same unit (unless src is aligned, but it's not).
 */
	LDFIRST(t0, FIRST(0)(src))
	LDFIRST(t1, FIRST(1)(src))
	SUB	len, len, 4*NBYTES
	LDREST(t0, REST(0)(src))
	LDREST(t1, REST(1)(src))
	LDFIRST(t2, FIRST(2)(src))
	LDFIRST(t3, FIRST(3)(src))
	LDREST(t2, REST(2)(src))
	LDREST(t3, REST(3)(src))
	ADD	src, src, 4*NBYTES
#ifdef CONFIG_CPU_SB1
	nop				# improves slotting
#endif
	STORE(t0, UNIT(0)(dst))
	ADDC(t0, t1)
	STORE(t1, UNIT(1)(dst))
	ADDC(sum, t0)
	STORE(t2, UNIT(2)(dst))
	ADDC(t2, t3)
	STORE(t3, UNIT(3)(dst))
	ADDC(sum, t2)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, 4*NBYTES
	bne	len, rem, 1b
	.set	noreorder

.Lcleanup_src_unaligned\@:
	beqz	len, .Ldone\@
	and	rem, len, NBYTES-1	# rem = len % NBYTES
	beq	rem, len, .Lcopy_bytes\@
	nop
1:
	LDFIRST(t0, FIRST(0)(src))
	LDREST(t0, REST(0)(src))
	ADD	src, src, NBYTES
	SUB	len, len, NBYTES
	STORE(t0, 0(dst))
	ADDC(sum, t0)
	.set	reorder				/* DADDI_WAR */
	ADD	dst, dst, NBYTES
	bne	len, rem, 1b
	.set	noreorder

.Lcopy_bytes_checklen\@:
	beqz	len, .Ldone\@
	nop
.Lcopy_bytes\@:
	/* 0 < len < NBYTES */
#ifdef CONFIG_CPU_LITTLE_ENDIAN
#define SHIFT_START 0
#define SHIFT_INC 8
#else
#define SHIFT_START 8*(NBYTES-1)
#define SHIFT_INC -8
#endif
	move	t2, zero	# partial word
	li	t3, SHIFT_START # shift
#define COPY_BYTE(N)			\
	LOADBU(t0, N(src));		\
	SUB	len, len, 1;		\
	STOREB(t0, N(dst));		\
	SLLV	t0, t0, t3;		\
	addu	t3, SHIFT_INC;		\
	beqz	len, .Lcopy_bytes_done\@; \
	 or	t2, t0

	COPY_BYTE(0)
	COPY_BYTE(1)
#ifdef USE_DOUBLE
	COPY_BYTE(2)
	COPY_BYTE(3)
	COPY_BYTE(4)
	COPY_BYTE(5)
#endif
	LOADBU(t0, NBYTES-2(src))
	SUB	len, len, 1
	STOREB(t0, NBYTES-2(dst))
	SLLV	t0, t0, t3
	or	t2, t0
.Lcopy_bytes_done\@:
	ADDC(sum, t2)
.Ldone\@:
	/* fold checksum */
	.set	push
	.set	noat
#ifdef USE_DOUBLE
	dsll32	v1, sum, 0
	daddu	sum, v1
	sltu	v1, sum, v1
	dsra32	sum, sum, 0
	addu	sum, v1
#endif

#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5) || \
    defined(CONFIG_CPU_LOONGSON64)
	.set	push
	.set	arch=mips32r2
	wsbh	v1, sum
	movn	sum, v1, odd
	.set	pop
#else
	beqz	odd, 1f			/* odd buffer alignment? */
	lui	v1, 0x00ff
	addu	v1, 0x00ff
	and	t0, sum, v1
	sll	t0, t0, 8
	srl	sum, sum, 8
	and	sum, sum, v1
	or	sum, sum, t0
1:
#endif
	.set	pop
	.set	reorder
	jr	ra
	.set	noreorder
	.endm

	.set	noreorder
.L_exc:
	jr	ra
	li	v0, 0

FEXPORT(__csum_partial_copy_nocheck)
EXPORT_SYMBOL(__csum_partial_copy_nocheck)
#ifndef CONFIG_EVA
FEXPORT(__csum_partial_copy_to_user)
EXPORT_SYMBOL(__csum_partial_copy_to_user)
FEXPORT(__csum_partial_copy_from_user)
EXPORT_SYMBOL(__csum_partial_copy_from_user)
#endif
__BUILD_CSUM_PARTIAL_COPY_USER LEGACY_MODE USEROP USEROP

#ifdef CONFIG_EVA
LEAF(__csum_partial_copy_to_user)
__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE KERNELOP USEROP
END(__csum_partial_copy_to_user)

LEAF(__csum_partial_copy_from_user)
__BUILD_CSUM_PARTIAL_COPY_USER EVA_MODE USEROP KERNELOP
END(__csum_partial_copy_from_user)
#endif