/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1998, 1999, 2000 by Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2007 by Maciej W. Rozycki
 * Copyright (C) 2011, 2012 MIPS Technologies, Inc.
 */
#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/export.h>
#include <asm/regdef.h>

/*
 * Pick the unaligned-store primitives for the configured register width:
 * swl/swr on 32-bit, sdl/sdr on 64-bit.
 */
#if LONGSIZE == 4
#define LONG_S_L swl
#define LONG_S_R swr
#else
#define LONG_S_L sdl
#define LONG_S_R sdr
#endif

/*
 * STORSIZE is the number of bytes written per store in the fill loops and
 * STORMASK the corresponding alignment mask.  On microMIPS the paired-store
 * form (LONG_SP) writes two registers at once, so each store covers twice
 * LONGSIZE bytes and needs a second fill register (FILL64RG) preloaded with
 * the fill pattern; FILLPTRG holds the partial-block store count used to
 * compute the computed-goto entry point.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define STORSIZE (LONGSIZE * 2)
#define STORMASK (STORSIZE - 1)
#define FILL64RG t8
#define FILLPTRG t7
#undef LONG_S
#define LONG_S LONG_SP
#else
#define STORSIZE LONGSIZE
#define STORMASK LONGMASK
#define FILL64RG a1
#define FILLPTRG t0
#endif

/*
 * __BUILD_BZERO is expanded twice: once for normal kernel accesses
 * (LEGACY_MODE) and once for EVA user-space accesses (EVA_MODE), which must
 * use the EVA forms of the store instructions.
 */
#define LEGACY_MODE 1
#define EVA_MODE    2

/*
 * No need to protect it with EVA #ifdefery. The generated block of code
 * will never be assembled if EVA is not enabled.
 *
 * The extra ___BUILD_EVA_INSN indirection forces expansion of the 'insn'
 * argument before the ##e suffix is pasted on (e.g. sb -> sbe).
 */
#define __EVAFY(insn, reg, addr) __BUILD_EVA_INSN(insn##e, reg, addr)
#define ___BUILD_EVA_INSN(insn, reg, addr) __EVAFY(insn, reg, addr)

/*
 * EX - emit one (possibly faulting) instruction together with an
 * __ex_table entry so that a fault at the instruction is redirected to
 * 'handler'.  In EVA_MODE the EVA form of the instruction is emitted
 * instead (see ___BUILD_EVA_INSN above).  Relies on the \mode argument of
 * the enclosing assembler macro.
 */
#define EX(insn,reg,addr,handler)			\
	.if \mode == LEGACY_MODE;			\
9:	insn	reg, addr;				\
	.else;						\
9:	___BUILD_EVA_INSN(insn, reg, addr);		\
	.endif;						\
	.section __ex_table,"a";			\
	PTR_WD	9b, handler;				\
	.previous

/*
 * f_fill64 - store 64 bytes of \val starting at \dst + \offset, every
 * store covered by the \fixup exception handler.  The number of stores
 * emitted depends on STORSIZE: 16 stores for plain 32-bit (4 bytes each),
 * 8 stores for 64-bit or microMIPS/32 (8 bytes each), 4 stores for
 * microMIPS/64 (16 bytes each).
 */
	.macro	f_fill64 dst, offset, val, fixup, mode
	EX(LONG_S, \val, (\offset + 0 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 1 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 2 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 3 * STORSIZE)(\dst), \fixup)
#if ((defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4)) || !defined(CONFIG_CPU_MICROMIPS))
	EX(LONG_S, \val, (\offset + 4 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 5 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 6 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 7 * STORSIZE)(\dst), \fixup)
#endif
#if (!defined(CONFIG_CPU_MICROMIPS) && (LONGSIZE == 4))
	EX(LONG_S, \val, (\offset + 8 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 9 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 10 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 11 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 12 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 13 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 14 * STORSIZE)(\dst), \fixup)
	EX(LONG_S, \val, (\offset + 15 * STORSIZE)(\dst), \fixup)
#endif
	.endm

/*
 * __BUILD_BZERO - generate the body shared by memset and __bzero{,_user}.
 *
 * Arguments:
 *	mode: LEGACY_MODE or EVA_MODE (selects plain vs. EVA stores in EX)
 *
 * On entry: a0 = dest, a1 = fill pattern replicated to register width,
 * a2 = byte count.  On exit a2 = number of bytes left unset (0 on
 * success; the fixup handlers compute the remainder after a fault).
 * Clobbers t0, t1 (and t7/t8/t9 on microMIPS, AT on DADDI-workaround
 * kernels).
 */
	.align	5

	.macro	__BUILD_BZERO mode
	/* Initialize __memset if this is the first time we call this macro */
	.ifnotdef __memset
	.set	__memset, 1
	.hidden __memset /* Make sure it does not leak */
	.endif

	sltiu		t0, a2, STORSIZE	/* very small region? */
	.set		noreorder
	bnez		t0, .Lsmall_memset\@
	 andi		t0, a0, STORMASK	/* aligned? */
	.set		reorder

#ifdef CONFIG_CPU_MICROMIPS
	move		t8, a1			/* used by 'swp' instruction */
	move		t9, a1
#endif
	.set		noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
	beqz		t0, 1f
	 PTR_SUBU	t0, STORSIZE		/* alignment in bytes */
#else
	.set		noat
	li		AT, STORSIZE
	beqz		t0, 1f
	 PTR_SUBU	t0, AT			/* alignment in bytes */
	.set		at
#endif
	.set		reorder

#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	/* One unaligned store aligns the pointer in a single instruction */
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#else
	EX(LONG_S_R, a1, (a0), .Lfirst_fixup\@)	/* make word/dword aligned */
#endif
	PTR_SUBU	a0, t0			/* long align ptr */
	PTR_ADDU	a2, t0			/* correct size */

#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	/* No swl/swr etc.: align the pointer with single byte stores */
#define STORE_BYTE(N)				\
	EX(sb, a1, N(a0), .Lbyte_fixup\@);	\
	.set		noreorder;		\
	beqz		t0, 0f;			\
	 PTR_ADDU	t0, 1;			\
	.set		reorder;

	PTR_ADDU	a2, t0			/* correct size */
	PTR_ADDU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
	ori		a0, STORMASK		/* round a0 up to the next */
	xori		a0, STORMASK		/* STORSIZE boundary */
	PTR_ADDIU	a0, STORSIZE
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	ori		t1, a2, 0x3f		/* # of full blocks */
	xori		t1, 0x3f
	andi		t0, a2, 0x40-STORSIZE
	beqz		t1, .Lmemset_partial\@	/* no block to fill */

	PTR_ADDU	t1, a0			/* end address */
1:	PTR_ADDIU	a0, 64
	R10KCBARRIER(0(ra))
	f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
	bne		t1, a0, 1b

.Lmemset_partial\@:
	/*
	 * Fewer than 64 bytes of long-sized fill remain; jump into the
	 * middle of an f_fill64 expansion so exactly the right number of
	 * stores execute (Duff's-device style computed goto).
	 */
	R10KCBARRIER(0(ra))
	PTR_LA		t1, 2f			/* where to start */
#ifdef CONFIG_CPU_MICROMIPS
	LONG_SRL	t7, t0, 1
#endif
#if LONGSIZE == 4
	PTR_SUBU	t1, FILLPTRG
#else
	.set		noat
	LONG_SRL	AT, FILLPTRG, 1
	PTR_SUBU	t1, AT
	.set		at
#endif
	PTR_ADDU	a0, t0			/* dest ptr */
	jr		t1

	/* ... but first do longs ... */
	f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
2:	andi		a2, STORMASK		/* At most one long to go */

	.set		noreorder
	beqz		a2, 1f
#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
	 PTR_ADDU	a0, a2			/* What's left */
	.set		reorder
	R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
	EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
	EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
	 PTR_SUBU	t0, $0, a2
	.set		reorder
	move		a2, zero		/* No remaining longs */
	PTR_ADDIU	t0, 1
	STORE_BYTE(0)
	STORE_BYTE(1)
#if LONGSIZE == 4
	EX(sb, a1, 2(a0), .Lbyte_fixup\@)
#else
	STORE_BYTE(2)
	STORE_BYTE(3)
	STORE_BYTE(4)
	STORE_BYTE(5)
	EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1:	move		a2, zero
	jr		ra

.Lsmall_memset\@:
	/* Region shorter than STORSIZE: plain bytewise loop */
	PTR_ADDU	t1, a0, a2
	beqz		a2, 2f

1:	PTR_ADDIU	a0, 1			/* fill bytewise */
	R10KCBARRIER(0(ra))
	.set		noreorder
	bne		t1, a0, 1b
	 EX(sb, a1, -1(a0), .Lsmall_fixup\@)
	.set		reorder

2:	move		a2, zero
	jr		ra			/* done */
	.if __memset == 1
	END(memset)
	.set __memset, 0
	.hidden __memset
	.endif

	/*
	 * Fault fixup handlers: each leaves the number of bytes that were
	 * NOT set in a2 before returning to the exception-table caller.
	 */
#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
	/*
	 * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
	 *      a2     =             a2                -              t0                   + 1
	 */
	PTR_SUBU	a2, t0
	PTR_ADDIU	a2, 1
	jr		ra
#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */

.Lfirst_fixup\@:
	/* unset_bytes already in a2 */
	jr	ra

.Lfwd_fixup\@:
	/*
	 * unset_bytes = partial_start_addr +  #bytes   -     fault_addr
	 *      a2     =         t1         + (a2 & 3f) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, 0x3f
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, t1
	LONG_SUBU	a2, t0
	jr		ra

.Lpartial_fixup\@:
	/*
	 * unset_bytes = partial_end_addr +      #bytes     -     fault_addr
	 *      a2     =       a0         + (a2 & STORMASK) - $28->task->BUADDR
	 */
	PTR_L		t0, TI_TASK($28)
	andi		a2, STORMASK
	LONG_L		t0, THREAD_BUADDR(t0)
	LONG_ADDU	a2, a0
	LONG_SUBU	a2, t0
	jr		ra

.Llast_fixup\@:
	/* unset_bytes already in a2 */
	jr		ra

.Lsmall_fixup\@:
	/*
	 * unset_bytes = end_addr - current_addr + 1
	 *      a2     =    t1    -      a0      + 1
	 */
	PTR_SUBU	a2, t1, a0
	PTR_ADDIU	a2, 1
	jr		ra

	.endm

/*
 * memset(void *s, int c, size_t n)
 *
 * a0: start of area to clear
 * a1: char to fill with
 * a2: size of area to clear
 *
 * Returns the original dest pointer in v0.  The fill byte is replicated
 * across the full register width before falling into __BUILD_BZERO
 * (skipped when c == 0, since a1 is already the right pattern).
 */

LEAF(memset)
EXPORT_SYMBOL(memset)
	move		v0, a0			/* result */
	beqz		a1, 1f

	andi		a1, 0xff		/* spread fillword */
	LONG_SLL	t1, a1, 8
	or		a1, t1
	LONG_SLL	t1, a1, 16
#if LONGSIZE == 8
	or		a1, t1
	LONG_SLL	t1, a1, 32
#endif
	or		a1, t1
1:
#ifndef CONFIG_EVA
/* Without EVA, __bzero is simply an alias into the legacy memset body */
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
#endif
	__BUILD_BZERO LEGACY_MODE

#ifdef CONFIG_EVA
/* With EVA, user-space clearing needs its own instantiation with EVA stores */
LEAF(__bzero)
EXPORT_SYMBOL(__bzero)
	__BUILD_BZERO EVA_MODE
END(__bzero)
#endif
325#endif