blob: 7d23d4bb2168bacb4f52dda176b7a5bc5402b5f0 [file] [log] [blame]
Thomas Gleixnerd2912cb2019-06-04 10:11:33 +02001/* SPDX-License-Identifier: GPL-2.0-only */
Linus Torvalds1da177e2005-04-16 15:20:36 -07002/*
Russell King4baa9922008-08-02 10:55:55 +01003 * arch/arm/include/asm/assembler.h
Linus Torvalds1da177e2005-04-16 15:20:36 -07004 *
5 * Copyright (C) 1996-2000 Russell King
6 *
Linus Torvalds1da177e2005-04-16 15:20:36 -07007 * This file contains arm architecture specific defines
8 * for the different processors.
9 *
10 * Do not include any C declarations in this file - it is included by
11 * assembler source.
12 */
Magnus Damm2bc58a62011-06-13 06:46:44 +010013#ifndef __ASM_ASSEMBLER_H__
14#define __ASM_ASSEMBLER_H__
15
Linus Torvalds1da177e2005-04-16 15:20:36 -070016#ifndef __ASSEMBLY__
17#error "Only include this from assembly code"
18#endif
19
20#include <asm/ptrace.h>
Dave Martin80c59da2012-02-09 08:47:17 -080021#include <asm/opcodes-virt.h>
Catalin Marinas0b1f68e2014-04-02 10:57:49 +010022#include <asm/asm-offsets.h>
Andrey Ryabinin9a2b51b2014-06-18 16:12:40 +010023#include <asm/page.h>
24#include <asm/thread_info.h>
Russell King747ffc22020-05-03 13:03:54 +010025#include <asm/uaccess-asm.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070026
/* IOMEM(x): identity wrapper for an I/O address token when used from assembly. */
Rob Herring6f6f6a72012-03-10 10:30:31 -060027#define IOMEM(x) (x)
28
Linus Torvalds1da177e2005-04-16 15:20:36 -070029/*
 30 * Endian independent macros for shifting bytes within registers.
 31 */
32#ifndef __ARMEB__
/* Little-endian: byte 0 is the least-significant byte of the word. */
Victor Kamenskyd98b90e2014-02-25 08:41:09 +010033#define lspull lsr
34#define lspush lsl
Linus Torvalds1da177e2005-04-16 15:20:36 -070035#define get_byte_0 lsl #0
36#define get_byte_1 lsr #8
37#define get_byte_2 lsr #16
38#define get_byte_3 lsr #24
39#define put_byte_0 lsl #0
40#define put_byte_1 lsl #8
41#define put_byte_2 lsl #16
42#define put_byte_3 lsl #24
43#else
/* Big-endian (__ARMEB__): byte lanes within the word are reversed. */
Victor Kamenskyd98b90e2014-02-25 08:41:09 +010044#define lspull lsl
45#define lspush lsr
Linus Torvalds1da177e2005-04-16 15:20:36 -070046#define get_byte_0 lsr #24
47#define get_byte_1 lsr #16
48#define get_byte_2 lsr #8
49#define get_byte_3 lsl #0
50#define put_byte_0 lsl #24
51#define put_byte_1 lsl #16
52#define put_byte_2 lsl #8
53#define put_byte_3 lsl #0
54#endif
55
Ben Dooks457c2402013-02-12 18:59:57 +000056/* Select code for any configuration running in BE8 mode */
57#ifdef CONFIG_CPU_ENDIAN_BE8
58#define ARM_BE8(code...) code
59#else
60#define ARM_BE8(code...)
61#endif
62
Linus Torvalds1da177e2005-04-16 15:20:36 -070063/*
64 * Data preload for architectures that support it
65 */
66#if __LINUX_ARM_ARCH__ >= 5
67#define PLD(code...) code
68#else
69#define PLD(code...)
70#endif
71
Linus Torvalds1da177e2005-04-16 15:20:36 -070072/*
Nicolas Pitre2239aff2008-03-31 12:38:31 -040073 * This can be used to enable code to cacheline align the destination
74 * pointer when bulk writing to memory. Experiments on StrongARM and
75 * XScale didn't show this a worthwhile thing to do when the cache is not
76 * set to write-allocate (this would need further testing on XScale when WA
77 * is used).
78 *
79 * On Feroceon there is much to gain however, regardless of cache mode.
80 */
81#ifdef CONFIG_CPU_FEROCEON
82#define CALGN(code...) code
83#else
84#define CALGN(code...)
85#endif
86
/* Mask covering a 12-bit immediate field (0xfff). */
Arnd Bergmannffa47aa2017-06-30 18:03:59 +020087#define IMM12_MASK 0xfff
88
88
Nicolas Pitre2239aff2008-03-31 12:38:31 -040089/*
Russell King9c429542006-03-23 16:59:37 +000090 * Enable and disable interrupts
91 */
92#if __LINUX_ARM_ARCH__ >= 6
@ ARMv6+: cpsid/cpsie change the CPSR I bit directly.
Uwe Kleine-König0d928b02009-08-13 20:38:17 +020093 .macro disable_irq_notrace
Russell King9c429542006-03-23 16:59:37 +000094 cpsid i
95 .endm
96
Uwe Kleine-König0d928b02009-08-13 20:38:17 +020097 .macro enable_irq_notrace
Russell King9c429542006-03-23 16:59:37 +000098 cpsie i
99 .endm
100#else
@ Pre-v6: write the CPSR control field (assumes SVC mode).
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200101 .macro disable_irq_notrace
Russell King9c429542006-03-23 16:59:37 +0000102 msr cpsr_c, #PSR_I_BIT | SVC_MODE
103 .endm
104
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200105 .macro enable_irq_notrace
Russell King9c429542006-03-23 16:59:37 +0000106 msr cpsr_c, #SVC_MODE
107 .endm
108#endif
109
@ Inform the irq-tracer that IRQs went off. With \save=1 the
@ caller-clobbered registers are preserved around the call.
Russell King3302cad2015-08-20 16:13:37 +0100110 .macro asm_trace_hardirqs_off, save=1
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200111#if defined(CONFIG_TRACE_IRQFLAGS)
Russell King3302cad2015-08-20 16:13:37 +0100112 .if \save
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200113 stmdb sp!, {r0-r3, ip, lr}
Russell King3302cad2015-08-20 16:13:37 +0100114 .endif
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200115 bl trace_hardirqs_off
Russell King3302cad2015-08-20 16:13:37 +0100116 .if \save
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200117 ldmia sp!, {r0-r3, ip, lr}
Russell King3302cad2015-08-20 16:13:37 +0100118 .endif
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200119#endif
120 .endm
121
@ Inform the irq-tracer that IRQs went on; \cond conditionalises the bl.
Russell King3302cad2015-08-20 16:13:37 +0100122 .macro asm_trace_hardirqs_on, cond=al, save=1
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200123#if defined(CONFIG_TRACE_IRQFLAGS)
124 /*
125 * actually the registers should be pushed and pop'd conditionally, but
126 * after bl the flags are certainly clobbered
127 */
Russell King3302cad2015-08-20 16:13:37 +0100128 .if \save
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200129 stmdb sp!, {r0-r3, ip, lr}
Russell King3302cad2015-08-20 16:13:37 +0100130 .endif
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200131 bl\cond trace_hardirqs_on
Russell King3302cad2015-08-20 16:13:37 +0100132 .if \save
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200133 ldmia sp!, {r0-r3, ip, lr}
Russell King3302cad2015-08-20 16:13:37 +0100134 .endif
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200135#endif
136 .endm
137
@ disable_irq: mask IRQs, then record the fact with the irq-tracer.
Russell King3302cad2015-08-20 16:13:37 +0100138 .macro disable_irq, save=1
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200139 disable_irq_notrace
Russell King3302cad2015-08-20 16:13:37 +0100140 asm_trace_hardirqs_off \save
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200141 .endm
142
@ enable_irq: tell the tracer first, then actually unmask IRQs.
143 .macro enable_irq
144 asm_trace_hardirqs_on
145 enable_irq_notrace
146 .endm
Russell King9c429542006-03-23 16:59:37 +0000147/*
Linus Torvalds1da177e2005-04-16 15:20:36 -0700148 * Save the current IRQ state and disable IRQs. Note that this macro
149 * assumes FIQs are enabled, and that the processor is in SVC mode.
150 */
Russell King59d1ff32005-11-09 15:04:22 +0000151 .macro save_and_disable_irqs, oldcpsr
@ v7-M has no CPSR; the IRQ mask state lives in PRIMASK instead.
Catalin Marinas55bdd692010-05-21 18:06:41 +0100152#ifdef CONFIG_CPU_V7M
153 mrs \oldcpsr, primask
154#else
Linus Torvalds1da177e2005-04-16 15:20:36 -0700155 mrs \oldcpsr, cpsr
Catalin Marinas55bdd692010-05-21 18:06:41 +0100156#endif
Russell King9c429542006-03-23 16:59:37 +0000157 disable_irq
Linus Torvalds1da177e2005-04-16 15:20:36 -0700158 .endm
159
@ As save_and_disable_irqs, but without the irq-tracing hooks.
Rabin Vincent8e43a902012-02-15 16:01:42 +0100160 .macro save_and_disable_irqs_notrace, oldcpsr
Vladimir Murzinb2bf4822016-08-30 17:28:43 +0100161#ifdef CONFIG_CPU_V7M
162 mrs \oldcpsr, primask
163#else
Rabin Vincent8e43a902012-02-15 16:01:42 +0100164 mrs \oldcpsr, cpsr
Vladimir Murzinb2bf4822016-08-30 17:28:43 +0100165#endif
Rabin Vincent8e43a902012-02-15 16:01:42 +0100166 disable_irq_notrace
167 .endm
168
Linus Torvalds1da177e2005-04-16 15:20:36 -0700169/*
170 * Restore interrupt state previously stored in a register. We don't
171 * guarantee that this will preserve the flags.
172 */
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200173 .macro restore_irqs_notrace, oldcpsr
Catalin Marinas55bdd692010-05-21 18:06:41 +0100174#ifdef CONFIG_CPU_V7M
175 msr primask, \oldcpsr
176#else
Linus Torvalds1da177e2005-04-16 15:20:36 -0700177 msr cpsr_c, \oldcpsr
Catalin Marinas55bdd692010-05-21 18:06:41 +0100178#endif
Linus Torvalds1da177e2005-04-16 15:20:36 -0700179 .endm
180
@ restore_irqs: if \oldcpsr has the I bit clear (IRQs were enabled),
@ notify the tracer (cond=eq matches the tst result) before restoring.
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200181 .macro restore_irqs, oldcpsr
182 tst \oldcpsr, #PSR_I_BIT
Russell King01e09a22015-08-20 14:22:48 +0100183 asm_trace_hardirqs_on cond=eq
Uwe Kleine-König0d928b02009-08-13 20:38:17 +0200184 restore_irqs_notrace \oldcpsr
185 .endm
186
Catalin Marinas39ad04c2014-04-02 10:57:48 +0100187/*
Russell King14327c62015-04-21 14:17:25 +0100188 * Assembly version of "adr rd, BSYM(sym)". This should only be used to
189 * reference local symbols in the same assembly file which are to be
190 * resolved by the assembler. Other usage is undefined.
191 */
@ Generates badr, badreq, badrne, ... one variant per condition code.
@ On Thumb-2 the +1 sets the Thumb bit in the resulting address.
192 .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
193 .macro badr\c, rd, sym
194#ifdef CONFIG_THUMB2_KERNEL
195 adr\c \rd, \sym + 1
196#else
197 adr\c \rd, \sym
198#endif
199 .endm
200 .endr
201
@ get_current: load the current task pointer into \rd, either from
@ TPIDRURO or via thread_info->task.
Ard Biesheuvel50596b72021-09-18 10:44:37 +0200202 .macro get_current, rd
203#ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO
204 mrc p15, 0, \rd, c13, c0, 3 @ get TPIDRURO register
205#else
206 get_thread_info \rd
207 ldr \rd, [\rd, #TI_TASK]
208#endif
209 .endm
210
@ set_current: publish \rn as the current task in TPIDRURO
@ (no-op when that config is disabled).
211 .macro set_current, rn
212#ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO
213 mcr p15, 0, \rn, c13, c0, 3 @ set TPIDRURO register
214#endif
215 .endm
216
@ reload_current: refresh TPIDRURO from this CPU's __entry_task slot.
@ \t1 and \t2 are scratch registers.
217 .macro reload_current, t1:req, t2:req
218#ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO
219 adr_l \t1, __entry_task @ get __entry_task base address
220 mrc p15, 0, \t2, c13, c0, 4 @ get per-CPU offset
221 ldr \t1, [\t1, \t2] @ load variable
222 mcr p15, 0, \t1, c13, c0, 3 @ store in TPIDRURO
223#endif
224 .endm
225
Russell King14327c62015-04-21 14:17:25 +0100226/*
Catalin Marinas39ad04c2014-04-02 10:57:48 +0100227 * Get current thread_info.
228 */
229 .macro get_thread_info, rd
Ard Biesheuvel18ed1c012021-09-18 10:44:38 +0200230#ifdef CONFIG_THREAD_INFO_IN_TASK
231 /* thread_info is the first member of struct task_struct */
232 get_current \rd
233#else
@ Legacy layout: thread_info sits at the base of the kernel stack,
@ so round sp down to a THREAD_SIZE boundary (shift right then left).
Andrey Ryabinin9a2b51b2014-06-18 16:12:40 +0100234 ARM( mov \rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT )
Catalin Marinas39ad04c2014-04-02 10:57:48 +0100235 THUMB( mov \rd, sp )
Andrey Ryabinin9a2b51b2014-06-18 16:12:40 +0100236 THUMB( lsr \rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT )
237 mov \rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
Ard Biesheuvel18ed1c012021-09-18 10:44:38 +0200238#endif
Catalin Marinas39ad04c2014-04-02 10:57:48 +0100239 .endm
240
Catalin Marinas0b1f68e2014-04-02 10:57:49 +0100241/*
242 * Increment/decrement the preempt count.
243 */
244#ifdef CONFIG_PREEMPT_COUNT
245 .macro inc_preempt_count, ti, tmp
246 ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count
247 add \tmp, \tmp, #1 @ increment it
248 str \tmp, [\ti, #TI_PREEMPT]
249 .endm
250
251 .macro dec_preempt_count, ti, tmp
252 ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count
253 sub \tmp, \tmp, #1 @ decrement it
254 str \tmp, [\ti, #TI_PREEMPT]
255 .endm
256
@ Variant that looks up thread_info itself before decrementing.
257 .macro dec_preempt_count_ti, ti, tmp
258 get_thread_info \ti
259 dec_preempt_count \ti, \tmp
260 .endm
261#else
/* Preempt counting compiled out: all three macros are no-op stubs. */
262 .macro inc_preempt_count, ti, tmp
263 .endm
264
265 .macro dec_preempt_count, ti, tmp
266 .endm
267
268 .macro dec_preempt_count_ti, ti, tmp
269 .endm
270#endif
271
/*
 * USERL(l, x): emit user-access instruction(s) x with an __ex_table
 * fixup entry so that a fault at 9999: branches to label l.
 */
Vincent Whitchurchf4418822018-11-09 10:09:48 +0100272#define USERL(l, x...) \
Linus Torvalds1da177e2005-04-16 15:20:36 -07002739999: x; \
Russell King42604152010-04-19 10:15:03 +0100274 .pushsection __ex_table,"a"; \
Linus Torvalds1da177e2005-04-16 15:20:36 -0700275 .align 3; \
Vincent Whitchurchf4418822018-11-09 10:09:48 +0100276 .long 9999b,l; \
Russell King42604152010-04-19 10:15:03 +0100277 .popsection
Russell Kingbac4e962009-05-25 20:58:00 +0100278
/* USER(x): USERL with the conventional default fixup label 9001f. */
Vincent Whitchurchf4418822018-11-09 10:09:48 +0100279#define USER(x...) USERL(9001f, x)
280
/*
 * SMP alternatives: ALT_SMP emits the SMP instruction at 9998:, and
 * ALT_UP records a 4-byte UP replacement for it in .alt.smp.init
 * (as an offset to the patch site followed by the instruction).
 */
Russell Kingf00ec482010-09-04 10:47:48 +0100281#ifdef CONFIG_SMP
282#define ALT_SMP(instr...) \
2839998: instr
Dave Martined3768a2010-12-01 15:39:23 +0100284/*
285 * Note: if you get assembler errors from ALT_UP() when building with
286 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
287 * ALT_SMP( W(instr) ... )
288 */
Russell Kingf00ec482010-09-04 10:47:48 +0100289#define ALT_UP(instr...) \
290 .pushsection ".alt.smp.init", "a" ;\
Ard Biesheuvel450abd32020-09-14 11:48:20 +0300291 .long 9998b - . ;\
Dave Martined3768a2010-12-01 15:39:23 +01002929997: instr ;\
Russell King89c6bc52015-04-09 12:59:35 +0100293 .if . - 9997b == 2 ;\
294 nop ;\
295 .endif ;\
Dave Martined3768a2010-12-01 15:39:23 +0100296 .if . - 9997b != 4 ;\
297 .error "ALT_UP() content must assemble to exactly 4 bytes";\
298 .endif ;\
Russell Kingf00ec482010-09-04 10:47:48 +0100299 .popsection
300#define ALT_UP_B(label) \
Russell Kingf00ec482010-09-04 10:47:48 +0100301 .pushsection ".alt.smp.init", "a" ;\
Ard Biesheuvel450abd32020-09-14 11:48:20 +0300302 .long 9998b - . ;\
Jian Caia780e482020-04-29 01:20:11 +0100303 W(b) . + (label - 9998b) ;\
Russell Kingf00ec482010-09-04 10:47:48 +0100304 .popsection
305#else
/* UP build: SMP instructions vanish, UP alternatives are emitted inline. */
306#define ALT_SMP(instr...)
307#define ALT_UP(instr...) instr
308#define ALT_UP_B(label) b label
309#endif
310
Russell Kingbac4e962009-05-25 20:58:00 +0100311/*
Will Deacond675d0b2011-11-22 17:30:28 +0000312 * Instruction barrier
313 */
314 .macro instr_sync
315#if __LINUX_ARM_ARCH__ >= 7
316 isb
@ ARMv6 has no isb mnemonic; use the CP15 equivalent operation.
317#elif __LINUX_ARM_ARCH__ == 6
318 mcr p15, 0, r0, c7, c5, 4
319#endif
320 .endm
321
322/*
Russell Kingbac4e962009-05-25 20:58:00 +0100323 * SMP data memory barrier
324 */
@ smp_dmb: dmb on SMP kernels, patched to a nop on UP via ALT_UP.
@ \mode selects ARM vs wide (Thumb-2) encodings so SMP/UP variants
@ stay the same size for runtime patching.
Dave Martined3768a2010-12-01 15:39:23 +0100325 .macro smp_dmb mode
Russell Kingbac4e962009-05-25 20:58:00 +0100326#ifdef CONFIG_SMP
327#if __LINUX_ARM_ARCH__ >= 7
Dave Martined3768a2010-12-01 15:39:23 +0100328 .ifeqs "\mode","arm"
Will Deacon3ea12802013-05-10 18:07:19 +0100329 ALT_SMP(dmb ish)
Dave Martined3768a2010-12-01 15:39:23 +0100330 .else
Will Deacon3ea12802013-05-10 18:07:19 +0100331 ALT_SMP(W(dmb) ish)
Dave Martined3768a2010-12-01 15:39:23 +0100332 .endif
Russell Kingbac4e962009-05-25 20:58:00 +0100333#elif __LINUX_ARM_ARCH__ == 6
Russell Kingf00ec482010-09-04 10:47:48 +0100334 ALT_SMP(mcr p15, 0, r0, c7, c10, 5) @ dmb
335#else
336#error Incompatible SMP platform
Russell Kingbac4e962009-05-25 20:58:00 +0100337#endif
Dave Martined3768a2010-12-01 15:39:23 +0100338 .ifeqs "\mode","arm"
Russell Kingf00ec482010-09-04 10:47:48 +0100339 ALT_UP(nop)
Dave Martined3768a2010-12-01 15:39:23 +0100340 .else
341 ALT_UP(W(nop))
342 .endif
Russell Kingbac4e962009-05-25 20:58:00 +0100343#endif
344 .endm
Catalin Marinasb86040a2009-07-24 12:32:54 +0100345
Catalin Marinas55bdd692010-05-21 18:06:41 +0100346#if defined(CONFIG_CPU_V7M)
347 /*
348 * setmode is used to assert to be in svc mode during boot. For v7-M
349 * this is done in __v7m_setup, so setmode can be empty here.
350 */
351 .macro setmode, mode, reg
352 .endm
353#elif defined(CONFIG_THUMB2_KERNEL)
@ Thumb-2 msr cannot take an immediate, so stage \mode through \reg.
Catalin Marinasb86040a2009-07-24 12:32:54 +0100354 .macro setmode, mode, reg
355 mov \reg, #\mode
356 msr cpsr_c, \reg
357 .endm
358#else
359 .macro setmode, mode, reg
360 msr cpsr_c, #\mode
361 .endm
362#endif
Catalin Marinas8b592782009-07-24 12:32:57 +0100363
364/*
Dave Martin80c59da2012-02-09 08:47:17 -0800365 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
366 * a scratch register for the macro to overwrite.
367 *
368 * This macro is intended for forcing the CPU into SVC mode at boot time.
369 * you cannot return to the original mode.
Dave Martin80c59da2012-02-09 08:47:17 -0800370 */
371.macro safe_svcmode_maskall reg:req
Lorenzo Pieralisi0e0779d2014-05-08 17:31:40 +0100372#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
@ The eor/tst pair leaves Z set iff the current mode is HYP; the
@ masked-SVC target value is built in \reg either way.
Dave Martin80c59da2012-02-09 08:47:17 -0800373 mrs \reg , cpsr
Russell King8e9c24a2012-12-03 15:39:43 +0000374 eor \reg, \reg, #HYP_MODE
375 tst \reg, #MODE_MASK
Dave Martin80c59da2012-02-09 08:47:17 -0800376 bic \reg , \reg , #MODE_MASK
Russell King8e9c24a2012-12-03 15:39:43 +0000377 orr \reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
Dave Martin80c59da2012-02-09 08:47:17 -0800378THUMB( orr \reg , \reg , #PSR_T_BIT )
Dave Martin80c59da2012-02-09 08:47:17 -0800379 bne 1f
@ From HYP: leave via an exception return (spsr + ELR_hyp + eret).
Marc Zyngier2a552d52012-10-06 17:03:17 +0100380 orr \reg, \reg, #PSR_A_BIT
Russell King14327c62015-04-21 14:17:25 +0100381 badr lr, 2f
Marc Zyngier2a552d52012-10-06 17:03:17 +0100382 msr spsr_cxsf, \reg
Dave Martin80c59da2012-02-09 08:47:17 -0800383 __MSR_ELR_HYP(14)
384 __ERET
@ Not in HYP: a plain CPSR write is sufficient.
Marc Zyngier2a552d52012-10-06 17:03:17 +01003851: msr cpsr_c, \reg
Dave Martin80c59da2012-02-09 08:47:17 -08003862:
Dave Martin1ecec692012-12-10 18:35:22 +0100387#else
388/*
389 * workaround for possibly broken pre-v6 hardware
390 * (akita, Sharp Zaurus C-1000, PXA270-based)
391 */
392 setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
393#endif
Dave Martin80c59da2012-02-09 08:47:17 -0800394.endm
395
396/*
Catalin Marinas8b592782009-07-24 12:32:57 +0100397 * STRT/LDRT access macros with ARM and Thumb-2 variants
398 */
399#ifdef CONFIG_THUMB2_KERNEL
400
@ Emit one user-access load/store (\instr with the \t suffix) at a
@ fixed offset, plus its __ex_table fixup entry pointing at \abort.
Catalin Marinas4e7682d2012-01-25 11:38:13 +0100401 .macro usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
Catalin Marinas8b592782009-07-24 12:32:57 +01004029999:
403 .if \inc == 1
Stefan Agnerc0018992019-02-18 00:56:58 +0100404 \instr\()b\t\cond\().w \reg, [\ptr, #\off]
Catalin Marinas8b592782009-07-24 12:32:57 +0100405 .elseif \inc == 4
Stefan Agnerc0018992019-02-18 00:56:58 +0100406 \instr\t\cond\().w \reg, [\ptr, #\off]
Catalin Marinas8b592782009-07-24 12:32:57 +0100407 .else
408 .error "Unsupported inc macro argument"
409 .endif
410
Russell King42604152010-04-19 10:15:03 +0100411 .pushsection __ex_table,"a"
Catalin Marinas8b592782009-07-24 12:32:57 +0100412 .align 3
413 .long 9999b, \abort
Russell King42604152010-04-19 10:15:03 +0100414 .popsection
Catalin Marinas8b592782009-07-24 12:32:57 +0100415 .endm
416
@ Repeat-capable wrapper: emits \rept accesses and advances \ptr once.
417 .macro usracc, instr, reg, ptr, inc, cond, rept, abort
418 @ explicit IT instruction needed because of the label
419 @ introduced by the USER macro
420 .ifnc \cond,al
421 .if \rept == 1
422 itt \cond
423 .elseif \rept == 2
424 ittt \cond
425 .else
426 .error "Unsupported rept macro argument"
427 .endif
428 .endif
429
430 @ Slightly optimised to avoid incrementing the pointer twice
431 usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
432 .if \rept == 2
Will Deacon1142b712010-11-19 13:18:31 +0100433 usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
Catalin Marinas8b592782009-07-24 12:32:57 +0100434 .endif
435
436 add\cond \ptr, #\rept * \inc
437 .endm
438
439#else /* !CONFIG_THUMB2_KERNEL */
440
@ ARM variant: post-indexed addressing increments \ptr on each access.
Catalin Marinas4e7682d2012-01-25 11:38:13 +0100441 .macro usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
Catalin Marinas8b592782009-07-24 12:32:57 +0100442 .rept \rept
4439999:
444 .if \inc == 1
Stefan Agnerc0018992019-02-18 00:56:58 +0100445 \instr\()b\t\cond \reg, [\ptr], #\inc
Catalin Marinas8b592782009-07-24 12:32:57 +0100446 .elseif \inc == 4
Stefan Agnerc0018992019-02-18 00:56:58 +0100447 \instr\t\cond \reg, [\ptr], #\inc
Catalin Marinas8b592782009-07-24 12:32:57 +0100448 .else
449 .error "Unsupported inc macro argument"
450 .endif
451
Russell King42604152010-04-19 10:15:03 +0100452 .pushsection __ex_table,"a"
Catalin Marinas8b592782009-07-24 12:32:57 +0100453 .align 3
454 .long 9999b, \abort
Russell King42604152010-04-19 10:15:03 +0100455 .popsection
Catalin Marinas8b592782009-07-24 12:32:57 +0100456 .endr
457 .endm
458
459#endif /* CONFIG_THUMB2_KERNEL */
460
@ Convenience wrappers: user-space str/ldr with default cond/rept/abort.
461 .macro strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
462 usracc str, \reg, \ptr, \inc, \cond, \rept, \abort
463 .endm
464
465 .macro ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
466 usracc ldr, \reg, \ptr, \inc, \cond, \rept, \abort
467 .endm
Dave Martin8f519652011-06-23 17:10:05 +0100468
469/* Utility macro for declaring string literals */
470 .macro string name:req, string
471 .type \name , #object
472\name:
473 .asciz "\string"
474 .size \name , . - \name
475 .endm
476
@ Generates ret, reteq, retne, ... On >= v6, returning through lr uses
@ bx for interworking; any other register falls back to mov pc.
Russell King6ebbf2c2014-06-30 16:29:12 +0100477 .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
478 .macro ret\c, reg
479#if __LINUX_ARM_ARCH__ < 6
480 mov\c pc, \reg
481#else
482 .ifeqs "\reg", "lr"
483 bx\c \reg
484 .else
485 mov\c pc, \reg
486 .endif
487#endif
488 .endm
489 .endr
490
@ ret.w: as ret, but padded with a nop on Thumb-2 (where the return
@ may assemble to a narrow opcode) — keeps the emitted size fixed.
491 .macro ret.w, reg
492 ret \reg
493#ifdef CONFIG_THUMB2_KERNEL
494 nop
495#endif
496 .endm
497
@ bug: emit a trapping undefined instruction; with DEBUG_BUGVERBOSE the
@ message string and line number are recorded in __bug_table for the
@ kernel's BUG report.
Russell King8bafae22017-11-24 23:49:34 +0000498 .macro bug, msg, line
499#ifdef CONFIG_THUMB2_KERNEL
5001: .inst 0xde02
501#else
5021: .inst 0xe7f001f2
502#endif
504#ifdef CONFIG_DEBUG_BUGVERBOSE
505 .pushsection .rodata.str, "aMS", %progbits, 1
5062: .asciz "\msg"
507 .popsection
508 .pushsection __bug_table, "aw"
509 .align 2
510 .word 1b, 2b
511 .hword \line
512 .popsection
513#endif
514 .endm
515
/* Blacklist the given entry point from kprobes instrumentation. */
Masami Hiramatsu0d73c3f2018-05-13 05:04:29 +0100516#ifdef CONFIG_KPROBES
517#define _ASM_NOKPROBE(entry) \
518 .pushsection "_kprobe_blacklist", "aw" ; \
519 .balign 4 ; \
520 .long entry; \
521 .popsection
522#else
523#define _ASM_NOKPROBE(entry)
524#endif
525
@ __adldst_l: common core of adr_l/ldr_l/str_l below. Performs a
@ PC-relative \op on \sym with unlimited range: pre-v7 goes through a
@ literal word in .subsection 1; v7+ builds the offset with movw/movt.
Ard Biesheuvel0b167462020-09-14 11:23:39 +0300526 .macro __adldst_l, op, reg, sym, tmp, c
527 .if __LINUX_ARM_ARCH__ < 7
528 ldr\c \tmp, .La\@
529 .subsection 1
530 .align 2
531.La\@: .long \sym - .Lpc\@
532 .previous
533 .else
534 .ifnb \c
535 THUMB( ittt \c )
536 .endif
537 movw\c \tmp, #:lower16:\sym - .Lpc\@
538 movt\c \tmp, #:upper16:\sym - .Lpc\@
539 .endif
540
541#ifndef CONFIG_THUMB2_KERNEL
542 .set .Lpc\@, . + 8 // PC bias
543 .ifc \op, add
544 add\c \reg, \tmp, pc
545 .else
546 \op\c \reg, [pc, \tmp]
547 .endif
548#else
549.Lb\@: add\c \tmp, \tmp, pc
550 /*
551 * In Thumb-2 builds, the PC bias depends on whether we are currently
552 * emitting into a .arm or a .thumb section. The size of the add opcode
553 * above will be 2 bytes when emitting in Thumb mode and 4 bytes when
554 * emitting in ARM mode, so let's use this to account for the bias.
555 */
556 .set .Lpc\@, . + (. - .Lb\@)
557
558 .ifnc \op, add
559 \op\c \reg, [\tmp]
560 .endif
561#endif
562 .endm
563
564 /*
565 * mov_l - move a constant value or [relocated] address into a register
566 */
567 .macro mov_l, dst:req, imm:req
568 .if __LINUX_ARM_ARCH__ < 7
569 ldr \dst, =\imm
570 .else
571 movw \dst, #:lower16:\imm
572 movt \dst, #:upper16:\imm
573 .endif
574 .endm
575
576 /*
577 * adr_l - adr pseudo-op with unlimited range
578 *
579 * @dst: destination register
580 * @sym: name of the symbol
581 * @cond: conditional opcode suffix
582 */
583 .macro adr_l, dst:req, sym:req, cond
584 __adldst_l add, \dst, \sym, \dst, \cond
585 .endm
586
587 /*
588 * ldr_l - ldr <literal> pseudo-op with unlimited range
589 *
590 * @dst: destination register
591 * @sym: name of the symbol
592 * @cond: conditional opcode suffix
593 */
594 .macro ldr_l, dst:req, sym:req, cond
595 __adldst_l ldr, \dst, \sym, \dst, \cond
596 .endm
597
598 /*
599 * str_l - str <literal> pseudo-op with unlimited range
600 *
601 * @src: source register
602 * @sym: name of the symbol
603 * @tmp: mandatory scratch register
604 * @cond: conditional opcode suffix
605 */
606 .macro str_l, src:req, sym:req, tmp:req, cond
607 __adldst_l str, \src, \sym, \tmp, \cond
608 .endm
609
Ard Biesheuvel6468e892020-12-09 17:21:43 +0100610 /*
611 * rev_l - byte-swap a 32-bit value
612 *
613 * @val: source/destination register
614 * @tmp: scratch register
615 */
616 .macro rev_l, val:req, tmp:req
 @ Pre-v6 has no rev instruction: swap bytes with eor/bic/ror sequence.
617 .if __LINUX_ARM_ARCH__ < 6
618 eor \tmp, \val, \val, ror #16
619 bic \tmp, \tmp, #0x00ff0000
620 mov \val, \val, ror #8
621 eor \val, \val, \tmp, lsr #8
622 .else
623 rev \val, \val
624 .endif
625 .endm
626
Magnus Damm2bc58a62011-06-13 06:46:44 +0100627#endif /* __ASM_ASSEMBLER_H__ */