/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2015 Regents of the University of California
 */

#ifndef _ASM_RISCV_CSR_H
#define _ASM_RISCV_CSR_H

#include <asm/asm.h>
#include <linux/const.h>
/*
 * Status register (mstatus/sstatus) flags.
 * _AC() keeps the constants usable from both C and assembly.
 */
#define SR_SIE		_AC(0x00000002, UL) /* Supervisor Interrupt Enable */
#define SR_MIE		_AC(0x00000008, UL) /* Machine Interrupt Enable */
#define SR_SPIE		_AC(0x00000020, UL) /* Previous Supervisor IE */
#define SR_MPIE		_AC(0x00000080, UL) /* Previous Machine IE */
#define SR_SPP		_AC(0x00000100, UL) /* Previously Supervisor */
#define SR_MPP		_AC(0x00001800, UL) /* Previously Machine */
#define SR_SUM		_AC(0x00040000, UL) /* Supervisor User Memory Access */

/* Floating-point unit state field; DIRTY is both FS bits set. */
#define SR_FS		_AC(0x00006000, UL) /* Floating-point Status */
#define SR_FS_OFF	_AC(0x00000000, UL)
#define SR_FS_INITIAL	_AC(0x00002000, UL)
#define SR_FS_CLEAN	_AC(0x00004000, UL)
#define SR_FS_DIRTY	_AC(0x00006000, UL)

/* Custom-extension state field, same OFF/INITIAL/CLEAN/DIRTY encoding. */
#define SR_XS		_AC(0x00018000, UL) /* Extension Status */
#define SR_XS_OFF	_AC(0x00000000, UL)
#define SR_XS_INITIAL	_AC(0x00008000, UL)
#define SR_XS_CLEAN	_AC(0x00010000, UL)
#define SR_XS_DIRTY	_AC(0x00018000, UL)

/* SD is the most-significant status bit, so its position is XLEN-dependent. */
#ifndef CONFIG_64BIT
#define SR_SD		_AC(0x80000000, UL) /* FS/XS dirty */
#else
#define SR_SD		_AC(0x8000000000000000, UL) /* FS/XS dirty */
#endif

/*
 * SATP (Supervisor Address Translation and Protection) register layout.
 * PPN is the page-table root's physical page number; MODE selects the
 * paging scheme (Sv32 on rv32, Sv39 on rv64 here).
 */
#ifndef CONFIG_64BIT
#define SATP_PPN	_AC(0x003FFFFF, UL)
#define SATP_MODE_32	_AC(0x80000000, UL)
#define SATP_MODE	SATP_MODE_32
#else
#define SATP_PPN	_AC(0x00000FFFFFFFFFFF, UL)
#define SATP_MODE_39	_AC(0x8000000000000000, UL)
#define SATP_MODE	SATP_MODE_39
#endif

/* Exception cause high bit - is an interrupt if set */
#define CAUSE_IRQ_FLAG		(_AC(1, UL) << (__riscv_xlen - 1))

/*
 * Interrupt causes (minus the high bit).  Grouped as software, timer and
 * external interrupts for each of User/Supervisor/Machine privilege levels.
 */
#define IRQ_U_SOFT		0
#define IRQ_S_SOFT		1
#define IRQ_M_SOFT		3
#define IRQ_U_TIMER		4
#define IRQ_S_TIMER		5
#define IRQ_M_TIMER		7
#define IRQ_U_EXT		8
#define IRQ_S_EXT		9
#define IRQ_M_EXT		11

/* Exception causes (cause register values with CAUSE_IRQ_FLAG clear) */
#define EXC_INST_MISALIGNED	0
#define EXC_INST_ACCESS		1
#define EXC_BREAKPOINT		3
#define EXC_LOAD_ACCESS		5
#define EXC_STORE_ACCESS	7
#define EXC_SYSCALL		8	/* environment call (ecall) from U-mode */
#define EXC_INST_PAGE_FAULT	12
#define EXC_LOAD_PAGE_FAULT	13
#define EXC_STORE_PAGE_FAULT	15

/*
 * PMP (Physical Memory Protection) per-entry configuration byte.
 * PMP_A is the two-bit address-matching mode field; TOR/NA4/NAPOT are
 * its non-zero encodings.  PMP_L locks the entry.
 */
#define PMP_R			0x01	/* read permission */
#define PMP_W			0x02	/* write permission */
#define PMP_X			0x04	/* execute permission */
#define PMP_A			0x18	/* address-matching mode mask */
#define PMP_A_TOR		0x08	/* top-of-range */
#define PMP_A_NA4		0x10	/* naturally aligned 4-byte region */
#define PMP_A_NAPOT		0x18	/* naturally aligned power-of-two region */
#define PMP_L			0x80	/* lock entry */

/* symbolic CSR names: */
/* Unprivileged counters (and their rv32 high-half shadows). */
#define CSR_CYCLE		0xc00
#define CSR_TIME		0xc01
#define CSR_INSTRET		0xc02
#define CSR_CYCLEH		0xc80
#define CSR_TIMEH		0xc81
#define CSR_INSTRETH		0xc82

/* Supervisor-level CSRs. */
#define CSR_SSTATUS		0x100
#define CSR_SIE			0x104
#define CSR_STVEC		0x105
#define CSR_SCOUNTEREN		0x106
#define CSR_SSCRATCH		0x140
#define CSR_SEPC		0x141
#define CSR_SCAUSE		0x142
#define CSR_STVAL		0x143
#define CSR_SIP			0x144
#define CSR_SATP		0x180

/* Machine-level CSRs. */
#define CSR_MSTATUS		0x300
#define CSR_MISA		0x301
#define CSR_MIE			0x304
#define CSR_MTVEC		0x305
#define CSR_MSCRATCH		0x340
#define CSR_MEPC		0x341
#define CSR_MCAUSE		0x342
#define CSR_MTVAL		0x343
#define CSR_MIP			0x344
#define CSR_PMPCFG0		0x3a0
#define CSR_PMPADDR0		0x3b0
#define CSR_MHARTID		0xf14

/*
 * Mode-independent aliases: when the kernel runs in machine mode
 * (CONFIG_RISCV_M_MODE) these resolve to the M-mode CSRs and status/IRQ
 * bits, otherwise to the supervisor-mode equivalents.  Generic code uses
 * only these aliases so it works in either privilege mode.
 */
#ifdef CONFIG_RISCV_M_MODE
# define CSR_STATUS	CSR_MSTATUS
# define CSR_IE		CSR_MIE
# define CSR_TVEC	CSR_MTVEC
# define CSR_SCRATCH	CSR_MSCRATCH
# define CSR_EPC	CSR_MEPC
# define CSR_CAUSE	CSR_MCAUSE
# define CSR_TVAL	CSR_MTVAL
# define CSR_IP		CSR_MIP

# define SR_IE		SR_MIE
# define SR_PIE		SR_MPIE
# define SR_PP		SR_MPP

# define RV_IRQ_SOFT		IRQ_M_SOFT
# define RV_IRQ_TIMER		IRQ_M_TIMER
# define RV_IRQ_EXT		IRQ_M_EXT
#else /* CONFIG_RISCV_M_MODE */
# define CSR_STATUS	CSR_SSTATUS
# define CSR_IE		CSR_SIE
# define CSR_TVEC	CSR_STVEC
# define CSR_SCRATCH	CSR_SSCRATCH
# define CSR_EPC	CSR_SEPC
# define CSR_CAUSE	CSR_SCAUSE
# define CSR_TVAL	CSR_STVAL
# define CSR_IP		CSR_SIP

# define SR_IE		SR_SIE
# define SR_PIE		SR_SPIE
# define SR_PP		SR_SPP

# define RV_IRQ_SOFT		IRQ_S_SOFT
# define RV_IRQ_TIMER		IRQ_S_TIMER
# define RV_IRQ_EXT		IRQ_S_EXT
#endif /* CONFIG_RISCV_M_MODE */

/* IE/IP (Supervisor/Machine Interrupt Enable/Pending) flags */
#define IE_SIE		(_AC(0x1, UL) << RV_IRQ_SOFT)	/* software */
#define IE_TIE		(_AC(0x1, UL) << RV_IRQ_TIMER)	/* timer */
#define IE_EIE		(_AC(0x1, UL) << RV_IRQ_EXT)	/* external */

#ifndef __ASSEMBLY__

/*
 * csr_swap() - write @val into CSR @csr (csrrw) and return the CSR's
 * previous value.  @csr must be a constant CSR number/name since it is
 * stringified into the instruction; the "rK" constraint lets small
 * immediates use the csrrwi form.  The "memory" clobber keeps the access
 * ordered against surrounding memory operations.
 */
#define csr_swap(csr, val)					\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrrw %0, " __ASM_STR(csr) ", %1"\
			      : "=r" (__v) : "rK" (__v)		\
			      : "memory");			\
	__v;							\
})

/*
 * csr_read() - return the current value of CSR @csr (csrr).
 * volatile + "memory" clobber prevent the read from being hoisted,
 * merged, or reordered by the compiler.
 */
#define csr_read(csr)						\
({								\
	register unsigned long __v;				\
	__asm__ __volatile__ ("csrr %0, " __ASM_STR(csr)	\
			      : "=r" (__v) :			\
			      : "memory");			\
	__v;							\
})

/*
 * csr_write() - write @val into CSR @csr (csrw); no result.
 * "rK" allows a 5-bit immediate operand to use the csrwi encoding.
 */
#define csr_write(csr, val)					\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrw " __ASM_STR(csr) ", %0"	\
			      : : "rK" (__v)			\
			      : "memory");			\
})

/*
 * csr_read_set() - set the bits in @val within CSR @csr (csrrs) and
 * return the CSR's previous value.
 */
#define csr_read_set(csr, val)					\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrrs %0, " __ASM_STR(csr) ", %1"\
			      : "=r" (__v) : "rK" (__v)		\
			      : "memory");			\
	__v;							\
})

/*
 * csr_set() - set the bits in @val within CSR @csr (csrs); the old
 * value is not returned.
 */
#define csr_set(csr, val)					\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrs " __ASM_STR(csr) ", %0"	\
			      : : "rK" (__v)			\
			      : "memory");			\
})

/*
 * csr_read_clear() - clear the bits in @val within CSR @csr (csrrc) and
 * return the CSR's previous value.
 */
#define csr_read_clear(csr, val)				\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrrc %0, " __ASM_STR(csr) ", %1"\
			      : "=r" (__v) : "rK" (__v)		\
			      : "memory");			\
	__v;							\
})

/*
 * csr_clear() - clear the bits in @val within CSR @csr (csrc); the old
 * value is not returned.
 */
#define csr_clear(csr, val)					\
({								\
	unsigned long __v = (unsigned long)(val);		\
	__asm__ __volatile__ ("csrc " __ASM_STR(csr) ", %0"	\
			      : : "rK" (__v)			\
			      : "memory");			\
})

#endif /* __ASSEMBLY__ */

#endif /* _ASM_RISCV_CSR_H */