/*
 * omap iommu: main structures
 *
 * Copyright (C) 2008-2009 Nokia Corporation
 *
 * Written by Hiroshi DOYU <Hiroshi.DOYU@nokia.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef _OMAP_IOMMU_H
#define _OMAP_IOMMU_H

#include <linux/bitops.h>
#include <linux/iommu.h>

#define for_each_iotlb_cr(obj, n, __i, cr)				\
	for (__i = 0;							\
	     (__i < (n)) && (cr = __iotlb_read_cr((obj), __i), true);	\
	     __i++)

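/*
 * Example (illustrative sketch, not part of this header): walking the
 * hardware TLB entries of an IOMMU instance typically looks like:
 *
 *	struct cr_regs cr;
 *	int i;
 *
 *	for_each_iotlb_cr(obj, obj->nr_tlb_entries, i, cr) {
 *		if (!iotlb_cr_valid(&cr))
 *			continue;
 *		(inspect or flush the entry described by cr)
 *	}
 */
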
struct iotlb_entry {
	u32 da;
	u32 pa;
	u32 pgsz, prsvd, valid;
	u32 endian, elsz, mixed;
};

/**
 * struct omap_iommu_domain - omap iommu domain
 * @pgtable:	the page table
 * @iommu_dev:	an omap iommu device attached to this domain. Only a single
 *		iommu device can be attached for now.
 * @dev:	Device using this domain.
 * @lock:	domain lock, should be taken when attaching/detaching
 * @domain:	generic domain handle used by iommu core code
 */
struct omap_iommu_domain {
	u32 *pgtable;
	struct omap_iommu *iommu_dev;
	struct device *dev;
	spinlock_t lock;
	struct iommu_domain domain;
};

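/*
 * Example (illustrative sketch, not part of this header): with the
 * generic handle embedded as the last member, the driver can recover
 * its omap-specific container via container_of(), roughly as:
 *
 *	static struct omap_iommu_domain *to_omap_domain(struct iommu_domain *dom)
 *	{
 *		return container_of(dom, struct omap_iommu_domain, domain);
 *	}
 */
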
struct omap_iommu {
	const char	*name;
	void __iomem	*regbase;
	struct regmap	*syscfg;
	struct device	*dev;
	struct iommu_domain *domain;
	struct dentry	*debug_dir;

	spinlock_t	iommu_lock;	/* global for this whole object */

	/*
	 * Unlike a task's pgd, the iopgd is not switched per context;
	 * it is shared globally by each iommu instance.
	 */
	u32		*iopgd;
	spinlock_t	page_table_lock; /* protect iopgd */

	int		nr_tlb_entries;

	void		*ctx; /* iommu context: register save area */

	int		has_bus_err_back;
	u32		id;

	struct iommu_device iommu;
	struct iommu_group *group;
};

/**
 * struct omap_iommu_arch_data - omap iommu private data
 * @iommu_dev: handle of the iommu device
 *
 * This is an omap iommu private data object, which binds an iommu user
 * to its iommu device. This object should be placed at the iommu user's
 * dev_archdata so the generic IOMMU API can be used without having to
 * utilize omap-specific plumbing anymore.
 */
struct omap_iommu_arch_data {
	struct omap_iommu *iommu_dev;
};

struct cr_regs {
	u32 cam;
	u32 ram;
};

struct iotlb_lock {
	short base;
	short vict;
};

/**
 * dev_to_omap_iommu() - retrieves an omap iommu object from a user device
 * @dev: iommu client device
 */
static inline struct omap_iommu *dev_to_omap_iommu(struct device *dev)
{
	struct omap_iommu_arch_data *arch_data = dev->archdata.iommu;

	return arch_data->iommu_dev;
}

/*
 * MMU Register offsets
 */
#define MMU_REVISION		0x00
#define MMU_IRQSTATUS		0x18
#define MMU_IRQENABLE		0x1c
#define MMU_WALKING_ST		0x40
#define MMU_CNTL		0x44
#define MMU_FAULT_AD		0x48
#define MMU_TTB			0x4c
#define MMU_LOCK		0x50
#define MMU_LD_TLB		0x54
#define MMU_CAM			0x58
#define MMU_RAM			0x5c
#define MMU_GFLUSH		0x60
#define MMU_FLUSH_ENTRY		0x64
#define MMU_READ_CAM		0x68
#define MMU_READ_RAM		0x6c
#define MMU_EMU_FAULT_AD	0x70
#define MMU_GP_REG		0x88

#define MMU_REG_SIZE		256

/*
 * MMU Register bit definitions
 */
/* IRQSTATUS & IRQENABLE */
#define MMU_IRQ_MULTIHITFAULT		BIT(4)
#define MMU_IRQ_TABLEWALKFAULT		BIT(3)
#define MMU_IRQ_EMUMISS			BIT(2)
#define MMU_IRQ_TRANSLATIONFAULT	BIT(1)
#define MMU_IRQ_TLBMISS			BIT(0)

#define __MMU_IRQ_FAULT		\
	(MMU_IRQ_MULTIHITFAULT | MMU_IRQ_EMUMISS | MMU_IRQ_TRANSLATIONFAULT)
#define MMU_IRQ_MASK		\
	(__MMU_IRQ_FAULT | MMU_IRQ_TABLEWALKFAULT | MMU_IRQ_TLBMISS)
#define MMU_IRQ_TWL_MASK	(__MMU_IRQ_FAULT | MMU_IRQ_TABLEWALKFAULT)
#define MMU_IRQ_TLB_MISS_MASK	(__MMU_IRQ_FAULT | MMU_IRQ_TLBMISS)

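/*
 * Example (illustrative sketch, assuming the register accessors declared
 * further below): a fault handler typically reads the pending bits,
 * masks them with MMU_IRQ_MASK and writes them back to acknowledge:
 *
 *	u32 errs = iommu_read_reg(obj, MMU_IRQSTATUS) & MMU_IRQ_MASK;
 *
 *	if (errs & MMU_IRQ_TRANSLATIONFAULT)
 *		(report the faulting address from MMU_FAULT_AD)
 *	iommu_write_reg(obj, errs, MMU_IRQSTATUS);
 */
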
/* MMU_CNTL */
#define MMU_CNTL_SHIFT		1
#define MMU_CNTL_MASK		(7 << MMU_CNTL_SHIFT)
#define MMU_CNTL_EML_TLB	BIT(3)
#define MMU_CNTL_TWL_EN		BIT(2)
#define MMU_CNTL_MMU_EN		BIT(1)

| 157 | /* CAM */ |
Tony Lindgren | ed1c7de | 2012-11-02 12:24:06 -0700 | [diff] [blame] | 158 | #define MMU_CAM_VATAG_SHIFT 12 |
| 159 | #define MMU_CAM_VATAG_MASK \ |
| 160 | ((~0UL >> MMU_CAM_VATAG_SHIFT) << MMU_CAM_VATAG_SHIFT) |
Suman Anna | eb642a3 | 2015-07-20 17:33:31 -0500 | [diff] [blame] | 161 | #define MMU_CAM_P BIT(3) |
| 162 | #define MMU_CAM_V BIT(2) |
Tony Lindgren | ed1c7de | 2012-11-02 12:24:06 -0700 | [diff] [blame] | 163 | #define MMU_CAM_PGSZ_MASK 3 |
| 164 | #define MMU_CAM_PGSZ_1M (0 << 0) |
| 165 | #define MMU_CAM_PGSZ_64K (1 << 0) |
| 166 | #define MMU_CAM_PGSZ_4K (2 << 0) |
| 167 | #define MMU_CAM_PGSZ_16M (3 << 0) |
| 168 | |
/* RAM */
#define MMU_RAM_PADDR_SHIFT	12
#define MMU_RAM_PADDR_MASK \
	((~0UL >> MMU_RAM_PADDR_SHIFT) << MMU_RAM_PADDR_SHIFT)

#define MMU_RAM_ENDIAN_SHIFT	9
#define MMU_RAM_ENDIAN_MASK	BIT(MMU_RAM_ENDIAN_SHIFT)
#define MMU_RAM_ENDIAN_LITTLE	(0 << MMU_RAM_ENDIAN_SHIFT)
#define MMU_RAM_ENDIAN_BIG	BIT(MMU_RAM_ENDIAN_SHIFT)

#define MMU_RAM_ELSZ_SHIFT	7
#define MMU_RAM_ELSZ_MASK	(3 << MMU_RAM_ELSZ_SHIFT)
#define MMU_RAM_ELSZ_8		(0 << MMU_RAM_ELSZ_SHIFT)
#define MMU_RAM_ELSZ_16		(1 << MMU_RAM_ELSZ_SHIFT)
#define MMU_RAM_ELSZ_32		(2 << MMU_RAM_ELSZ_SHIFT)
#define MMU_RAM_ELSZ_NONE	(3 << MMU_RAM_ELSZ_SHIFT)
#define MMU_RAM_MIXED_SHIFT	6
#define MMU_RAM_MIXED_MASK	BIT(MMU_RAM_MIXED_SHIFT)
#define MMU_RAM_MIXED		MMU_RAM_MIXED_MASK

#define MMU_GP_REG_BUS_ERR_BACK_EN	0x1

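/*
 * Example (illustrative sketch, not part of this header): a CAM/RAM
 * register pair for a valid, little-endian, 4K entry is composed from
 * the fields above, roughly as:
 *
 *	cr.cam = (da & MMU_CAM_VATAG_MASK) | MMU_CAM_V | MMU_CAM_PGSZ_4K;
 *	cr.ram = (pa & MMU_RAM_PADDR_MASK) | MMU_RAM_ENDIAN_LITTLE |
 *		 MMU_RAM_ELSZ_8;
 */
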
#define get_cam_va_mask(pgsz)				\
	(((pgsz) == MMU_CAM_PGSZ_16M) ? 0xff000000 :	\
	 ((pgsz) == MMU_CAM_PGSZ_1M)  ? 0xfff00000 :	\
	 ((pgsz) == MMU_CAM_PGSZ_64K) ? 0xffff0000 :	\
	 ((pgsz) == MMU_CAM_PGSZ_4K)  ? 0xfffff000 : 0)

/*
 * DSP_SYSTEM registers and bit definitions (applicable only for DRA7xx DSP)
 */
#define DSP_SYS_REVISION		0x00
#define DSP_SYS_MMU_CONFIG		0x18
#define DSP_SYS_MMU_CONFIG_EN_SHIFT	4

/*
 * utilities for super pages (16MB, 1MB, 64KB and 4KB)
 */

#define iopgsz_max(bytes)			\
	(((bytes) >= SZ_16M) ? SZ_16M :		\
	 ((bytes) >= SZ_1M)  ? SZ_1M :		\
	 ((bytes) >= SZ_64K) ? SZ_64K :		\
	 ((bytes) >= SZ_4K)  ? SZ_4K : 0)

#define bytes_to_iopgsz(bytes)				\
	(((bytes) == SZ_16M) ? MMU_CAM_PGSZ_16M :	\
	 ((bytes) == SZ_1M)  ? MMU_CAM_PGSZ_1M :	\
	 ((bytes) == SZ_64K) ? MMU_CAM_PGSZ_64K :	\
	 ((bytes) == SZ_4K)  ? MMU_CAM_PGSZ_4K : -1)

#define iopgsz_to_bytes(iopgsz)				\
	(((iopgsz) == MMU_CAM_PGSZ_16M) ? SZ_16M :	\
	 ((iopgsz) == MMU_CAM_PGSZ_1M)  ? SZ_1M :	\
	 ((iopgsz) == MMU_CAM_PGSZ_64K) ? SZ_64K :	\
	 ((iopgsz) == MMU_CAM_PGSZ_4K)  ? SZ_4K : 0)

#define iopgsz_ok(bytes) (bytes_to_iopgsz(bytes) >= 0)

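/*
 * Example (illustrative sketch, not part of this header): mapping code
 * that needs to pick a supported page size uses these helpers
 * roughly as:
 *
 *	size_t pgsz = iopgsz_max(bytes);	(e.g. 0x11000 -> SZ_64K)
 *	int cam_pgsz = bytes_to_iopgsz(pgsz);	(-> MMU_CAM_PGSZ_64K)
 *
 *	if (!iopgsz_ok(bytes))
 *		(bytes is not itself one of the supported page sizes)
 */
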
/*
 * global functions
 */

struct cr_regs __iotlb_read_cr(struct omap_iommu *obj, int n);
void iotlb_lock_get(struct omap_iommu *obj, struct iotlb_lock *l);
void iotlb_lock_set(struct omap_iommu *obj, struct iotlb_lock *l);

#ifdef CONFIG_OMAP_IOMMU_DEBUG
void omap_iommu_debugfs_init(void);
void omap_iommu_debugfs_exit(void);

void omap_iommu_debugfs_add(struct omap_iommu *obj);
void omap_iommu_debugfs_remove(struct omap_iommu *obj);
#else
static inline void omap_iommu_debugfs_init(void) { }
static inline void omap_iommu_debugfs_exit(void) { }

static inline void omap_iommu_debugfs_add(struct omap_iommu *obj) { }
static inline void omap_iommu_debugfs_remove(struct omap_iommu *obj) { }
#endif

/*
 * register accessors
 */
static inline u32 iommu_read_reg(struct omap_iommu *obj, size_t offs)
{
	return __raw_readl(obj->regbase + offs);
}

static inline void iommu_write_reg(struct omap_iommu *obj, u32 val, size_t offs)
{
	__raw_writel(val, obj->regbase + offs);
}

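/*
 * Example (illustrative sketch, not part of this header): enabling the
 * MMU with hardware table walk boils down to a read-modify-write of
 * MMU_CNTL through these accessors, roughly as:
 *
 *	u32 l = iommu_read_reg(obj, MMU_CNTL);
 *
 *	l &= ~MMU_CNTL_MASK;
 *	l |= (MMU_CNTL_MMU_EN | MMU_CNTL_TWL_EN);
 *	iommu_write_reg(obj, l, MMU_CNTL);
 */
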
static inline int iotlb_cr_valid(struct cr_regs *cr)
{
	if (!cr)
		return -EINVAL;

	return cr->cam & MMU_CAM_V;
}

#endif /* _OMAP_IOMMU_H */