blob: dcd43b067a6a619790a24cdeff83e7c0af119e90 [file] [log] [blame]
Sergei Shtylyov05214442009-03-11 19:49:05 +04001/*
2 * TI Common Platform Interrupt Controller (cp_intc) driver
3 *
4 * Author: Steve Chen <schen@mvista.com>
5 * Copyright (C) 2008-2009, MontaVista Software, Inc. <source@mvista.com>
6 *
7 * This file is licensed under the terms of the GNU General Public License
8 * version 2. This program is licensed "as is" without any warranty of any
9 * kind, whether express or implied.
10 */
11
Heiko Schocher07caba92012-05-30 12:18:57 +020012#include <linux/export.h>
Sergei Shtylyov05214442009-03-11 19:49:05 +040013#include <linux/init.h>
Sergei Shtylyov05214442009-03-11 19:49:05 +040014#include <linux/irq.h>
David Lechner9a7f2fc2016-02-29 16:33:26 -060015#include <linux/irqchip.h>
Heiko Schocher07caba92012-05-30 12:18:57 +020016#include <linux/irqdomain.h>
Sergei Shtylyov05214442009-03-11 19:49:05 +040017#include <linux/io.h>
Heiko Schocher961e6572012-05-30 12:18:58 +020018#include <linux/of.h>
19#include <linux/of_address.h>
20#include <linux/of_irq.h>
Sergei Shtylyov05214442009-03-11 19:49:05 +040021
Bartosz Golaszewskid0064592019-02-14 15:51:58 +010022#include <asm/exception.h>
Cyril Chemparathybd808942010-05-07 17:06:37 -040023#include <mach/common.h>
Bartosz Golaszewskied4d1892019-02-14 15:52:17 +010024
/*
 * cp_intc register map (offsets from davinci_cp_intc_base).
 * The (n)-indexed macros address the n-th 32-bit register of a bank,
 * hence the "<< 2" byte-offset scaling.
 */
#define DAVINCI_CP_INTC_CTRL			0x04
#define DAVINCI_CP_INTC_HOST_CTRL		0x0c
#define DAVINCI_CP_INTC_GLOBAL_ENABLE		0x10
#define DAVINCI_CP_INTC_SYS_STAT_IDX_CLR	0x24
#define DAVINCI_CP_INTC_SYS_ENABLE_IDX_SET	0x28
#define DAVINCI_CP_INTC_SYS_ENABLE_IDX_CLR	0x2c
#define DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET	0x34
#define DAVINCI_CP_INTC_HOST_ENABLE_IDX_CLR	0x38
#define DAVINCI_CP_INTC_PRIO_IDX		0x80
/*
 * Parenthesize the macro argument: without the inner parens an argument
 * such as "i & 1" would expand to "i & (1 << 2)" due to operator
 * precedence and silently compute the wrong offset.
 */
#define DAVINCI_CP_INTC_SYS_STAT_CLR(n)		(0x0280 + ((n) << 2))
#define DAVINCI_CP_INTC_SYS_ENABLE_CLR(n)	(0x0380 + ((n) << 2))
#define DAVINCI_CP_INTC_CHAN_MAP(n)		(0x0400 + ((n) << 2))
#define DAVINCI_CP_INTC_SYS_POLARITY(n)		(0x0d00 + ((n) << 2))
#define DAVINCI_CP_INTC_SYS_TYPE(n)		(0x0d80 + ((n) << 2))
#define DAVINCI_CP_INTC_HOST_ENABLE(n)		(0x1500 + ((n) << 2))
#define DAVINCI_CP_INTC_PRI_INDX_MASK		GENMASK(9, 0)
#define DAVINCI_CP_INTC_GPIR_NONE		BIT(31)
42
/* MMIO base of the cp_intc register block; mapped in davinci_cp_intc_of_init(). */
static void __iomem *davinci_cp_intc_base;
/* Legacy irq domain translating cp_intc hw irq numbers to Linux virqs. */
static struct irq_domain *davinci_cp_intc_irq_domain;
Bartosz Golaszewskifb746842019-02-14 15:52:00 +010045
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010046static inline unsigned int davinci_cp_intc_read(unsigned int offset)
Sergei Shtylyov05214442009-03-11 19:49:05 +040047{
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010048 return __raw_readl(davinci_cp_intc_base + offset);
Sergei Shtylyov05214442009-03-11 19:49:05 +040049}
50
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010051static inline void davinci_cp_intc_write(unsigned long value,
52 unsigned int offset)
Sergei Shtylyov05214442009-03-11 19:49:05 +040053{
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010054 __raw_writel(value, davinci_cp_intc_base + offset);
Sergei Shtylyov05214442009-03-11 19:49:05 +040055}
56
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010057static void davinci_cp_intc_ack_irq(struct irq_data *d)
Sergei Shtylyov05214442009-03-11 19:49:05 +040058{
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010059 davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_STAT_IDX_CLR);
Sergei Shtylyov05214442009-03-11 19:49:05 +040060}
61
62/* Disable interrupt */
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010063static void davinci_cp_intc_mask_irq(struct irq_data *d)
Sergei Shtylyov05214442009-03-11 19:49:05 +040064{
65 /* XXX don't know why we need to disable nIRQ here... */
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010066 davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_CLR);
67 davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_CLR);
68 davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET);
Sergei Shtylyov05214442009-03-11 19:49:05 +040069}
70
71/* Enable interrupt */
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010072static void davinci_cp_intc_unmask_irq(struct irq_data *d)
Sergei Shtylyov05214442009-03-11 19:49:05 +040073{
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010074 davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_SET);
Sergei Shtylyov05214442009-03-11 19:49:05 +040075}
76
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010077static int davinci_cp_intc_set_irq_type(struct irq_data *d,
78 unsigned int flow_type)
Sergei Shtylyov05214442009-03-11 19:49:05 +040079{
Heiko Schocher07caba92012-05-30 12:18:57 +020080 unsigned reg = BIT_WORD(d->hwirq);
81 unsigned mask = BIT_MASK(d->hwirq);
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +010082 unsigned polarity = davinci_cp_intc_read(
83 DAVINCI_CP_INTC_SYS_POLARITY(reg));
84 unsigned type = davinci_cp_intc_read(
85 DAVINCI_CP_INTC_SYS_TYPE(reg));
Sergei Shtylyov05214442009-03-11 19:49:05 +040086
87 switch (flow_type) {
88 case IRQ_TYPE_EDGE_RISING:
89 polarity |= mask;
90 type |= mask;
91 break;
92 case IRQ_TYPE_EDGE_FALLING:
93 polarity &= ~mask;
94 type |= mask;
95 break;
96 case IRQ_TYPE_LEVEL_HIGH:
97 polarity |= mask;
98 type &= ~mask;
99 break;
100 case IRQ_TYPE_LEVEL_LOW:
101 polarity &= ~mask;
102 type &= ~mask;
103 break;
104 default:
105 return -EINVAL;
106 }
107
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100108 davinci_cp_intc_write(polarity, DAVINCI_CP_INTC_SYS_POLARITY(reg));
109 davinci_cp_intc_write(type, DAVINCI_CP_INTC_SYS_TYPE(reg));
Sergei Shtylyov05214442009-03-11 19:49:05 +0400110
111 return 0;
112}
113
/*
 * irq_chip callbacks for cp_intc. SKIP_SET_WAKE lets the core treat
 * irq_set_wake() as a no-op success instead of failing for lack of a
 * .irq_set_wake handler.
 */
static struct irq_chip davinci_cp_intc_irq_chip = {
	.name		= "cp_intc",
	.irq_ack	= davinci_cp_intc_ack_irq,
	.irq_mask	= davinci_cp_intc_mask_irq,
	.irq_unmask	= davinci_cp_intc_unmask_irq,
	.irq_set_type	= davinci_cp_intc_set_irq_type,
	.flags		= IRQCHIP_SKIP_SET_WAKE,
};
122
Bartosz Golaszewskid0064592019-02-14 15:51:58 +0100123static asmlinkage void __exception_irq_entry
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100124davinci_cp_intc_handle_irq(struct pt_regs *regs)
Bartosz Golaszewskid0064592019-02-14 15:51:58 +0100125{
126 int gpir, irqnr, none;
127
128 /*
129 * The interrupt number is in first ten bits. The NONE field set to 1
130 * indicates a spurious irq.
131 */
132
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100133 gpir = davinci_cp_intc_read(DAVINCI_CP_INTC_PRIO_IDX);
Bartosz Golaszewskid0064592019-02-14 15:51:58 +0100134 irqnr = gpir & DAVINCI_CP_INTC_PRI_INDX_MASK;
135 none = gpir & DAVINCI_CP_INTC_GPIR_NONE;
136
137 if (unlikely(none)) {
138 pr_err_once("%s: spurious irq!\n", __func__);
139 return;
140 }
141
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100142 handle_domain_irq(davinci_cp_intc_irq_domain, irqnr, regs);
Bartosz Golaszewskid0064592019-02-14 15:51:58 +0100143}
144
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100145static int davinci_cp_intc_host_map(struct irq_domain *h, unsigned int virq,
Heiko Schocher07caba92012-05-30 12:18:57 +0200146 irq_hw_number_t hw)
Sergei Shtylyov05214442009-03-11 19:49:05 +0400147{
Heiko Schocher07caba92012-05-30 12:18:57 +0200148 pr_debug("cp_intc_host_map(%d, 0x%lx)\n", virq, hw);
149
Bartosz Golaszewskib35b55e2019-02-14 15:52:21 +0100150 irq_set_chip(virq, &davinci_cp_intc_irq_chip);
Rob Herringe8d36d52015-07-27 15:55:13 -0500151 irq_set_probe(virq);
Heiko Schocher07caba92012-05-30 12:18:57 +0200152 irq_set_handler(virq, handle_edge_irq);
153 return 0;
154}
155
/*
 * Domain ops: onetwocell xlate accepts one-cell (hwirq) or two-cell
 * (hwirq, trigger type) DT interrupt specifiers.
 */
static const struct irq_domain_ops davinci_cp_intc_irq_domain_ops = {
	.map = davinci_cp_intc_host_map,
	.xlate = irq_domain_xlate_onetwocell,
};
160
/*
 * Map and initialize the cp_intc controller, then register its irq domain
 * and the top-level IRQ handler.
 *
 * Works both as a DT init callback (@node set, registers mapped via
 * of_iomap, irq count taken from the "ti,intc-size" property) and as a
 * legacy entry (@node NULL, base/size/priorities taken from
 * davinci_soc_info). @parent is unused. Returns 0 on success or -EINVAL
 * if the mapping or irq domain creation fails.
 *
 * The register write ordering below matters: interrupts are globally
 * disabled and fully masked/cleared before the channel map is programmed
 * and the controller is re-enabled.
 */
static int __init davinci_cp_intc_of_init(struct device_node *node,
					  struct device_node *parent)
{
	u32 num_irq = davinci_soc_info.intc_irq_num;
	u8 *irq_prio = davinci_soc_info.intc_irq_prios;
	unsigned num_reg = BITS_TO_LONGS(num_irq);
	int i, irq_base;

	if (node) {
		davinci_cp_intc_base = of_iomap(node, 0);
		/* Fall back to the davinci_soc_info count if the DT lacks it. */
		if (of_property_read_u32(node, "ti,intc-size", &num_irq))
			pr_warn("unable to get intc-size, default to %d\n",
				num_irq);
	} else {
		davinci_cp_intc_base = ioremap(davinci_soc_info.intc_base, SZ_8K);
	}
	if (WARN_ON(!davinci_cp_intc_base))
		return -EINVAL;

	/* Globally disable the controller while we reprogram it. */
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_GLOBAL_ENABLE);

	/* Disable all host interrupts */
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_HOST_ENABLE(0));

	/* Disable system interrupts */
	for (i = 0; i < num_reg; i++)
		davinci_cp_intc_write(~0, DAVINCI_CP_INTC_SYS_ENABLE_CLR(i));

	/* Set to normal mode, no nesting, no priority hold */
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_CTRL);
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_HOST_CTRL);

	/* Clear system interrupt status */
	for (i = 0; i < num_reg; i++)
		davinci_cp_intc_write(~0, DAVINCI_CP_INTC_SYS_STAT_CLR(i));

	/* Enable nIRQ (what about nFIQ?) */
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET);

	/*
	 * Priority is determined by host channel: lower channel number has
	 * higher priority i.e. channel 0 has highest priority and channel 31
	 * had the lowest priority.
	 */
	num_reg = (num_irq + 3) >> 2;	/* 4 channels per register */
	if (irq_prio) {
		unsigned j, k;
		u32 val;

		/*
		 * Pack four 8-bit per-irq channel numbers into each CHAN_MAP
		 * register, least-significant byte first.
		 */
		for (k = i = 0; i < num_reg; i++) {
			for (val = j = 0; j < 4; j++, k++) {
				val >>= 8;
				if (k < num_irq)
					val |= irq_prio[k] << 24;
			}

			davinci_cp_intc_write(val, DAVINCI_CP_INTC_CHAN_MAP(i));
		}
	} else {
		/*
		 * Default everything to channel 15 if priority not specified.
		 * Note that channel 0-1 are mapped to nFIQ and channels 2-31
		 * are mapped to nIRQ.
		 */
		for (i = 0; i < num_reg; i++)
			davinci_cp_intc_write(0x0f0f0f0f,
					      DAVINCI_CP_INTC_CHAN_MAP(i));
	}

	/* Reserve a contiguous virq range; on failure fall back to base 0. */
	irq_base = irq_alloc_descs(-1, 0, num_irq, 0);
	if (irq_base < 0) {
		pr_warn("Couldn't allocate IRQ numbers\n");
		irq_base = 0;
	}

	/* create a legacy host */
	davinci_cp_intc_irq_domain = irq_domain_add_legacy(
					node, num_irq, irq_base, 0,
					&davinci_cp_intc_irq_domain_ops, NULL);

	if (!davinci_cp_intc_irq_domain) {
		pr_err("cp_intc: failed to allocate irq host!\n");
		return -EINVAL;
	}

	set_handle_irq(davinci_cp_intc_handle_irq);

	/* Enable global interrupt */
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_GLOBAL_ENABLE);

	return 0;
}
253
/*
 * Non-DT entry point: initialize the controller using the static base
 * address and irq data from davinci_soc_info (NULL device node).
 */
void __init davinci_cp_intc_init(void)
{
	davinci_cp_intc_of_init(NULL, NULL);
}
David Lechner9a7f2fc2016-02-29 16:33:26 -0600258
/* DT probe hook: matches "ti,cp-intc" and runs davinci_cp_intc_of_init(). */
IRQCHIP_DECLARE(cp_intc, "ti,cp-intc", davinci_cp_intc_of_init);