// SPDX-License-Identifier: GPL-2.0-only
//
// Author: Steve Chen <schen@mvista.com>
// Copyright (C) 2008-2009, MontaVista Software, Inc. <source@mvista.com>
// Author: Bartosz Golaszewski <bgolaszewski@baylibre.com>
// Copyright (C) 2019, Texas Instruments
//
// TI Common Platform Interrupt Controller (cp_intc) driver

#include <linux/export.h>
#include <linux/init.h>
#include <linux/irq.h>
#include <linux/irqchip.h>
#include <linux/irqchip/irq-davinci-cp-intc.h>
#include <linux/irqdomain.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>

#include <asm/exception.h>

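/*
 * Register map notes: the *_IDX_* registers take an interrupt number
 * directly, while the (n)-indexed registers are arrays of 32-bit words with
 * one bit per system interrupt (enable/status/polarity/type) or one byte per
 * interrupt (channel map). PRIO_IDX is the prioritized interrupt index
 * register: bits 9:0 hold the pending interrupt number and bit 31 (NONE) is
 * set when no interrupt is pending.
 */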
#define DAVINCI_CP_INTC_CTRL			0x04
#define DAVINCI_CP_INTC_HOST_CTRL		0x0c
#define DAVINCI_CP_INTC_GLOBAL_ENABLE		0x10
#define DAVINCI_CP_INTC_SYS_STAT_IDX_CLR	0x24
#define DAVINCI_CP_INTC_SYS_ENABLE_IDX_SET	0x28
#define DAVINCI_CP_INTC_SYS_ENABLE_IDX_CLR	0x2c
#define DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET	0x34
#define DAVINCI_CP_INTC_HOST_ENABLE_IDX_CLR	0x38
#define DAVINCI_CP_INTC_PRIO_IDX		0x80
#define DAVINCI_CP_INTC_SYS_STAT_CLR(n)		(0x0280 + (n << 2))
#define DAVINCI_CP_INTC_SYS_ENABLE_CLR(n)	(0x0380 + (n << 2))
#define DAVINCI_CP_INTC_CHAN_MAP(n)		(0x0400 + (n << 2))
#define DAVINCI_CP_INTC_SYS_POLARITY(n)		(0x0d00 + (n << 2))
#define DAVINCI_CP_INTC_SYS_TYPE(n)		(0x0d80 + (n << 2))
#define DAVINCI_CP_INTC_HOST_ENABLE(n)		(0x1500 + (n << 2))
#define DAVINCI_CP_INTC_PRI_INDX_MASK		GENMASK(9, 0)
#define DAVINCI_CP_INTC_GPIR_NONE		BIT(31)

static void __iomem *davinci_cp_intc_base;
static struct irq_domain *davinci_cp_intc_irq_domain;

static inline unsigned int davinci_cp_intc_read(unsigned int offset)
{
	return readl_relaxed(davinci_cp_intc_base + offset);
}

static inline void davinci_cp_intc_write(unsigned long value,
					 unsigned int offset)
{
	writel_relaxed(value, davinci_cp_intc_base + offset);
}

static void davinci_cp_intc_ack_irq(struct irq_data *d)
{
	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_STAT_IDX_CLR);
}

static void davinci_cp_intc_mask_irq(struct irq_data *d)
{
	/* XXX don't know why we need to disable nIRQ here... */
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_CLR);
	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_CLR);
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET);
}

static void davinci_cp_intc_unmask_irq(struct irq_data *d)
{
	davinci_cp_intc_write(d->hwirq, DAVINCI_CP_INTC_SYS_ENABLE_IDX_SET);
}

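/*
 * SYS_POLARITY and SYS_TYPE hold one bit per system interrupt: a set
 * polarity bit selects rising-edge/active-high, a cleared one
 * falling-edge/active-low; a set type bit selects edge triggering, a
 * cleared one level triggering.
 */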
static int davinci_cp_intc_set_irq_type(struct irq_data *d,
					unsigned int flow_type)
{
	unsigned int reg, mask, polarity, type;

	reg = BIT_WORD(d->hwirq);
	mask = BIT_MASK(d->hwirq);
	polarity = davinci_cp_intc_read(DAVINCI_CP_INTC_SYS_POLARITY(reg));
	type = davinci_cp_intc_read(DAVINCI_CP_INTC_SYS_TYPE(reg));

	switch (flow_type) {
	case IRQ_TYPE_EDGE_RISING:
		polarity |= mask;
		type |= mask;
		break;
	case IRQ_TYPE_EDGE_FALLING:
		polarity &= ~mask;
		type |= mask;
		break;
	case IRQ_TYPE_LEVEL_HIGH:
		polarity |= mask;
		type &= ~mask;
		break;
	case IRQ_TYPE_LEVEL_LOW:
		polarity &= ~mask;
		type &= ~mask;
		break;
	default:
		return -EINVAL;
	}

	davinci_cp_intc_write(polarity, DAVINCI_CP_INTC_SYS_POLARITY(reg));
	davinci_cp_intc_write(type, DAVINCI_CP_INTC_SYS_TYPE(reg));

	return 0;
}

static struct irq_chip davinci_cp_intc_irq_chip = {
	.name		= "cp_intc",
	.irq_ack	= davinci_cp_intc_ack_irq,
	.irq_mask	= davinci_cp_intc_mask_irq,
	.irq_unmask	= davinci_cp_intc_unmask_irq,
	.irq_set_type	= davinci_cp_intc_set_irq_type,
	.flags		= IRQCHIP_SKIP_SET_WAKE,
};

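/*
 * Low-level interrupt entry: read the prioritized interrupt index register
 * and dispatch the pending interrupt through the irq domain.
 */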
static asmlinkage void __exception_irq_entry
davinci_cp_intc_handle_irq(struct pt_regs *regs)
{
	int gpir, irqnr, none;

	/*
	 * The interrupt number is in the first ten bits. The NONE field set
	 * to 1 indicates a spurious irq.
	 */

	gpir = davinci_cp_intc_read(DAVINCI_CP_INTC_PRIO_IDX);
	irqnr = gpir & DAVINCI_CP_INTC_PRI_INDX_MASK;
	none = gpir & DAVINCI_CP_INTC_GPIR_NONE;

	if (unlikely(none)) {
		pr_err_once("%s: spurious irq!\n", __func__);
		return;
	}

	handle_domain_irq(davinci_cp_intc_irq_domain, irqnr, regs);
}

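/*
 * Domain map callback: attach the cp_intc chip to each virq, mark it
 * probe-capable and default to the edge flow handler.
 */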
static int davinci_cp_intc_host_map(struct irq_domain *h, unsigned int virq,
				    irq_hw_number_t hw)
{
	pr_debug("cp_intc_host_map(%d, 0x%lx)\n", virq, hw);

	irq_set_chip(virq, &davinci_cp_intc_irq_chip);
	irq_set_probe(virq);
	irq_set_handler(virq, handle_edge_irq);

	return 0;
}

static const struct irq_domain_ops davinci_cp_intc_irq_domain_ops = {
	.map = davinci_cp_intc_host_map,
	.xlate = irq_domain_xlate_onetwocell,
};

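/*
 * Common controller setup: map the register space, disable and clear all
 * system and host interrupts, route every system interrupt to channel 7,
 * allocate irq descriptors, register a legacy irq domain and install the
 * top-level handler before re-enabling the controller.
 */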
static int __init
davinci_cp_intc_do_init(const struct davinci_cp_intc_config *config,
			struct device_node *node)
{
	unsigned int num_regs = BITS_TO_LONGS(config->num_irqs);
	int offset, irq_base;
	void __iomem *req;

	req = request_mem_region(config->reg.start,
				 resource_size(&config->reg),
				 "davinci-cp-intc");
	if (!req) {
		pr_err("%s: register range busy\n", __func__);
		return -EBUSY;
	}

	davinci_cp_intc_base = ioremap(config->reg.start,
				       resource_size(&config->reg));
	if (!davinci_cp_intc_base) {
		pr_err("%s: unable to ioremap register range\n", __func__);
		return -EINVAL;
	}

	davinci_cp_intc_write(0, DAVINCI_CP_INTC_GLOBAL_ENABLE);

	/* Disable all host interrupts */
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_HOST_ENABLE(0));

	/* Disable system interrupts */
	for (offset = 0; offset < num_regs; offset++)
		davinci_cp_intc_write(~0,
			DAVINCI_CP_INTC_SYS_ENABLE_CLR(offset));

	/* Set to normal mode, no nesting, no priority hold */
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_CTRL);
	davinci_cp_intc_write(0, DAVINCI_CP_INTC_HOST_CTRL);

	/* Clear system interrupt status */
	for (offset = 0; offset < num_regs; offset++)
		davinci_cp_intc_write(~0,
			DAVINCI_CP_INTC_SYS_STAT_CLR(offset));

	/* Enable nIRQ (what about nFIQ?) */
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_HOST_ENABLE_IDX_SET);

	/* Default all priorities to channel 7. */
	num_regs = (config->num_irqs + 3) >> 2; /* 4 channels per register */
	for (offset = 0; offset < num_regs; offset++)
		davinci_cp_intc_write(0x07070707,
			DAVINCI_CP_INTC_CHAN_MAP(offset));

	irq_base = irq_alloc_descs(-1, 0, config->num_irqs, 0);
	if (irq_base < 0) {
		pr_err("%s: unable to allocate interrupt descriptors: %d\n",
		       __func__, irq_base);
		return irq_base;
	}

	davinci_cp_intc_irq_domain = irq_domain_add_legacy(
					node, config->num_irqs, irq_base, 0,
					&davinci_cp_intc_irq_domain_ops, NULL);

	if (!davinci_cp_intc_irq_domain) {
		pr_err("%s: unable to create an interrupt domain\n", __func__);
		return -EINVAL;
	}

	set_handle_irq(davinci_cp_intc_handle_irq);

	/* Enable global interrupt */
	davinci_cp_intc_write(1, DAVINCI_CP_INTC_GLOBAL_ENABLE);

	return 0;
}

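/* Entry point for platform code that provides the configuration directly (no device node). */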
int __init davinci_cp_intc_init(const struct davinci_cp_intc_config *config)
{
	return davinci_cp_intc_do_init(config, NULL);
}

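/*
 * Device-tree probe path: the register range comes from the node's 'reg'
 * property and the number of system interrupts from 'ti,intc-size'.
 */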
static int __init davinci_cp_intc_of_init(struct device_node *node,
					  struct device_node *parent)
{
	struct davinci_cp_intc_config config = { };
	int ret;

	ret = of_address_to_resource(node, 0, &config.reg);
	if (ret) {
		pr_err("%s: unable to get the register range from device-tree\n",
		       __func__);
		return ret;
	}

	ret = of_property_read_u32(node, "ti,intc-size", &config.num_irqs);
	if (ret) {
		pr_err("%s: unable to read the 'ti,intc-size' property\n",
		       __func__);
		return ret;
	}

	return davinci_cp_intc_do_init(&config, node);
}
IRQCHIP_DECLARE(cp_intc, "ti,cp-intc", davinci_cp_intc_of_init);