// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright 2011 Freescale Semiconductor, Inc.
 * Copyright 2011 Linaro Ltd.
 */

#include <linux/init.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/reset-controller.h>
#include <linux/smp.h>
#include <asm/smp_plat.h>
#include "common.h"

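/*
 * Register offsets and SRC_SCR bit positions of the i.MX System Reset
 * Controller (SRC).  SRC_GPR1 is the first of a set of general purpose
 * registers used in per-CPU pairs: the first word of each pair holds a
 * secondary core's entry address, the second an argument word (see the
 * accessors below).
 */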
#define SRC_SCR				0x000
#define SRC_GPR1			0x020
#define BP_SRC_SCR_WARM_RESET_ENABLE	0
#define BP_SRC_SCR_SW_GPU_RST		1
#define BP_SRC_SCR_SW_VPU_RST		2
#define BP_SRC_SCR_SW_IPU1_RST		3
#define BP_SRC_SCR_SW_OPEN_VG_RST	4
#define BP_SRC_SCR_SW_IPU2_RST		12
#define BP_SRC_SCR_CORE1_RST		14
#define BP_SRC_SCR_CORE1_ENABLE		22

static void __iomem *src_base;
static DEFINE_SPINLOCK(scr_lock);

static const int sw_reset_bits[5] = {
	BP_SRC_SCR_SW_GPU_RST,
	BP_SRC_SCR_SW_VPU_RST,
	BP_SRC_SCR_SW_IPU1_RST,
	BP_SRC_SCR_SW_OPEN_VG_RST,
	BP_SRC_SCR_SW_IPU2_RST
};

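/*
 * Assert the self-clearing software reset for one of the modules listed
 * in sw_reset_bits[] and wait (up to 1s) for the hardware to clear the
 * bit again.  Returns -EINVAL for an out-of-range index and -ETIME if
 * the reset does not complete.  Consumers normally reach this through
 * the reset controller framework, e.g. devm_reset_control_get() followed
 * by reset_control_reset(), with the index taken from the "resets"
 * property in the device tree.
 */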
static int imx_src_reset_module(struct reset_controller_dev *rcdev,
		unsigned long sw_reset_idx)
{
	unsigned long timeout;
	unsigned long flags;
	int bit;
	u32 val;

	if (sw_reset_idx >= ARRAY_SIZE(sw_reset_bits))
		return -EINVAL;

	bit = 1 << sw_reset_bits[sw_reset_idx];

	spin_lock_irqsave(&scr_lock, flags);
	val = readl_relaxed(src_base + SRC_SCR);
	val |= bit;
	writel_relaxed(val, src_base + SRC_SCR);
	spin_unlock_irqrestore(&scr_lock, flags);

	timeout = jiffies + msecs_to_jiffies(1000);
	while (readl(src_base + SRC_SCR) & bit) {
		if (time_after(jiffies, timeout))
			return -ETIME;
		cpu_relax();
	}

	return 0;
}

static const struct reset_control_ops imx_src_ops = {
	.reset = imx_src_reset_module,
};

static struct reset_controller_dev imx_reset_controller = {
	.ops = &imx_src_ops,
	.nr_resets = ARRAY_SIZE(sw_reset_bits),
};

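/*
 * Enable or disable a secondary core: set or clear the core's enable bit
 * in SRC_SCR and assert the core's software reset in the same write.
 */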
void imx_enable_cpu(int cpu, bool enable)
{
	u32 mask, val;

	cpu = cpu_logical_map(cpu);
	mask = 1 << (BP_SRC_SCR_CORE1_ENABLE + cpu - 1);
	spin_lock(&scr_lock);
	val = readl_relaxed(src_base + SRC_SCR);
	val = enable ? val | mask : val & ~mask;
	val |= 1 << (BP_SRC_SCR_CORE1_RST + cpu - 1);
	writel_relaxed(val, src_base + SRC_SCR);
	spin_unlock(&scr_lock);
}

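/*
 * Program the physical entry address a secondary core will jump to once
 * it is released from reset.  Each core has an 8-byte slot starting at
 * SRC_GPR1: the first word is the entry address, the second the argument
 * handled by imx_{get,set}_cpu_arg() below.
 */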
void imx_set_cpu_jump(int cpu, void *jump_addr)
{
	cpu = cpu_logical_map(cpu);
	writel_relaxed(__pa_symbol(jump_addr),
		       src_base + SRC_GPR1 + cpu * 8);
}

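/* Read or write the per-CPU argument word next to the jump address. */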
u32 imx_get_cpu_arg(int cpu)
{
	cpu = cpu_logical_map(cpu);
	return readl_relaxed(src_base + SRC_GPR1 + cpu * 8 + 4);
}

void imx_set_cpu_arg(int cpu, u32 arg)
{
	cpu = cpu_logical_map(cpu);
	writel_relaxed(arg, src_base + SRC_GPR1 + cpu * 8 + 4);
}

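/*
 * Map the SRC block described by the "fsl,imx51-src" compatible node,
 * register the software resets with the reset controller framework (when
 * enabled), and disable warm reset generation so that system restarts go
 * through a full cold reset.
 */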
void __init imx_src_init(void)
{
	struct device_node *np;
	u32 val;

	np = of_find_compatible_node(NULL, NULL, "fsl,imx51-src");
	if (!np)
		return;
	src_base = of_iomap(np, 0);
	WARN_ON(!src_base);

	imx_reset_controller.of_node = np;
	if (IS_ENABLED(CONFIG_RESET_CONTROLLER))
		reset_controller_register(&imx_reset_controller);

	/*
	 * force warm reset sources to generate cold reset
	 * for a more reliable restart
	 */
	spin_lock(&scr_lock);
	val = readl_relaxed(src_base + SRC_SCR);
	val &= ~(1 << BP_SRC_SCR_WARM_RESET_ENABLE);
	writel_relaxed(val, src_base + SRC_SCR);
	spin_unlock(&scr_lock);
}