/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

8/* bytes per L1 cache line */
Christophe Leroy968159c2017-08-08 13:58:54 +02009#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
David Gibson26ef5c02005-11-10 11:50:16 +110010#define L1_CACHE_SHIFT 4
11#define MAX_COPY_PREFETCH 1
Christophe Leroy1128bb72018-05-18 15:01:16 +020012#define IFETCH_ALIGN_SHIFT 2
Kumar Gala3dfa8772008-06-16 09:41:32 -050013#elif defined(CONFIG_PPC_E500MC)
14#define L1_CACHE_SHIFT 6
15#define MAX_COPY_PREFETCH 4
Christophe Leroy1128bb72018-05-18 15:01:16 +020016#define IFETCH_ALIGN_SHIFT 3
David Gibson26ef5c02005-11-10 11:50:16 +110017#elif defined(CONFIG_PPC32)
David Gibson26ef5c02005-11-10 11:50:16 +110018#define MAX_COPY_PREFETCH 4
Christophe Leroy1128bb72018-05-18 15:01:16 +020019#define IFETCH_ALIGN_SHIFT 3 /* 603 fetches 2 insn at a time */
Dave Kleikampe7f75ad2010-03-05 10:43:12 +000020#if defined(CONFIG_PPC_47x)
21#define L1_CACHE_SHIFT 7
22#else
23#define L1_CACHE_SHIFT 5
24#endif
David Gibson26ef5c02005-11-10 11:50:16 +110025#else /* CONFIG_PPC64 */
26#define L1_CACHE_SHIFT 7
Nicholas Pigginf4329f22016-10-13 14:43:52 +110027#define IFETCH_ALIGN_SHIFT 4 /* POWER8,9 */
David Gibson26ef5c02005-11-10 11:50:16 +110028#endif
29
30#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
31
32#define SMP_CACHE_BYTES L1_CACHE_BYTES
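
/*
 * L1_CACHE_BYTES is what generic code uses for cache-line padding and
 * alignment.  A typical (hypothetical) use is keeping a hot structure
 * on its own line so writes to it do not bounce neighbouring data:
 *
 *	struct hot_counter {
 *		atomic_t count;
 *	} __attribute__((__aligned__(L1_CACHE_BYTES)));
 */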

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
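
/*
 * Illustrative (hypothetical) use of the ifetch alignment values:
 * assembly code can align a hot label to an instruction-fetch group
 * boundary so the first iteration fetches a full group, e.g.:
 *
 *	.balign IFETCH_ALIGN_BYTES
 * copy_loop:
 *	...
 */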

#if !defined(__ASSEMBLY__)
#ifdef CONFIG_PPC64

struct ppc_cache_info {
	u32 size;		/* total size, in bytes */
	u32 line_size;		/* coherency granule, in bytes */
	u32 block_size;		/* cache block size (L1 only), in bytes */
	u32 log_block_size;	/* log2(block_size) */
	u32 blocks_per_page;	/* PAGE_SIZE / block_size */
	u32 sets;		/* number of sets */
	u32 assoc;		/* associativity (number of ways) */
};

struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;

static inline u32 l1_cache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}

static inline u32 l1_cache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}
#else
static inline u32 l1_cache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_cache_bytes(void)
{
	return L1_CACHE_BYTES;
}
#endif
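
/*
 * A minimal sketch (not a real kernel API) of how the accessors above
 * are meant to be used: cache-management loops step through a buffer
 * one cache block at a time, so the block size is read at run time on
 * ppc64 and folds to a build-time constant on ppc32.  flush_buf() and
 * its use of dcbf()/mb() are hypothetical:
 *
 *	void flush_buf(void *start, unsigned long size)
 *	{
 *		unsigned long off;
 *
 *		for (off = 0; off < size; off += l1_cache_bytes())
 *			dcbf((char *)start + off);
 *		mb();
 *	}
 */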
#endif /* !__ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers.  We also need a sync
 * before the icbi to order it after the actual stores to memory that
 * might have modified the instructions.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
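
/*
 * Hypothetical use from assembly, e.g. after patching an instruction
 * whose address is in r3 (the register the macro hard-codes):
 *
 *	stw	r4,0(r3)	- store the modified instruction
 *	PURGE_PREFETCHED_INS	- order the store, discard any stale
 *				  prefetched copy, then resynchronize
 */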

#else
#define __read_mostly __attribute__((__section__(".data..read_mostly")))
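
/*
 * __read_mostly groups rarely-written variables into their own section
 * so they do not share (and ping-pong) cache lines with frequently
 * written data.  Typical (hypothetical) use:
 *
 *	static int some_tunable __read_mostly = 16;
 */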

#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while (0)
#define _set_L3CR(val)	do { } while (0)
#endif
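
/*
 * The no-op stubs above let common code touch the L2/L3 control
 * registers without an #ifdef at every call site.  A hypothetical
 * caller (the L2CR_* flag names come from asm/reg.h):
 *
 *	unsigned long l2cr = _get_L2CR();
 *
 *	if (l2cr & L2CR_L2E)
 *		_set_L2CR(l2cr | L2CR_L2I);
 */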

/* dcbz: zero out the data cache block containing addr */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* dcbi: invalidate the block, discarding any modified data */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* dcbf: write the block back if modified, then invalidate it */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* dcbst: write the block back if modified, but leave it valid */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */