/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__


/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define SMP_CACHE_BYTES		L1_CACHE_BYTES

#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

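/*
 * Worked example (for reference only): the 64-bit default of
 * L1_CACHE_SHIFT == 7 makes L1_CACHE_BYTES evaluate to 1 << 7 == 128,
 * i.e. 128-byte cache lines; the classic 32-bit shift of 5 gives
 * 32-byte lines.
 */
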
#if defined(__powerpc64__) && !defined(__ASSEMBLY__)

struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;
};

struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;
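
/*
 * Sketch of typical use (illustrative only; the local variable name is
 * hypothetical): 64-bit code that needs the probed L1 data cache line
 * size can read it from the exported instance, e.g.
 *
 *	u32 d_line_size = ppc64_caches.l1d.line_size;
 */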
#endif /* __powerpc64__ && ! __ASSEMBLY__ */

#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
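
/*
 * Sketch of typical use (illustrative only; the routine name is hypothetical
 * and _GLOBAL is assumed to come from asm/ppc_asm.h): assembly code that has
 * just stored modified instructions, with their address in r3 as the icbi
 * above expects, can purge stale prefetches before returning:
 *
 *	_GLOBAL(example_sync_code)
 *		PURGE_PREFETCHED_INS
 *		blr
 */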

#else
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

/* Data Cache Block Zero: zero the cache line containing addr */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Invalidate: discard the cache line containing addr */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Flush: write back and invalidate the line containing addr */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* Data Cache Block Store: write back the line containing addr, keep it valid */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
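
/*
 * Sketch of typical use (illustrative only; the helper name is hypothetical
 * and mb() is assumed to come from asm/barrier.h): flushing a buffer to
 * memory one L1 line at a time with the wrappers above might look like:
 *
 *	static inline void example_flush_range(void *start, unsigned long len)
 *	{
 *		void *p = (void *)((unsigned long)start & ~(L1_CACHE_BYTES - 1));
 *		void *end = start + len;
 *
 *		for (; p < end; p += L1_CACHE_BYTES)
 *			dcbf(p);
 *		mb();
 *	}
 */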
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */