Commit | Line | Data |
---|---|---|
26ef5c09 DG |
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__


/*
 * Bytes per L1 cache line, expressed as a shift count so that
 * L1_CACHE_BYTES is always an exact power of two.
 *
 * MAX_COPY_PREFETCH presumably bounds how far ahead copy routines
 * prefetch (in cache lines) — confirm against the users of this macro.
 */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4	/* 16-byte lines */
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6	/* 64-byte lines */
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#else
#define L1_CACHE_SHIFT		5	/* 32-byte lines */
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7	/* 128-byte lines */
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/*
 * NOTE(review): IFETCH_ALIGN_SHIFT is only defined in the CONFIG_PPC64
 * branch above, so IFETCH_ALIGN_BYTES does not expand to a usable value
 * on 32-bit configurations — confirm no 32-bit code references it.
 */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)

26ef5c09 DG |
#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
/*
 * L1 cache geometry, published via the ppc64_caches global
 * (defined elsewhere; presumably filled in during early boot —
 * verify against its definition).
 */
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size */
	u32	dblock_size;		/* L1 d-cache block size */
	u32	log_dblock_size;	/* log2 of dblock_size */
	u32	dblocks_per_page;	/* d-cache blocks per page */
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size */
	u32	iblock_size;		/* L1 i-cache block size */
	u32	log_iblock_size;	/* log2 of iblock_size */
	u32	iblocks_per_page;	/* i-cache blocks per page */
};

extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && ! __ASSEMBLY__ */
48 | ||
0ce63670 KH |
#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync

#else
/* Place the annotated object in the .data..read_mostly linker section. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))
ae3a197e DH |
64 | |
#ifdef CONFIG_6xx
/*
 * Accessors for the L2CR/L3CR cache control registers; the real
 * implementations are provided elsewhere in the tree.
 */
extern long _get_L2CR(void);
extern void _set_L2CR(unsigned long);
extern long _get_L3CR(void);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on these parts: reads yield 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _get_L3CR()	0L
#define _set_L3CR(val)	do { } while(0)
#endif
76 | ||
d6bfa02f CL |
/* Zero the data cache block containing addr (Power ISA dcbz). */
static inline void dcbz(void *addr)
{
	/* "memory" clobber: the store to the block must not be reordered. */
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
81 | ||
/*
 * Data cache block invalidate: discard the cache block containing addr
 * without writing it back (per the Power ISA dcbi description).
 */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
86 | ||
/*
 * Data cache block flush: write back the block containing addr if dirty,
 * then invalidate it (per the Power ISA dcbf description).
 */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
91 | ||
/*
 * Data cache block store: write back the block containing addr without
 * invalidating it (per the Power ISA dcbst description).
 */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
ae3a197e | 96 | #endif /* !__ASSEMBLY__ */ |
26ef5c09 DG |
97 | #endif /* __KERNEL__ */ |
98 | #endif /* _ASM_POWERPC_CACHE_H */ |