Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
98de920a | 6 | * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org> |
a3c4946d RB |
7 | * Copyright (C) MIPS Technologies, Inc. |
8 | * written by Ralf Baechle <ralf@linux-mips.org> | |
1da177e4 LT |
9 | */ |
10 | #ifndef _ASM_HAZARDS_H | |
11 | #define _ASM_HAZARDS_H | |
12 | ||
#ifdef __ASSEMBLY__
/*
 * In assembly sources ASMMACRO expands directly to a gas macro, so each
 * hazard barrier below can be used like an ordinary assembler mnemonic.
 */
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

/*
 * In C sources ASMMACRO does two things: it emits the gas macro at file
 * scope (so the barriers can reference each other inside inline asm), and
 * it defines a static inline C function of the same name whose body is a
 * single volatile asm invocation of that macro.
 */
#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
33 | ||
/*
 * Basic barrier building blocks.  Both are encodings of "sll $0, $0, n":
 * a shift into $zero has no architectural effect, so older CPUs execute
 * them as nops, while the distinct shift amounts are the architected
 * encodings of SSNOP (1) and, on MIPS R2, EHB (3) — see the MIPS32 ISA
 * manual for the exact decode.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
41 | ||
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance, so a single _ehb suffices for
 * every coprocessor-0 / TLB hazard barrier on this configuration.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 *
 * NOTE(review): jr.hb branches to the label placed immediately after it,
 * so control flow is unchanged; the only effect is the instruction hazard
 * barrier implied by the .hb hint.
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
1da177e4 | 89 | |
#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors: the _ssnop padding covers R1 pipelines, and
 * the trailing _ehb closes the hazard when the kernel happens to run on R2.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

/*
 * jr.hb only exists on R2 cores, so gate the barrier on a runtime
 * cpu_has_mips_r2 check; on plain R1 this expands to nothing.
 */
#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)
141 | ||
#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 * The other cores listed above get the same empty barriers; presumably they
 * interlock these hazards in hardware as well.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
1da177e4 | 163 | |
#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.  Hence the four
 * _ssnop padding on the TLB-related barriers; the remaining barriers are
 * empty on this core.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
1da177e4 | 188 | |
#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons: every barrier is empty except
 * irq_disable_hazard, which needs three _ssnops on this core.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
5068debf | 208 | |
1da177e4 LT |
209 | #else |
210 | ||
211 | /* | |
d7d86aa8 RB |
212 | * Finally the catchall case for all other processors including R4000, R4400, |
213 | * R4600, R4700, R5000, RM7000, NEC VR41xx etc. | |
a3c4946d | 214 | * |
d7d86aa8 RB |
215 | * The taken branch will result in a two cycle penalty for the two killed |
216 | * instructions on R4000 / R4400. Other processors only have a single cycle | |
217 | * hazard so this is nice trick to have an optimal code for a range of | |
218 | * processors. | |
7043ad4f | 219 | */ |
d7d86aa8 | 220 | ASMMACRO(mtc0_tlbw_hazard, |
3f318370 | 221 | nop; nop |
d7d86aa8 RB |
222 | ) |
223 | ASMMACRO(tlbw_use_hazard, | |
224 | nop; nop; nop | |
225 | ) | |
226 | ASMMACRO(tlb_probe_hazard, | |
227 | nop; nop; nop | |
228 | ) | |
229 | ASMMACRO(irq_enable_hazard, | |
7b0fdaa6 | 230 | _ssnop; _ssnop; _ssnop; |
d7d86aa8 RB |
231 | ) |
232 | ASMMACRO(irq_disable_hazard, | |
233 | nop; nop; nop | |
234 | ) | |
235 | ASMMACRO(back_to_back_c0_hazard, | |
236 | _ssnop; _ssnop; _ssnop; | |
237 | ) | |
cc61c1fe | 238 | #define instruction_hazard() do { } while (0) |
41c594ab | 239 | |
d7d86aa8 | 240 | #endif |
1da177e4 | 241 | |

/*
 * FPU hazards: barriers around enabling/disabling the FPU via CP0 Status.
 */

#if defined(CONFIG_CPU_SB1)
/*
 * SB1: pad with _ssnop and a never-taken branch-likely (bnezl on $zero,
 * target .+4) between the ssnops; the remaining direction is a no-op.
 */
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)
/* R2 has the architected ehb barrier for both directions. */
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#else
/* Catchall: plain nop padding on enable; _ehb on disable (nop on pre-R2). */
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif
273 | ||
1da177e4 | 274 | #endif /* _ASM_HAZARDS_H */ |