Commit | Line | Data |
---|---|---|
f9ff0f30 SR |
1 | #ifndef _ASM_POWERPC_EXCEPTION_H |
2 | #define _ASM_POWERPC_EXCEPTION_H | |
3 | /* | |
4 | * Extracted from head_64.S | |
5 | * | |
6 | * PowerPC version | |
7 | * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org) | |
8 | * | |
9 | * Rewritten by Cort Dougan (cort@cs.nmt.edu) for PReP | |
10 | * Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu> | |
11 | * Adapted for Power Macintosh by Paul Mackerras. | |
12 | * Low-level exception handlers and MMU support | |
13 | * rewritten by Paul Mackerras. | |
14 | * Copyright (C) 1996 Paul Mackerras. | |
15 | * | |
16 | * Adapted for 64bit PowerPC by Dave Engebretsen, Peter Bergner, and | |
17 | * Mike Corrigan {engebret|bergner|mikejc}@us.ibm.com | |
18 | * | |
19 | * This file contains the low-level support and setup for the | |
20 | * PowerPC-64 platform, including trap and interrupt dispatch. | |
21 | * | |
22 | * This program is free software; you can redistribute it and/or | |
23 | * modify it under the terms of the GNU General Public License | |
24 | * as published by the Free Software Foundation; either version | |
25 | * 2 of the License, or (at your option) any later version. | |
26 | */ | |
27 | /* | |
28 | * The following macros define the code that appears as | |
29 | * the prologue to each of the exception handlers. They | |
30 | * are split into two parts to allow a single kernel binary | |
31 | * to be used for pSeries and iSeries. | |
32 | * | |
33 | * We make as much of the exception code common between native | |
34 | * exception handlers (including pSeries LPAR) and iSeries LPAR | |
35 | * implementations as possible. | |
36 | */ | |
da2bc464 | 37 | #include <asm/head-64.h> |
2c86cd18 | 38 | #include <asm/feature-fixups.h> |
f9ff0f30 | 39 | |
8c388514 | 40 | /* PACA save area offsets (exgen, exmc, etc) */ |
f9ff0f30 SR |
41 | #define EX_R9 0 |
42 | #define EX_R10 8 | |
43 | #define EX_R11 16 | |
44 | #define EX_R12 24 | |
45 | #define EX_R13 32 | |
36670fcf NP |
46 | #define EX_DAR 40 |
47 | #define EX_DSISR 48 | |
48 | #define EX_CCR 52 | |
635942ae NP |
49 | #define EX_CFAR 56 |
50 | #define EX_PPR 64 | |
8568f1e0 | 51 | #if defined(CONFIG_RELOCATABLE) |
635942ae | 52 | #define EX_CTR 72 |
635942ae | 53 | #define EX_SIZE 10 /* size in u64 units */ |
8568f1e0 NP |
54 | #else |
55 | #define EX_SIZE 9 /* size in u64 units */ | |
56 | #endif | |
dbeea1d6 | 57 | |
ba41e1e1 BS |
58 | /* |
59 | * maximum recursive depth of MCE exceptions | |
60 | */ | |
61 | #define MAX_MCE_DEPTH 4 | |
62 | ||
635942ae NP |
63 | /* |
64 | * EX_R3 is only used by the bad_stack handler. bad_stack reloads and | |
65 | * saves DAR from SPRN_DAR, and EX_DAR is not used. So EX_R3 can overlap | |
66 | * with EX_DAR. | |
67 | */ | |
68 | #define EX_R3 EX_DAR | |
69 | ||
4508a74a NP |
70 | #ifdef __ASSEMBLY__ |
71 | ||
a048a07d NP |
72 | #define STF_ENTRY_BARRIER_SLOT \ |
73 | STF_ENTRY_BARRIER_FIXUP_SECTION; \ | |
74 | nop; \ | |
75 | nop; \ | |
76 | nop | |
77 | ||
78 | #define STF_EXIT_BARRIER_SLOT \ | |
79 | STF_EXIT_BARRIER_FIXUP_SECTION; \ | |
80 | nop; \ | |
81 | nop; \ | |
82 | nop; \ | |
83 | nop; \ | |
84 | nop; \ | |
85 | nop | |
86 | ||
87 | /* | |
88 | * r10 must be free to use, r13 must be paca | |
89 | */ | |
90 | #define INTERRUPT_TO_KERNEL \ | |
91 | STF_ENTRY_BARRIER_SLOT | |
92 | ||
aa8a5e00 ME |
93 | /* |
94 | * Macros for annotating the expected destination of (h)rfid | |
95 | * | |
96 | * The nop instructions allow us to insert one or more instructions to flush the | |
97 | * L1-D cache when returning to userspace or a guest. | |
98 | */ | |
99 | #define RFI_FLUSH_SLOT \ | |
100 | RFI_FLUSH_FIXUP_SECTION; \ | |
101 | nop; \ | |
102 | nop; \ | |
103 | nop | |
50e51c13 NP |
104 | |
105 | #define RFI_TO_KERNEL \ | |
106 | rfid | |
107 | ||
108 | #define RFI_TO_USER \ | |
a048a07d | 109 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
110 | RFI_FLUSH_SLOT; \ |
111 | rfid; \ | |
112 | b rfi_flush_fallback | |
50e51c13 NP |
113 | |
114 | #define RFI_TO_USER_OR_KERNEL \ | |
a048a07d | 115 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
116 | RFI_FLUSH_SLOT; \ |
117 | rfid; \ | |
118 | b rfi_flush_fallback | |
50e51c13 NP |
119 | |
120 | #define RFI_TO_GUEST \ | |
a048a07d | 121 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
122 | RFI_FLUSH_SLOT; \ |
123 | rfid; \ | |
124 | b rfi_flush_fallback | |
50e51c13 NP |
125 | |
126 | #define HRFI_TO_KERNEL \ | |
127 | hrfid | |
128 | ||
129 | #define HRFI_TO_USER \ | |
a048a07d | 130 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
131 | RFI_FLUSH_SLOT; \ |
132 | hrfid; \ | |
133 | b hrfi_flush_fallback | |
50e51c13 NP |
134 | |
135 | #define HRFI_TO_USER_OR_KERNEL \ | |
a048a07d | 136 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
137 | RFI_FLUSH_SLOT; \ |
138 | hrfid; \ | |
139 | b hrfi_flush_fallback | |
50e51c13 NP |
140 | |
141 | #define HRFI_TO_GUEST \ | |
a048a07d | 142 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
143 | RFI_FLUSH_SLOT; \ |
144 | hrfid; \ | |
145 | b hrfi_flush_fallback | |
50e51c13 NP |
146 | |
147 | #define HRFI_TO_UNKNOWN \ | |
a048a07d | 148 | STF_EXIT_BARRIER_SLOT; \ |
aa8a5e00 ME |
149 | RFI_FLUSH_SLOT; \ |
150 | hrfid; \ | |
151 | b hrfi_flush_fallback | |
50e51c13 | 152 | |
f9ff0f30 SR |
153 | /* |
154 | * We're short on space and time in the exception prolog, so we can't | |
27510235 ME |
155 | * use the normal LOAD_REG_IMMEDIATE macro to load the address of label. |
156 | * Instead we get the base of the kernel from paca->kernelbase and or in the low | |
157 | * part of label. This requires that the label be within 64KB of kernelbase, and | |
158 | * that kernelbase be 64K aligned. | |
f9ff0f30 | 159 | */ |
f9ff0f30 | 160 | #define LOAD_HANDLER(reg, label) \ |
d8d42b05 | 161 | ld reg,PACAKBASE(r13); /* get high part of &label */ \ |
4b1f5ccc | 162 | ori reg,reg,FIXED_SYMBOL_ABS_ADDR(label) |
f9ff0f30 | 163 | |
fb479e44 NP |
164 | #define __LOAD_HANDLER(reg, label) \ |
165 | ld reg,PACAKBASE(r13); \ | |
4b1f5ccc | 166 | ori reg,reg,(ABS_ADDR(label))@l |
fb479e44 | 167 | |
a97a65d5 NP |
168 | /* |
169 | * Branches from unrelocated code (e.g., interrupts) to labels outside | |
170 | * head-y require >64K offsets. | |
171 | */ | |
172 | #define __LOAD_FAR_HANDLER(reg, label) \ | |
173 | ld reg,PACAKBASE(r13); \ | |
174 | ori reg,reg,(ABS_ADDR(label))@l; \ | |
4b1f5ccc | 175 | addis reg,reg,(ABS_ADDR(label))@h |
a97a65d5 | 176 | |
4508a74a NP |
177 | #ifdef CONFIG_RELOCATABLE |
178 | .macro EXCEPTION_PROLOG_2_RELON label, hsrr | |
179 | .if \hsrr | |
180 | mfspr r11,SPRN_HSRR0 /* save HSRR0 */ | |
181 | .else | |
182 | mfspr r11,SPRN_SRR0 /* save SRR0 */ | |
183 | .endif | |
184 | LOAD_HANDLER(r12, \label\()) | |
185 | mtctr r12 | |
186 | .if \hsrr | |
187 | mfspr r12,SPRN_HSRR1 /* and HSRR1 */ | |
188 | .else | |
189 | mfspr r12,SPRN_SRR1 /* and SRR1 */ | |
190 | .endif | |
191 | li r10,MSR_RI | |
192 | mtmsrd r10,1 /* Set RI (EE=0) */ | |
193 | bctr | |
194 | .endm | |
195 | #else | |
196 | /* If not relocatable, we can jump directly -- and save messing with LR */ | |
197 | .macro EXCEPTION_PROLOG_2_RELON label, hsrr | |
198 | .if \hsrr | |
199 | mfspr r11,SPRN_HSRR0 /* save HSRR0 */ | |
200 | mfspr r12,SPRN_HSRR1 /* and HSRR1 */ | |
201 | .else | |
202 | mfspr r11,SPRN_SRR0 /* save SRR0 */ | |
203 | mfspr r12,SPRN_SRR1 /* and SRR1 */ | |
204 | .endif | |
205 | li r10,MSR_RI | |
206 | mtmsrd r10,1 /* Set RI (EE=0) */ | |
207 | b \label | |
208 | .endm | |
209 | #endif | |
210 | ||
211 | /* | |
212 | * As EXCEPTION_PROLOG(), except we've already got relocation on so no need to | |
213 | * rfid. Save LR in case we're CONFIG_RELOCATABLE, in which case | |
214 | * EXCEPTION_PROLOG_2_RELON will be using LR. | |
215 | */ | |
216 | #define EXCEPTION_RELON_PROLOG(area, label, hsrr, extra, vec) \ | |
217 | SET_SCRATCH0(r13); /* save r13 */ \ | |
218 | EXCEPTION_PROLOG_0(area); \ | |
219 | EXCEPTION_PROLOG_1(area, extra, vec); \ | |
220 | EXCEPTION_PROLOG_2_RELON label, hsrr | |
221 | ||
a5d4f3ad | 222 | /* Exception register prefixes */ |
4508a74a NP |
223 | #define EXC_HV 1 |
224 | #define EXC_STD 0 | |
a5d4f3ad | 225 | |
4700dfaf MN |
226 | #if defined(CONFIG_RELOCATABLE) |
227 | /* | |
bc2e6c6a MN |
228 | * If we support interrupts with relocation on AND we're a relocatable kernel, |
229 | * we need to use CTR to get to the 2nd level handler. So, save/restore it | |
230 | * when required. | |
4700dfaf | 231 | */ |
bc2e6c6a MN |
232 | #define SAVE_CTR(reg, area) mfctr reg ; std reg,area+EX_CTR(r13) |
233 | #define GET_CTR(reg, area) ld reg,area+EX_CTR(r13) | |
234 | #define RESTORE_CTR(reg, area) ld reg,area+EX_CTR(r13) ; mtctr reg | |
4700dfaf | 235 | #else |
bc2e6c6a MN |
236 | /* ...else CTR is unused and in register. */ |
237 | #define SAVE_CTR(reg, area) | |
238 | #define GET_CTR(reg, area) mfctr reg | |
239 | #define RESTORE_CTR(reg, area) | |
4700dfaf MN |
240 | #endif |
241 | ||
13e7a8e8 HM |
242 | /* |
243 | * PPR save/restore macros used in exceptions_64s.S | |
244 | * Used for P7 or later processors | |
245 | */ | |
4c2de74c | 246 | #define SAVE_PPR(area, ra) \ |
13e7a8e8 | 247 | BEGIN_FTR_SECTION_NESTED(940) \ |
4c2de74c NP |
248 | ld ra,area+EX_PPR(r13); /* Read PPR from paca */ \ |
249 | std ra,_PPR(r1); \ | |
13e7a8e8 HM |
250 | END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,940) |
251 | ||
252 | #define RESTORE_PPR_PACA(area, ra) \ | |
253 | BEGIN_FTR_SECTION_NESTED(941) \ | |
254 | ld ra,area+EX_PPR(r13); \ | |
255 | mtspr SPRN_PPR,ra; \ | |
256 | END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,941) | |
257 | ||
13e7a8e8 | 258 | /* |
1707dd16 | 259 | * Get an SPR into a register if the CPU has the given feature |
13e7a8e8 | 260 | */ |
1707dd16 | 261 | #define OPT_GET_SPR(ra, spr, ftr) \ |
13e7a8e8 | 262 | BEGIN_FTR_SECTION_NESTED(943) \ |
1707dd16 PM |
263 | mfspr ra,spr; \ |
264 | END_FTR_SECTION_NESTED(ftr,ftr,943) | |
13e7a8e8 | 265 | |
d410ae21 MS |
266 | /* |
267 | * Set an SPR from a register if the CPU has the given feature | |
268 | */ | |
269 | #define OPT_SET_SPR(ra, spr, ftr) \ | |
270 | BEGIN_FTR_SECTION_NESTED(943) \ | |
271 | mtspr spr,ra; \ | |
272 | END_FTR_SECTION_NESTED(ftr,ftr,943) | |
273 | ||
1707dd16 PM |
274 | /* |
275 | * Save a register to the PACA if the CPU has the given feature | |
276 | */ | |
277 | #define OPT_SAVE_REG_TO_PACA(offset, ra, ftr) \ | |
278 | BEGIN_FTR_SECTION_NESTED(943) \ | |
279 | std ra,offset(r13); \ | |
280 | END_FTR_SECTION_NESTED(ftr,ftr,943) | |
281 | ||
544686ca NP |
282 | #define EXCEPTION_PROLOG_0(area) \ |
283 | GET_PACA(r13); \ | |
44e9309f | 284 | std r9,area+EX_R9(r13); /* save r9 */ \ |
1707dd16 PM |
285 | OPT_GET_SPR(r9, SPRN_PPR, CPU_FTR_HAS_PPR); \ |
286 | HMT_MEDIUM; \ | |
44e9309f | 287 | std r10,area+EX_R10(r13); /* save r10 - r12 */ \ |
1707dd16 PM |
288 | OPT_GET_SPR(r10, SPRN_CFAR, CPU_FTR_CFAR) |
289 | ||
f14e953b | 290 | #define __EXCEPTION_PROLOG_1_PRE(area) \ |
1707dd16 PM |
291 | OPT_SAVE_REG_TO_PACA(area+EX_PPR, r9, CPU_FTR_HAS_PPR); \ |
292 | OPT_SAVE_REG_TO_PACA(area+EX_CFAR, r10, CPU_FTR_CFAR); \ | |
a048a07d | 293 | INTERRUPT_TO_KERNEL; \ |
bc2e6c6a | 294 | SAVE_CTR(r10, area); \ |
4b1f5ccc | 295 | mfcr r9 |
f14e953b MS |
296 | |
297 | #define __EXCEPTION_PROLOG_1_POST(area) \ | |
b01c8b54 PM |
298 | std r11,area+EX_R11(r13); \ |
299 | std r12,area+EX_R12(r13); \ | |
300 | GET_SCRATCH0(r10); \ | |
301 | std r10,area+EX_R13(r13) | |
f14e953b MS |
302 | |
303 | /* | |
304 | * This version of the EXCEPTION_PROLOG_1 will carry | |
305 | * an additional parameter called "bitmask" to support | |
306 | * checking of the interrupt maskable level in the SOFTEN_TEST. | |
307 | * Intended to be used in MASKABLE_EXCEPTION_* macros. | |
308 | */ | |
309 | #define MASKABLE_EXCEPTION_PROLOG_1(area, extra, vec, bitmask) \ | |
310 | __EXCEPTION_PROLOG_1_PRE(area); \ | |
311 | extra(vec, bitmask); \ | |
4b1f5ccc | 312 | __EXCEPTION_PROLOG_1_POST(area) |
f14e953b MS |
313 | |
314 | /* | |
315 | * This version of the EXCEPTION_PROLOG_1 is intended | |
316 | * to be used in STD_EXCEPTION* macros | |
317 | */ | |
318 | #define _EXCEPTION_PROLOG_1(area, extra, vec) \ | |
319 | __EXCEPTION_PROLOG_1_PRE(area); \ | |
320 | extra(vec); \ | |
4b1f5ccc | 321 | __EXCEPTION_PROLOG_1_POST(area) |
f14e953b | 322 | |
b01c8b54 | 323 | #define EXCEPTION_PROLOG_1(area, extra, vec) \ |
f14e953b | 324 | _EXCEPTION_PROLOG_1(area, extra, vec) |
7180e3e6 | 325 | |
4508a74a NP |
326 | .macro EXCEPTION_PROLOG_2 label, hsrr |
327 | ld r10,PACAKMSR(r13) /* get MSR value for kernel */ | |
328 | .if \hsrr | |
329 | mfspr r11,SPRN_HSRR0 /* save HSRR0 */ | |
330 | .else | |
331 | mfspr r11,SPRN_SRR0 /* save SRR0 */ | |
332 | .endif | |
333 | LOAD_HANDLER(r12,\label\()) | |
334 | .if \hsrr | |
335 | mtspr SPRN_HSRR0,r12 | |
336 | mfspr r12,SPRN_HSRR1 /* and HSRR1 */ | |
337 | mtspr SPRN_HSRR1,r10 | |
338 | HRFI_TO_KERNEL | |
339 | .else | |
340 | mtspr SPRN_SRR0,r12 | |
341 | mfspr r12,SPRN_SRR1 /* and SRR1 */ | |
342 | mtspr SPRN_SRR1,r10 | |
343 | RFI_TO_KERNEL | |
344 | .endif | |
f9ff0f30 | 345 | b . /* prevent speculative execution */ |
4508a74a | 346 | .endm |
f9ff0f30 | 347 | |
83a980f7 | 348 | /* _NORI variant keeps MSR_RI clear */ |
4508a74a NP |
349 | .macro EXCEPTION_PROLOG_2_NORI label, hsrr |
350 | ld r10,PACAKMSR(r13) /* get MSR value for kernel */ | |
351 | xori r10,r10,MSR_RI /* Clear MSR_RI */ | |
352 | .if \hsrr | |
353 | mfspr r11,SPRN_HSRR0 /* save HSRR0 */ | |
354 | .else | |
355 | mfspr r11,SPRN_SRR0 /* save SRR0 */ | |
356 | .endif | |
357 | LOAD_HANDLER(r12,\label\()) | |
358 | .if \hsrr | |
359 | mtspr SPRN_HSRR0,r12 | |
360 | mfspr r12,SPRN_HSRR1 /* and HSRR1 */ | |
361 | mtspr SPRN_HSRR1,r10 | |
362 | HRFI_TO_KERNEL | |
363 | .else | |
364 | mtspr SPRN_SRR0,r12 | |
365 | mfspr r12,SPRN_SRR1 /* and SRR1 */ | |
366 | mtspr SPRN_SRR1,r10 | |
367 | RFI_TO_KERNEL | |
368 | .endif | |
83a980f7 | 369 | b . /* prevent speculative execution */ |
4508a74a | 370 | .endm |
83a980f7 | 371 | |
bdf08e1d | 372 | #define EXCEPTION_PROLOG(area, label, h, extra, vec) \ |
4a7a0a84 | 373 | SET_SCRATCH0(r13); /* save r13 */ \ |
1707dd16 | 374 | EXCEPTION_PROLOG_0(area); \ |
b01c8b54 | 375 | EXCEPTION_PROLOG_1(area, extra, vec); \ |
4508a74a | 376 | EXCEPTION_PROLOG_2 label, h |
b01c8b54 | 377 | |
dd96b2c2 AK |
378 | #ifdef CONFIG_KVM_BOOK3S_HV_POSSIBLE |
379 | /* | |
380 | * If hv is possible, interrupts come in to the hv version | |
381 | * of the kvmppc_interrupt code, which then jumps to the PR handler, | |
382 | * kvmppc_interrupt_pr, if the guest is a PR guest. | |
383 | */ | |
384 | #define kvmppc_interrupt kvmppc_interrupt_hv | |
385 | #else | |
386 | #define kvmppc_interrupt kvmppc_interrupt_pr | |
387 | #endif | |
388 | ||
b51351e2 NP |
389 | /* |
390 | * Branch to label using its 0xC000 address. This results in instruction | |
391 | * address suitable for MSR[IR]=0 or 1, which allows relocation to be turned | |
392 | * on using mtmsr rather than rfid. | |
393 | * | |
394 | * This could set the 0xc bits for !RELOCATABLE as an immediate, rather than | |
395 | * load KBASE for a slight optimisation. | |
396 | */ | |
397 | #define BRANCH_TO_C000(reg, label) \ | |
398 | __LOAD_HANDLER(reg, label); \ | |
399 | mtctr reg; \ | |
400 | bctr | |
401 | ||
fb479e44 NP |
402 | #ifdef CONFIG_RELOCATABLE |
403 | #define BRANCH_TO_COMMON(reg, label) \ | |
404 | __LOAD_HANDLER(reg, label); \ | |
405 | mtctr reg; \ | |
406 | bctr | |
407 | ||
be5c5e84 ME |
408 | #define BRANCH_LINK_TO_FAR(label) \ |
409 | __LOAD_FAR_HANDLER(r12, label); \ | |
410 | mtctr r12; \ | |
2337d207 NP |
411 | bctrl |
412 | ||
a97a65d5 NP |
413 | /* |
414 | * KVM requires __LOAD_FAR_HANDLER. | |
415 | * | |
416 | * __BRANCH_TO_KVM_EXIT branches are also a special case because they | |
417 | * explicitly use r9 then reload it from PACA before branching. Hence | |
418 | * the double-underscore. | |
419 | */ | |
420 | #define __BRANCH_TO_KVM_EXIT(area, label) \ | |
421 | mfctr r9; \ | |
422 | std r9,HSTATE_SCRATCH1(r13); \ | |
423 | __LOAD_FAR_HANDLER(r9, label); \ | |
424 | mtctr r9; \ | |
425 | ld r9,area+EX_R9(r13); \ | |
426 | bctr | |
427 | ||
fb479e44 NP |
428 | #else |
429 | #define BRANCH_TO_COMMON(reg, label) \ | |
430 | b label | |
431 | ||
be5c5e84 | 432 | #define BRANCH_LINK_TO_FAR(label) \ |
2337d207 NP |
433 | bl label |
434 | ||
a97a65d5 NP |
435 | #define __BRANCH_TO_KVM_EXIT(area, label) \ |
436 | ld r9,area+EX_R9(r13); \ | |
437 | b label | |
438 | ||
fb479e44 NP |
439 | #endif |
440 | ||
c4f3b52c | 441 | /* Do not enable RI */ |
94f3cc8e | 442 | #define EXCEPTION_PROLOG_NORI(area, label, h, extra, vec) \ |
c4f3b52c NP |
443 | EXCEPTION_PROLOG_0(area); \ |
444 | EXCEPTION_PROLOG_1(area, extra, vec); \ | |
4508a74a | 445 | EXCEPTION_PROLOG_2_NORI label, h |
b01c8b54 PM |
446 | |
447 | #ifdef CONFIG_KVM_BOOK3S_64_HANDLER | |
4508a74a NP |
448 | .macro KVMTEST hsrr, n |
449 | lbz r10,HSTATE_IN_GUEST(r13) | |
450 | cmpwi r10,0 | |
451 | .if \hsrr | |
452 | bne do_kvm_H\n | |
453 | .else | |
454 | bne do_kvm_\n | |
455 | .endif | |
456 | .endm | |
457 | ||
458 | .macro KVM_HANDLER area, hsrr, n | |
459 | BEGIN_FTR_SECTION_NESTED(947) | |
460 | ld r10,\area+EX_CFAR(r13) | |
461 | std r10,HSTATE_CFAR(r13) | |
462 | END_FTR_SECTION_NESTED(CPU_FTR_CFAR,CPU_FTR_CFAR,947) | |
463 | BEGIN_FTR_SECTION_NESTED(948) | |
464 | ld r10,\area+EX_PPR(r13) | |
465 | std r10,HSTATE_PPR(r13) | |
466 | END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948) | |
467 | ld r10,\area+EX_R10(r13) | |
468 | std r12,HSTATE_SCRATCH0(r13) | |
469 | sldi r12,r9,32 | |
470 | ori r12,r12,(\n) | |
471 | /* This reloads r9 before branching to kvmppc_interrupt */ | |
472 | __BRANCH_TO_KVM_EXIT(\area, kvmppc_interrupt) | |
473 | .endm | |
474 | ||
475 | .macro KVM_HANDLER_SKIP area, hsrr, n | |
476 | cmpwi r10,KVM_GUEST_MODE_SKIP | |
477 | beq 89f | |
478 | BEGIN_FTR_SECTION_NESTED(948) | |
479 | ld r10,\area+EX_PPR(r13) | |
480 | std r10,HSTATE_PPR(r13) | |
481 | END_FTR_SECTION_NESTED(CPU_FTR_HAS_PPR,CPU_FTR_HAS_PPR,948) | |
482 | ld r10,\area+EX_R10(r13) | |
483 | std r12,HSTATE_SCRATCH0(r13) | |
484 | sldi r12,r9,32 | |
485 | ori r12,r12,(\n) | |
486 | /* This reloads r9 before branching to kvmppc_interrupt */ | |
487 | __BRANCH_TO_KVM_EXIT(\area, kvmppc_interrupt) | |
488 | 89: mtocrf 0x80,r9 | |
489 | ld r9,\area+EX_R9(r13) | |
490 | ld r10,\area+EX_R10(r13) | |
491 | .if \hsrr | |
492 | b kvmppc_skip_Hinterrupt | |
493 | .else | |
494 | b kvmppc_skip_interrupt | |
495 | .endif | |
496 | .endm | |
b01c8b54 PM |
497 | |
498 | #else | |
4508a74a NP |
499 | .macro KVMTEST hsrr, n |
500 | .endm | |
501 | .macro KVM_HANDLER area, hsrr, n | |
502 | .endm | |
503 | .macro KVM_HANDLER_SKIP area, hsrr, n | |
504 | .endm | |
b01c8b54 PM |
505 | #endif |
506 | ||
507 | #define NOTEST(n) | |
508 | ||
a4087a4d NP |
509 | #define EXCEPTION_PROLOG_COMMON_1() \ |
510 | std r9,_CCR(r1); /* save CR in stackframe */ \ | |
511 | std r11,_NIP(r1); /* save SRR0 in stackframe */ \ | |
512 | std r12,_MSR(r1); /* save SRR1 in stackframe */ \ | |
513 | std r10,0(r1); /* make stack chain pointer */ \ | |
514 | std r0,GPR0(r1); /* save r0 in stackframe */ \ | |
515 | std r10,GPR1(r1); /* save r1 in stackframe */ \ | |
516 | ||
517 | ||
f9ff0f30 SR |
518 | /* |
519 | * The common exception prolog is used for all except a few exceptions | |
520 | * such as a segment miss on a kernel address. We have to be prepared | |
521 | * to take another exception from the point where we first touch the | |
522 | * kernel stack onwards. | |
523 | * | |
524 | * On entry r13 points to the paca, r9-r13 are saved in the paca, | |
525 | * r9 contains the saved CR, r11 and r12 contain the saved SRR0 and | |
526 | * SRR1, and relocation is on. | |
527 | */ | |
528 | #define EXCEPTION_PROLOG_COMMON(n, area) \ | |
529 | andi. r10,r12,MSR_PR; /* See if coming from user */ \ | |
530 | mr r10,r1; /* Save r1 */ \ | |
531 | subi r1,r1,INT_FRAME_SIZE; /* alloc frame on kernel stack */ \ | |
532 | beq- 1f; \ | |
533 | ld r1,PACAKSAVE(r13); /* kernel stack to use */ \ | |
90ff5d68 | 534 | 1: cmpdi cr1,r1,-INT_FRAME_SIZE; /* check if r1 is in userspace */ \ |
1977b502 PM |
535 | blt+ cr1,3f; /* abort if it is */ \ |
536 | li r1,(n); /* will be reloaded later */ \ | |
f9ff0f30 | 537 | sth r1,PACA_TRAP_SAVE(r13); \ |
1977b502 PM |
538 | std r3,area+EX_R3(r13); \ |
539 | addi r3,r13,area; /* r3 -> where regs are saved*/ \ | |
bc2e6c6a | 540 | RESTORE_CTR(r1, area); \ |
f9ff0f30 | 541 | b bad_stack; \ |
a4087a4d | 542 | 3: EXCEPTION_PROLOG_COMMON_1(); \ |
890274c2 | 543 | kuap_save_amr_and_lock r9, r10, cr1, cr0; \ |
5d75b264 | 544 | beq 4f; /* if from kernel mode */ \ |
c223c903 | 545 | ACCOUNT_CPU_USER_ENTRY(r13, r9, r10); \ |
4c2de74c | 546 | SAVE_PPR(area, r9); \ |
b14a7253 MS |
547 | 4: EXCEPTION_PROLOG_COMMON_2(area) \ |
548 | EXCEPTION_PROLOG_COMMON_3(n) \ | |
549 | ACCOUNT_STOLEN_TIME | |
550 | ||
551 | /* Save original regs values from save area to stack frame. */ | |
552 | #define EXCEPTION_PROLOG_COMMON_2(area) \ | |
f9ff0f30 SR |
553 | ld r9,area+EX_R9(r13); /* move r9, r10 to stackframe */ \ |
554 | ld r10,area+EX_R10(r13); \ | |
555 | std r9,GPR9(r1); \ | |
556 | std r10,GPR10(r1); \ | |
557 | ld r9,area+EX_R11(r13); /* move r11 - r13 to stackframe */ \ | |
558 | ld r10,area+EX_R12(r13); \ | |
559 | ld r11,area+EX_R13(r13); \ | |
560 | std r9,GPR11(r1); \ | |
561 | std r10,GPR12(r1); \ | |
562 | std r11,GPR13(r1); \ | |
48404f2e PM |
563 | BEGIN_FTR_SECTION_NESTED(66); \ |
564 | ld r10,area+EX_CFAR(r13); \ | |
565 | std r10,ORIG_GPR3(r1); \ | |
566 | END_FTR_SECTION_NESTED(CPU_FTR_CFAR, CPU_FTR_CFAR, 66); \ | |
b14a7253 MS |
567 | GET_CTR(r10, area); \ |
568 | std r10,_CTR(r1); | |
569 | ||
570 | #define EXCEPTION_PROLOG_COMMON_3(n) \ | |
571 | std r2,GPR2(r1); /* save r2 in stackframe */ \ | |
572 | SAVE_4GPRS(3, r1); /* save r3 - r6 in stackframe */ \ | |
573 | SAVE_2GPRS(7, r1); /* save r7, r8 in stackframe */ \ | |
bc2e6c6a | 574 | mflr r9; /* Get LR, later save to stack */ \ |
f9ff0f30 | 575 | ld r2,PACATOC(r13); /* get kernel TOC into r2 */ \ |
f9ff0f30 | 576 | std r9,_LINK(r1); \ |
4e26bc4a | 577 | lbz r10,PACAIRQSOFTMASK(r13); \ |
f9ff0f30 SR |
578 | mfspr r11,SPRN_XER; /* save XER in stackframe */ \ |
579 | std r10,SOFTE(r1); \ | |
580 | std r11,_XER(r1); \ | |
581 | li r9,(n)+1; \ | |
582 | std r9,_TRAP(r1); /* set trap number */ \ | |
583 | li r10,0; \ | |
584 | ld r11,exception_marker@toc(r2); \ | |
585 | std r10,RESULT(r1); /* clear regs->result */ \ | |
b14a7253 | 586 | std r11,STACK_FRAME_OVERHEAD-16(r1); /* mark the frame */ |
f9ff0f30 SR |
587 | |
588 | /* | |
589 | * Exception vectors. | |
590 | */ | |
e899fce5 | 591 | #define STD_EXCEPTION(vec, label) \ |
bdf08e1d | 592 | EXCEPTION_PROLOG(PACA_EXGEN, label, EXC_STD, KVMTEST_PR, vec); |
f9ff0f30 | 593 | |
1707dd16 | 594 | /* Version of above for when we have to branch out-of-line */ |
da2bc464 | 595 | #define __OOL_EXCEPTION(vec, label, hdlr) \ |
4b1f5ccc NP |
596 | SET_SCRATCH0(r13); \ |
597 | EXCEPTION_PROLOG_0(PACA_EXGEN); \ | |
598 | b hdlr | |
da2bc464 | 599 | |
75e8bef3 | 600 | #define STD_EXCEPTION_OOL(vec, label) \ |
da2bc464 | 601 | EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_PR, vec); \ |
4508a74a | 602 | EXCEPTION_PROLOG_2 label, EXC_STD |
da2bc464 ME |
603 | |
604 | #define STD_EXCEPTION_HV(loc, vec, label) \ | |
4b1f5ccc | 605 | EXCEPTION_PROLOG(PACA_EXGEN, label, EXC_HV, KVMTEST_HV, vec) |
f9ff0f30 | 606 | |
da2bc464 ME |
607 | #define STD_EXCEPTION_HV_OOL(vec, label) \ |
608 | EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, vec); \ | |
4508a74a | 609 | EXCEPTION_PROLOG_2 label, EXC_HV |
1707dd16 | 610 | |
e42389c5 | 611 | #define STD_RELON_EXCEPTION(loc, vec, label) \ |
4700dfaf | 612 | /* No guest interrupts come through here */ \ |
4b1f5ccc | 613 | EXCEPTION_RELON_PROLOG(PACA_EXGEN, label, EXC_STD, NOTEST, vec) |
4700dfaf | 614 | |
b706f423 | 615 | #define STD_RELON_EXCEPTION_OOL(vec, label) \ |
c9f69518 | 616 | EXCEPTION_PROLOG_1(PACA_EXGEN, NOTEST, vec); \ |
4508a74a | 617 | EXCEPTION_PROLOG_2_RELON label, EXC_STD |
1707dd16 | 618 | |
4700dfaf | 619 | #define STD_RELON_EXCEPTION_HV(loc, vec, label) \ |
4b1f5ccc | 620 | EXCEPTION_RELON_PROLOG(PACA_EXGEN, label, EXC_HV, KVMTEST_HV, vec) |
4700dfaf | 621 | |
1707dd16 | 622 | #define STD_RELON_EXCEPTION_HV_OOL(vec, label) \ |
bc355125 | 623 | EXCEPTION_PROLOG_1(PACA_EXGEN, KVMTEST_HV, vec); \ |
4508a74a NP |
624 | EXCEPTION_PROLOG_2_RELON label, EXC_HV |
625 | ||
626 | .macro SOFTEN_TEST hsrr, vec, bitmask | |
627 | lbz r10, PACAIRQSOFTMASK(r13) | |
628 | andi. r10, r10, \bitmask | |
629 | /* This associates vector numbers with bits in paca->irq_happened */ | |
630 | .if \vec == 0x500 || \vec == 0xea0 | |
631 | li r10, PACA_IRQ_EE | |
632 | .elseif \vec == 0x900 | |
633 | li r10, PACA_IRQ_DEC | |
634 | .elseif \vec == 0xa00 || \vec == 0xe80 | |
635 | li r10, PACA_IRQ_DBELL | |
636 | .elseif \vec == 0xe60 | |
637 | li r10, PACA_IRQ_HMI | |
638 | .elseif \vec == 0xf00 | |
639 | li r10, PACA_IRQ_PMI | |
640 | .else | |
641 | .abort "Bad maskable vector" | |
642 | .endif | |
643 | ||
644 | ||
645 | .if \hsrr | |
646 | bne masked_Hinterrupt | |
647 | .else | |
648 | bne masked_interrupt | |
649 | .endif | |
650 | .endm | |
b01c8b54 | 651 | |
f14e953b | 652 | #define SOFTEN_TEST_PR(vec, bitmask) \ |
4508a74a NP |
653 | KVMTEST EXC_STD, vec ; \ |
654 | SOFTEN_TEST EXC_STD, vec, bitmask | |
b01c8b54 | 655 | |
f14e953b | 656 | #define SOFTEN_TEST_HV(vec, bitmask) \ |
4508a74a NP |
657 | KVMTEST EXC_HV, vec ; \ |
658 | SOFTEN_TEST EXC_HV, vec, bitmask | |
b01c8b54 | 659 | |
da2bc464 | 660 | #define KVMTEST_PR(vec) \ |
4508a74a | 661 | KVMTEST EXC_STD, vec |
da2bc464 ME |
662 | |
663 | #define KVMTEST_HV(vec) \ | |
4508a74a | 664 | KVMTEST EXC_HV, vec |
da2bc464 | 665 | |
4508a74a NP |
666 | #define SOFTEN_NOTEST_PR(vec, bitmask) SOFTEN_TEST EXC_STD, vec, bitmask |
667 | #define SOFTEN_NOTEST_HV(vec, bitmask) SOFTEN_TEST EXC_HV, vec, bitmask | |
4700dfaf | 668 | |
0a55c241 | 669 | #define __MASKABLE_EXCEPTION(vec, label, h, extra, bitmask) \ |
b01c8b54 | 670 | SET_SCRATCH0(r13); /* save r13 */ \ |
1707dd16 | 671 | EXCEPTION_PROLOG_0(PACA_EXGEN); \ |
f14e953b | 672 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, extra, vec, bitmask); \ |
4508a74a | 673 | EXCEPTION_PROLOG_2 label, h |
1707dd16 | 674 | |
b536da7c | 675 | #define MASKABLE_EXCEPTION(vec, label, bitmask) \ |
0a55c241 | 676 | __MASKABLE_EXCEPTION(vec, label, EXC_STD, SOFTEN_TEST_PR, bitmask) |
b3e6b5df | 677 | |
0a55c241 | 678 | #define MASKABLE_EXCEPTION_OOL(vec, label, bitmask) \ |
f14e953b | 679 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_PR, vec, bitmask);\ |
4508a74a | 680 | EXCEPTION_PROLOG_2 label, EXC_STD |
da2bc464 | 681 | |
b536da7c | 682 | #define MASKABLE_EXCEPTION_HV(vec, label, bitmask) \ |
0a55c241 | 683 | __MASKABLE_EXCEPTION(vec, label, EXC_HV, SOFTEN_TEST_HV, bitmask) |
f9ff0f30 | 684 | |
f14e953b MS |
685 | #define MASKABLE_EXCEPTION_HV_OOL(vec, label, bitmask) \ |
686 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_HV, vec, bitmask);\ | |
4508a74a | 687 | EXCEPTION_PROLOG_2 label, EXC_HV |
1707dd16 | 688 | |
0a55c241 | 689 | #define __MASKABLE_RELON_EXCEPTION(vec, label, h, extra, bitmask) \ |
4700dfaf | 690 | SET_SCRATCH0(r13); /* save r13 */ \ |
1707dd16 | 691 | EXCEPTION_PROLOG_0(PACA_EXGEN); \ |
f14e953b | 692 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, extra, vec, bitmask); \ |
4508a74a | 693 | EXCEPTION_PROLOG_2_RELON label, h |
da2bc464 | 694 | |
b536da7c | 695 | #define MASKABLE_RELON_EXCEPTION(vec, label, bitmask) \ |
0a55c241 | 696 | __MASKABLE_RELON_EXCEPTION(vec, label, EXC_STD, SOFTEN_NOTEST_PR, bitmask) |
4700dfaf | 697 | |
0a55c241 | 698 | #define MASKABLE_RELON_EXCEPTION_OOL(vec, label, bitmask) \ |
f442d004 | 699 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_NOTEST_PR, vec, bitmask);\ |
4508a74a | 700 | EXCEPTION_PROLOG_2 label, EXC_STD |
f442d004 | 701 | |
b536da7c | 702 | #define MASKABLE_RELON_EXCEPTION_HV(vec, label, bitmask) \ |
0a55c241 | 703 | __MASKABLE_RELON_EXCEPTION(vec, label, EXC_HV, SOFTEN_TEST_HV, bitmask) |
4700dfaf | 704 | |
f14e953b | 705 | #define MASKABLE_RELON_EXCEPTION_HV_OOL(vec, label, bitmask) \ |
5c11d1e5 | 706 | MASKABLE_EXCEPTION_PROLOG_1(PACA_EXGEN, SOFTEN_TEST_HV, vec, bitmask);\ |
4508a74a | 707 | EXCEPTION_PROLOG_2_RELON label, EXC_HV |
1707dd16 | 708 | |
1b701179 BH |
709 | /* |
710 | * Our exception common code can be passed various "additions" | |
711 | * to specify the behaviour of interrupts, whether to kick the | |
712 | * runlatch, etc... | |
713 | */ | |
714 | ||
9daf112b ME |
715 | /* |
716 | * This addition reconciles our actual IRQ state with the various software | |
717 | * flags that track it. This may call C code. | |
718 | */ | |
719 | #define ADD_RECONCILE RECONCILE_IRQ_STATE(r10,r11) | |
f9ff0f30 | 720 | |
fe1952fc | 721 | #define ADD_NVGPRS \ |
b1576fec | 722 | bl save_nvgprs |
fe1952fc BH |
723 | |
724 | #define RUNLATCH_ON \ | |
725 | BEGIN_FTR_SECTION \ | |
c911d2e1 | 726 | ld r3, PACA_THREAD_INFO(r13); \ |
fe1952fc BH |
727 | ld r4,TI_LOCAL_FLAGS(r3); \ |
728 | andi. r0,r4,_TLF_RUNLATCH; \ | |
729 | beql ppc64_runlatch_on_trampoline; \ | |
730 | END_FTR_SECTION_IFSET(CPU_FTR_CTRL) | |
731 | ||
a3d96f70 NP |
732 | #define EXCEPTION_COMMON(area, trap, label, hdlr, ret, additions) \ |
733 | EXCEPTION_PROLOG_COMMON(trap, area); \ | |
a1d711c5 | 734 | /* Volatile regs are potentially clobbered here */ \ |
fe1952fc BH |
735 | additions; \ |
736 | addi r3,r1,STACK_FRAME_OVERHEAD; \ | |
737 | bl hdlr; \ | |
738 | b ret | |
739 | ||
b1ee8a3d NP |
740 | /* |
741 | * Exception where stack is already set in r1, r1 is saved in r10, and it | |
742 | * continues rather than returns. | |
743 | */ | |
744 | #define EXCEPTION_COMMON_NORET_STACK(area, trap, label, hdlr, additions) \ | |
745 | EXCEPTION_PROLOG_COMMON_1(); \ | |
890274c2 | 746 | kuap_save_amr_and_lock r9, r10, cr1; \ |
b1ee8a3d NP |
747 | EXCEPTION_PROLOG_COMMON_2(area); \ |
748 | EXCEPTION_PROLOG_COMMON_3(trap); \ | |
749 | /* Volatile regs are potentially clobbered here */ \ | |
750 | additions; \ | |
751 | addi r3,r1,STACK_FRAME_OVERHEAD; \ | |
752 | bl hdlr | |
753 | ||
fe1952fc | 754 | #define STD_EXCEPTION_COMMON(trap, label, hdlr) \ |
a3d96f70 NP |
755 | EXCEPTION_COMMON(PACA_EXGEN, trap, label, hdlr, \ |
756 | ret_from_except, ADD_NVGPRS;ADD_RECONCILE) | |
f9ff0f30 SR |
757 | |
758 | /* | |
759 | * Like STD_EXCEPTION_COMMON, but for exceptions that can occur | |
7450f6f0 BH |
760 | * in the idle task and therefore need the special idle handling |
761 | * (finish nap and runlatch) | |
f9ff0f30 | 762 | */ |
a3d96f70 NP |
763 | #define STD_EXCEPTION_COMMON_ASYNC(trap, label, hdlr) \ |
764 | EXCEPTION_COMMON(PACA_EXGEN, trap, label, hdlr, \ | |
765 | ret_from_except_lite, FINISH_NAP;ADD_RECONCILE;RUNLATCH_ON) | |
f9ff0f30 SR |
766 | |
767 | /* | |
768 | * When the idle code in power4_idle puts the CPU into NAP mode, | |
769 | * it has to do so in a loop, and relies on the external interrupt | |
770 | * and decrementer interrupt entry code to get it out of the loop. | |
771 | * It sets the _TLF_NAPPING bit in current_thread_info()->local_flags | |
772 | * to signal that it is in the loop and needs help to get out. | |
773 | */ | |
774 | #ifdef CONFIG_PPC_970_NAP | |
775 | #define FINISH_NAP \ | |
776 | BEGIN_FTR_SECTION \ | |
c911d2e1 | 777 | ld r11, PACA_THREAD_INFO(r13); \ |
f9ff0f30 SR |
778 | ld r9,TI_LOCAL_FLAGS(r11); \ |
779 | andi. r10,r9,_TLF_NAPPING; \ | |
780 | bnel power4_fixup_nap; \ | |
781 | END_FTR_SECTION_IFSET(CPU_FTR_CAN_NAP) | |
782 | #else | |
783 | #define FINISH_NAP | |
784 | #endif | |
785 | ||
4508a74a NP |
786 | #endif /* __ASSEMBLY__ */ |
787 | ||
f9ff0f30 | 788 | #endif /* _ASM_POWERPC_EXCEPTION_H */ |