/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
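/*
 * __tlbil_va(address, pid): r3 = virtual address to invalidate,
 * r4 = PID to run the search under (the old PID is restored after
 * the tlbsx).
 */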
_GLOBAL(__tlbil_va)
	/* We run the search with interrupts disabled because we have to
	 * change the PID, and we don't want to be preempted while the
	 * temporary PID is in place.
	 */
	mfmsr	r5
	mfspr	r6,SPRN_PID
	wrteei	0
	mtspr	SPRN_PID,r4
	tlbsx.	r3, 0, r3
	mtspr	SPRN_PID,r6
	wrtee	r5
	bne	1f
	sync
	/* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
	 * clear. Since 25 is the V bit in the TLB_TAG, loading this value
	 * will invalidate the TLB entry.
	 */
	tlbwe	r3, r3, TLB_TAG
	isync
1:	blr

#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
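/*
 * __tlbil_va(address, pid): r3 = virtual address, r4 = TID, whose low
 * 16 bits are written into the MMUCR STID field for the tlbsx search.
 */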
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10

	/*
	 * We write 16 bits of STID since 47x supports that much, we
	 * will never be passed out of bounds values on 440 (hopefully)
	 */
	rlwimi	r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
BEGIN_MMU_FTR_SECTION
	b	2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
	/* On 440 there are only 64 TLB entries, so r3 < 64, which means
	 * bit 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading
	 * this value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
	isync
10:	wrtee	r10
	blr
2:
#ifdef CONFIG_PPC_47x
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe	r4,r7,0		/* write it */
	isync
	wrtee	r10
	blr
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY	1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	b	2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
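	/* 440: write each index 0..tlb_44x_hwater back as its own PAGEID
	 * word. These values keep the V bit (22) clear, so every entry up
	 * to the high watermark is invalidated while the pinned entries
	 * above it are left alone.
	 */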
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
2:
#ifdef CONFIG_PPC_47x
	/* 476 variant. There's no simple way to do this, hopefully we'll
	 * try to limit the number of such full invalidates.
	 */
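	/* Walk all 256 sets of the TLB, 4 ways per set: read each entry
	 * with tlbre and, if it is valid, write it back with V cleared.
	 * tlb_47x_boltmap holds one bit per set (32 sets per word); a set
	 * bit means way 0 of that set is bolted and must be skipped.
	 */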
	mfmsr	r11			/* Interrupts off */
	wrteei	0
	li	r3,-1			/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000		/* Specify way explicitly */

	b	9f			/* For each set */

1:	li	r9,4			/* Number of ways */
	li	r4,0			/* Current way */
	li	r6,0			/* Default entry value 0 */
	andi.	r0,r8,1			/* Check if way 0 is bolted */
	mtctr	r9			/* Load way counter */
	bne-	3f			/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4		/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15		/* Copy index into position */
	tlbre	r6,r5,0			/* Read entry */
3:	addis	r4,r4,0x2000		/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID	/* Valid entry ? */
	beq	4f			/* Nope, skip it */
	rlwimi	r7,r5,0,1,2		/* Insert way number */
	rlwinm	r6,r6,0,21,19		/* Clear V */
	tlbwe	r6,r7,0			/* Write it */
4:	bdnz	2b			/* Loop for each way */
	srwi	r8,r8,1			/* Next boltmap bit */
9:	cmpwi	cr1,r3,255		/* Last set done ? */
	addi	r3,r3,1			/* Next set */
	beq	cr1,1f			/* End of loop */
	andi.	r0,r3,0x1f		/* Need to load a new boltmap word ? */
	bne	1b			/* No, loop */
	lwz	r8,0(r10)		/* Load boltmap entry */
	addi	r10,r10,4		/* Next word */
	b	1b			/* Then loop */
1:	isync				/* Sync shadows */
	wrtee	r11
#else /* CONFIG_PPC_47x */
1:	trap
	EMIT_BUG_ENTRY	1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */
	blr

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though, it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
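/*
 * r3 = virtual address, r4 = TID; the TID goes into the MMUCR STID
 * field, as in __tlbil_va, before the broadcast tlbivax is issued.
 */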
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	eieio
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before msync
 * completes. Touch enough instruction cache lines to ensure cache hits.
 */
1:	mflr	r9
	bl	2f
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_FSL_BOOKE)
/*
 * FSL BookE implementations.
 *
 * Since feature sections are using _SECTION_ELSE we need
 * to have the larger code path before the _SECTION_ELSE
 */

/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

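/*
 * Flush all TLB entries for a given PID (r3). With tlbilx the PID is
 * placed in MAS6[SPID] for a targeted invalidate; without it there is
 * no per-PID primitive, so fall back to a full MMUCSR0[TLBFI] flush.
 */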
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
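/*
 * r3 = virtual address, r4 = PID; the PID is shifted into MAS6[SPID]
 * and a 4K page size is assumed for the tlbsx search.
 */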
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
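/*
 * _tlbil_pid(pid): r3 = PID, shifted into MAS6[SPID] for tlbilx.
 */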
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

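/*
 * _tlbil_va(address, pid, tsize, ind): r3 = virtual address, r4 = PID,
 * r5 = page size (MAS6 ISIZE encoding), r6 = indirect entry flag.
 */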
_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

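/*
 * Same arguments as _tlbil_va, but broadcast the invalidation to all
 * processors with tlbivax and wait for completion with tlbsync.
 */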
_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	eieio
	tlbsync
	sync
	wrtee	r10
	blr

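/*
 * set_context(context, pgdir): switch to a new context by writing the
 * context id (r3) into SPRN_PID; r4 is the new PGDIR pointer.
 */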
_GLOBAL(set_context)
#ifdef CONFIG_BDI_SWITCH
	/* Context switch the PTE pointer for the Abatron BDI2000.
	 * The PGDIR is the second parameter.
	 */
	lis	r5, abatron_pteptrs@h
	ori	r5, r5, abatron_pteptrs@l
	stw	r4, 0x4(r5)
#endif
	mtspr	SPRN_PID,r3
	isync			/* Force context change */
	blr
#else
#error Unsupported processor type !
#endif

#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, and r10
 */
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry
 */
_GLOBAL(loadcam_multi)
	mflr	r8

	/*
	 * Set up a temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
	 */
	bl	1f
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

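	/* Write the requested entries; loadcam_entry preserves r7-r10,
	 * so the loop bounds in r9/r10, the saved LR in r8 and the
	 * temporary entry index in r7 survive the calls.
	 */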
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	mtlr	r8
	blr
#endif