Commit | Line | Data |
---|---|---|
d2912cb1 | 1 | /* SPDX-License-Identifier: GPL-2.0-only */ |
2ccdd1e7 CM |
2 | /* |
3 | * linux/arch/arm/mm/tlb-v7.S | |
4 | * | |
5 | * Copyright (C) 1997-2002 Russell King | |
6 | * Modified for ARMv7 by Catalin Marinas | |
7 | * | |
2ccdd1e7 CM |
8 | * ARM architecture version 7 TLB handling functions.
9 | * These assume a split I/D TLB. | |
10 | */ | |
991da17e | 11 | #include <linux/init.h> |
2ccdd1e7 | 12 | #include <linux/linkage.h> |
f00ec48f | 13 | #include <asm/assembler.h> |
2ccdd1e7 CM |
14 | #include <asm/asm-offsets.h> |
15 | #include <asm/page.h> | |
16 | #include <asm/tlbflush.h> | |
17 | #include "proc-macros.S" | |
18 | ||
19 | /* | |
20 | * v7wbi_flush_user_tlb_range(start, end, vma) | |
21 | * | |
22 | * Invalidate a range of TLB entries in the specified address space. | |
23 | * | |
24 | * - start - start address (may not be aligned) | |
25 | * - end - end address (exclusive, may not be aligned) | |
26 | * - vma - vma_struct describing address range | |
27 | * | |
28 | * It is assumed that: | |
29 | * - the "Invalidate single entry" instruction will invalidate | |
30 | * both the I and the D TLBs on Harvard-style TLBs | |
31 | */ | |
32 | ENTRY(v7wbi_flush_user_tlb_range) | |
33 | vma_vm_mm r3, r2 @ r3 = vma->vm_mm
34 | mmid r3, r3 @ r3 = vm_mm->context.id (carries the ASID)
6abdd491 | 35 | dsb ish @ inner-shareable barrier before invalidating
2ccdd1e7 CM |
36 | mov r0, r0, lsr #PAGE_SHIFT @ r0 = start page number (align address down)
37 | mov r1, r1, lsr #PAGE_SHIFT @ r1 = end page number | |
38 | asid r3, r3 @ r3 = ASID only (mask off the rest)
730a8128 WD |
39 | #ifdef CONFIG_ARM_ERRATA_720789
40 | ALT_SMP(W(mov) r3, #0 ) @ erratum 720789: use ASID 0 so the all-ASID op below matches | |
41 | ALT_UP(W(nop) ) @ workaround needed on SMP only | |
5a783cbc | 42 | #endif
2ccdd1e7 CM |
43 | orr r0, r3, r0, lsl #PAGE_SHIFT @ initial MVA = (start page << PAGE_SHIFT) | ASID
44 | mov r1, r1, lsl #PAGE_SHIFT @ r1 = page-aligned end address | |
2ccdd1e7 | 45 | 1: @ loop: invalidate one page per iteration
5a783cbc WD |
46 | #ifdef CONFIG_ARM_ERRATA_720789
47 | ALT_SMP(mcr p15, 0, r0, c8, c3, 3) @ TLB invalidate U MVA all ASID (shareable) | |
48 | #else | |
f00ec48f | 49 | ALT_SMP(mcr p15, 0, r0, c8, c3, 1) @ TLB invalidate U MVA (shareable)
5a783cbc | 50 | #endif
f00ec48f RK |
51 | ALT_UP(mcr p15, 0, r0, c8, c7, 1) @ TLB invalidate U MVA
52 | ||
2ccdd1e7 CM |
53 | add r0, r0, #PAGE_SZ @ step MVA to the next page
54 | cmp r0, r1 @ reached end? | |
55 | blo 1b @ unsigned compare: loop while below end | |
6abdd491 | 56 | dsb ish @ complete the TLB invalidates
6ebbf2ce | 57 | ret lr @ NOTE(review): no isb here unlike the kern variant — presumably the return to user space synchronizes; confirm
93ed3970 | 58 | ENDPROC(v7wbi_flush_user_tlb_range)
2ccdd1e7 CM |
59 | |
60 | /* | |
61 | * v7wbi_flush_kern_tlb_range(start,end) | |
62 | * | |
63 | * Invalidate a range of kernel TLB entries | |
64 | * | |
65 | * - start - start address (may not be aligned) | |
66 | * - end - end address (exclusive, may not be aligned) | |
67 | */ | |
68 | ENTRY(v7wbi_flush_kern_tlb_range) | |
6abdd491 | 69 | dsb ish @ inner-shareable barrier before invalidating
2ccdd1e7 CM |
70 | mov r0, r0, lsr #PAGE_SHIFT @ r0 = start page number (align address down)
71 | mov r1, r1, lsr #PAGE_SHIFT @ r1 = end page number | |
72 | mov r0, r0, lsl #PAGE_SHIFT @ r0 = page-aligned start; no ASID for a kernel range | |
73 | mov r1, r1, lsl #PAGE_SHIFT @ r1 = page-aligned end | |
74 | 1: @ loop: invalidate one page per iteration | |
5a783cbc WD |
75 | #ifdef CONFIG_ARM_ERRATA_720789
76 | ALT_SMP(mcr p15, 0, r0, c8, c3, 3) @ TLB invalidate U MVA all ASID (shareable) | |
77 | #else | |
f00ec48f | 78 | ALT_SMP(mcr p15, 0, r0, c8, c3, 1) @ TLB invalidate U MVA (shareable)
5a783cbc | 79 | #endif
f00ec48f | 80 | ALT_UP(mcr p15, 0, r0, c8, c7, 1) @ TLB invalidate U MVA
2ccdd1e7 CM |
81 | add r0, r0, #PAGE_SZ @ step to the next page
82 | cmp r0, r1 @ reached end? | |
83 | blo 1b @ unsigned compare: loop while below end | |
6abdd491 | 84 | dsb ish @ complete the TLB invalidates
2ccdd1e7 | 85 | isb @ synchronize context before returning
6ebbf2ce | 86 | ret lr
93ed3970 | 87 | ENDPROC(v7wbi_flush_kern_tlb_range)
2ccdd1e7 | 88 | |
991da17e | 89 | __INIT @ place the descriptor below in the init section
2ccdd1e7 | 90 | 
2ba564b9 DM |
91 | /* emit the struct cpu_tlb_fns descriptor for the v7wbi functions above, with separate UP and SMP flag words (see <asm/tlbflush.h> and proc-macros.S) */
92 | define_tlb_functions v7wbi, v7wbi_tlb_flags_up, flags_smp=v7wbi_tlb_flags_smp