Commit | Line | Data |
---|---|---|
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * We need constants.h for:
 *  VMA_VM_MM
 *  VMA_VM_FLAGS
 *  VM_EXEC
 */
#include <asm/asm-offsets.h>
#include <asm/pgtable.h>
#include <asm/thread_info.h>

#ifdef CONFIG_CPU_V7M
#include <asm/v7m.h>
#endif
/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 * \rd: destination register
 * \rn: register holding the vma pointer
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]		@ \rd = vma->vm_mm
	.endm
22 | ||
/*
 * vma_vm_flags - get vma->vm_flags
 * \rd: destination register
 * \rn: register holding the vma pointer
 */
	.macro	vma_vm_flags, rd, rn
	ldr	\rd, [\rn, #VMA_VM_FLAGS]	@ \rd = vma->vm_flags
	.endm
29 | ||
/*
 * act_mm - get current->active_mm
 * \rd: destination register (also used as scratch)
 *
 * TSK_ACTIVE_MM may be larger than a 12-bit rotated immediate allows,
 * so add the upper part of the offset separately when needed.
 */
	.macro	act_mm, rd
	get_current \rd
	.if (TSK_ACTIVE_MM > IMM12_MASK)
	add	\rd, \rd, #TSK_ACTIVE_MM & ~IMM12_MASK
	.endif
	ldr	\rd, [\rd, #TSK_ACTIVE_MM & IMM12_MASK]
	.endm
40 | ||
/*
 * mmid - get context id from mm pointer (mm->context.id)
 * \rd: destination register
 * \rn: register holding the mm pointer
 *
 * note, this field is 64bit, so in big-endian the two words are
 * swapped too: the low word we want lives at offset +4 there.
 */
	.macro	mmid, rd, rn
#ifdef __ARMEB__
	ldr	\rd, [\rn, #MM_CONTEXT_ID + 4 ]
#else
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
#endif
	.endm
52 | ||
/*
 * mask_asid - mask the ASID (low 8 bits) from the context ID
 * \rd: destination register
 * \rn: register holding the context ID
 */
	.macro	asid, rd, rn
	and	\rd, \rn, #255			@ ASID is the low byte
	.endm
/*
 * crval - emit the control register clear/set value pair.
 * With an MMU the \mmuset word is used; without one, \ucset.
 * The \clear mask is emitted unconditionally.
 */
	.macro	crval, clear, mmuset, ucset
#ifdef CONFIG_MMU
	.word	\clear
	.word	\mmuset
#else
	.word	\clear
	.word	\ucset
#endif
	.endm
/*
 * dcache_line_size - get the minimum D-cache line size from the CTR register
 * on ARMv7.
 * \reg: receives the line size in bytes
 * \tmp: scratch register (clobbered)
 *
 * V7M has no cp15; its CTR is memory-mapped in the System Control Block.
 * DminLine is a log2 word count, hence the shift of #4 (bytes per word).
 */
	.macro	dcache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
	movw	\tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	movt	\tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	ldr	\tmp, [\tmp]
#else
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
#endif
	lsr	\tmp, \tmp, #16			@ DminLine field
	and	\tmp, \tmp, #0xf		@ cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm
da091653 | 87 | |
/*
 * icache_line_size - get the minimum I-cache line size from the CTR register
 * on ARMv7.
 * \reg: receives the line size in bytes
 * \tmp: scratch register (clobbered)
 *
 * IminLine sits in the low nibble of CTR, so no shift is needed here
 * (unlike dcache_line_size).
 */
	.macro	icache_line_size, reg, tmp
#ifdef CONFIG_CPU_V7M
	movw	\tmp, #:lower16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	movt	\tmp, #:upper16:BASEADDR_V7M_SCB + V7M_SCB_CTR
	ldr	\tmp, [\tmp]
#else
	mrc	p15, 0, \tmp, c0, c0, 1		@ read ctr
#endif
	and	\tmp, \tmp, #0xf		@ cache line size encoding
	mov	\reg, #4			@ bytes per word
	mov	\reg, \reg, lsl \tmp		@ actual cache line size
	.endm
da091653 RK |
104 | |
/*
 * Sanity check the PTE configuration for the code below - which makes
 * certain assumptions about how these bits are laid out.
 */
#ifdef CONFIG_MMU
#if L_PTE_SHARED != PTE_EXT_SHARED
#error PTE shared bit mismatch
#endif
#if !defined (CONFIG_ARM_LPAE) && \
	(L_PTE_XN+L_PTE_USER+L_PTE_RDONLY+L_PTE_DIRTY+L_PTE_YOUNG+\
	 L_PTE_PRESENT) > L_PTE_SHARED
#error Invalid Linux PTE bit settings
#endif
#endif	/* CONFIG_MMU */
da091653 RK |
119 | |
/*
 * The ARMv6 and ARMv7 set_pte_ext translation function.
 *
 * Permission translation:
 *  YUWD  APX AP1 AP0	SVC	User
 *  0xxx   0   0   0	no acc	no acc
 *  100x   1   0   1	r/o	no acc
 *  10x0   1   0   1	r/o	no acc
 *  1011   0   0   1	r/w	no acc
 *  110x   1   1   1	r/o	r/o
 *  11x0   1   1   1	r/o	r/o
 *  1111   0   1   1	r/w	r/w
 */
/*
 * armv6_mt_table - emit the memory-type translation table, indexed by
 * the L_PTE_MT_* value, giving the hardware TEX/C/B (and APX) bits.
 * \pfx: label prefix, producing \pfx\()_mt_table
 */
	.macro	armv6_mt_table pfx
\pfx\()_mt_table:
	.long	0x00						@ L_PTE_MT_UNCACHED
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_BUFFERABLE
	.long	PTE_CACHEABLE					@ L_PTE_MT_WRITETHROUGH
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_WRITEBACK
	.long	PTE_BUFFERABLE					@ L_PTE_MT_DEV_SHARED
	.long	0x00						@ unused
	.long	0x00						@ L_PTE_MT_MINICACHE (not present)
	.long	PTE_EXT_TEX(1) | PTE_CACHEABLE | PTE_BUFFERABLE	@ L_PTE_MT_WRITEALLOC
	.long	0x00						@ unused
	.long	PTE_EXT_TEX(1)					@ L_PTE_MT_DEV_WC
	.long	0x00						@ unused
	.long	PTE_CACHEABLE | PTE_BUFFERABLE			@ L_PTE_MT_DEV_CACHED
	.long	PTE_EXT_TEX(2)					@ L_PTE_MT_DEV_NONSHARED
	.long	0x00						@ unused
	.long	0x00						@ unused
	.long	PTE_CACHEABLE | PTE_BUFFERABLE | PTE_EXT_APX	@ L_PTE_MT_VECTORS
	.endm
152 | ||
/*
 * armv6_set_pte_ext - store the Linux PTE, then build and store the
 * hardware PTE 2048 bytes later, translating Linux permission/memory-type
 * bits into ARMv6/v7 AP/APX/XN and TEX/C/B bits via \pfx\()_mt_table.
 * In:  r0 = Linux PTE address, r1 = Linux PTE value, r2 = extension bits
 * Clobbers: r2, r3, ip
 */
	.macro	armv6_set_pte_ext pfx
	str	r1, [r0], #2048			@ linux version

	bic	r3, r1, #0x000003fc		@ drop Linux status bits
	bic	r3, r3, #PTE_TYPE_MASK
	orr	r3, r3, r2			@ merge caller's extension bits
	orr	r3, r3, #PTE_EXT_AP0 | 2	@ small ext page, kernel access

	adr	ip, \pfx\()_mt_table		@ look up memory type bits
	and	r2, r1, #L_PTE_MT_MASK
	ldr	r2, [ip, r2]

	eor	r1, r1, #L_PTE_DIRTY		@ read-only unless dirty+writable
	tst	r1, #L_PTE_DIRTY|L_PTE_RDONLY
	orrne	r3, r3, #PTE_EXT_APX

	tst	r1, #L_PTE_USER
	orrne	r3, r3, #PTE_EXT_AP1
	tstne	r3, #PTE_EXT_APX

	@ user read-only -> kernel read-only
	bicne	r3, r3, #PTE_EXT_AP0

	tst	r1, #L_PTE_XN
	orrne	r3, r3, #PTE_EXT_XN

	eor	r3, r3, r2			@ fold in memory type bits

	tst	r1, #L_PTE_YOUNG		@ not present/young or PROT_NONE
	tstne	r1, #L_PTE_PRESENT		@ -> store a faulting entry
	moveq	r3, #0
	tstne	r1, #L_PTE_NONE
	movne	r3, #0

	str	r3, [r0]
	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
	.endm
190 | ||
191 | ||
/*
 * The ARMv3, ARMv4 and ARMv5 set_pte_ext translation function,
 * covering most CPUs except Xscale and Xscale 3.
 *
 * Permission translation:
 *  YUWD   AP	SVC	User
 *  0xxx  0x00	no acc	no acc
 *  100x  0x00	r/o	no acc
 *  10x0  0x00	r/o	no acc
 *  1011  0x55	r/w	no acc
 *  110x  0xaa	r/w	r/o
 *  11x0  0xaa	r/w	r/o
 *  1111  0xff	r/w	r/w
 *
 * In:  r0 = Linux PTE address, r1 = Linux PTE value
 * Clobbers: r2, r3
 * \wc_disable: on writethrough caches, strip the bufferable bit from
 * cacheable mappings so write-combining is not enabled behind our back.
 */
	.macro	armv3_set_pte_ext wc_disable=1
	str	r1, [r0], #2048			@ linux version

	eor	r3, r1, #L_PTE_PRESENT | L_PTE_PRESENT ^ (L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY) ^ (L_PTE_YOUNG | L_PTE_DIRTY) | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	bic	r2, r2, #PTE_TYPE_MASK
	orr	r2, r2, #PTE_TYPE_SMALL

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_SMALL_AP_URO_SRW

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_SMALL_AP_UNO_SRW

	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0

	.if	\wc_disable
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	tst	r2, #PTE_CACHEABLE
	bicne	r2, r2, #PTE_BUFFERABLE
#endif
	.endif
	str	r2, [r0]			@ hardware version
	.endm
232 | ||
233 | ||
/*
 * Xscale set_pte_ext translation, split into two halves to cope
 * with work-arounds.  r3 must be preserved by code between these
 * two macros.
 *
 * Permission translation:
 *  YUWD  AP	SVC	User
 *  0xxx  00	no acc	no acc
 *  100x  00	r/o	no acc
 *  10x0  00	r/o	no acc
 *  1011  01	r/w	no acc
 *  110x  10	r/w	r/o
 *  11x0  10	r/w	r/o
 *  1111  11	r/w	r/w
 *
 * In:  r0 = Linux PTE address, r1 = Linux PTE value
 * Out: r2 = partial hardware PTE, r3 = inverted status bits (keep live)
 */
	.macro	xscale_set_pte_ext_prologue
	str	r1, [r0]			@ linux version

	eor	r3, r1, #L_PTE_PRESENT | L_PTE_YOUNG | L_PTE_DIRTY

	bic	r2, r1, #PTE_SMALL_AP_MASK	@ keep C, B bits
	orr	r2, r2, #PTE_TYPE_EXT		@ extended page

	tst	r3, #L_PTE_USER			@ user?
	orrne	r2, r2, #PTE_EXT_AP_URO_SRW	@ yes -> user r/o, system r/w

	tst	r3, #L_PTE_RDONLY | L_PTE_DIRTY	@ write and dirty?
	orreq	r2, r2, #PTE_EXT_AP_UNO_SRW	@ yes -> user n/a, system r/w
						@ combined with user -> user r/w
	.endm
264 | ||
/*
 * xscale_set_pte_ext_epilogue - finish the Xscale set_pte_ext sequence:
 * fault out non-present/old entries, store the hardware PTE at +2048,
 * then clean the cache line and issue a data write barrier.
 * In: r0 = Linux PTE address, r2/r3 as left by the prologue
 * Clobbers: r0 (advanced by 2048), ip
 */
	.macro	xscale_set_pte_ext_epilogue
	tst	r3, #L_PTE_PRESENT | L_PTE_YOUNG	@ present and young?
	movne	r2, #0				@ no -> fault

	str	r2, [r0, #2048]!		@ hardware version
	mov	ip, #0
	mcr	p15, 0, r0, c7, c10, 1		@ clean L1 D line
	mcr	p15, 0, ip, c7, c10, 4		@ data write barrier
	.endm
66a625a8 | 274 | |
/*
 * define_processor_functions - emit a struct processor function table.
 * \name:    processor name, forming the cpu_\name\()_* symbol names
 * \dabort:  data abort handler
 * \pabort:  prefetch abort handler
 * \nommu:   non-zero -> no set_pte_ext entry
 * \suspend: non-zero -> include suspend/resume entries
 * \bugs:    optional check_bugs entry
 */
.macro define_processor_functions name:req, dabort:req, pabort:req, nommu=0, suspend=0, bugs=0
/*
 * If we are building for big.Little with branch predictor hardening,
 * we need the processor function tables to remain available after boot.
 */
#if defined(CONFIG_BIG_LITTLE) && defined(CONFIG_HARDEN_BRANCH_PREDICTOR)
	.section ".rodata"
#endif
	.type	\name\()_processor_functions, #object
	.align	2
ENTRY(\name\()_processor_functions)
	.word	\dabort
	.word	\pabort
	.word	cpu_\name\()_proc_init
	.word	\bugs
	.word	cpu_\name\()_proc_fin
	.word	cpu_\name\()_reset
	.word	cpu_\name\()_do_idle
	.word	cpu_\name\()_dcache_clean_area
	.word	cpu_\name\()_switch_mm

	.if \nommu
	.word	0
	.else
	.word	cpu_\name\()_set_pte_ext
	.endif

	.if \suspend
	.word	cpu_\name\()_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	.word	cpu_\name\()_do_suspend
	.word	cpu_\name\()_do_resume
#else
	.word	0
	.word	0
#endif
	.else
	.word	0
	.word	0
	.word	0
	.endif

	.size	\name\()_processor_functions, . - \name\()_processor_functions
#if defined(CONFIG_BIG_LITTLE) && defined(CONFIG_HARDEN_BRANCH_PREDICTOR)
	.previous
#endif
.endm
322 | ||
3e0a07f8 GC |
323 | .macro globl_equ x, y |
324 | .globl \x | |
325 | .equ \x, \y | |
326 | .endm | |
bf35706f AB |
327 | |
/*
 * initfn - emit \func as an offset relative to \base (position
 * independent function pointer for init tables).
 */
	.macro	initfn, func, base
	.long	\func - \base
	.endm
c848791f | 331 | |
6c5c2a01 RK |
/*
 * Macro to calculate the log2 size for the protection region
 * registers.  This calculates rd = log2(size) - 1.  tmp must
 * not be the same register as rd.
 */
	.macro	pr_sz, rd, size, tmp
	mov	\tmp, \size, lsr #12		@ start from 4K granularity
	mov	\rd, #11			@ log2(4096) - 1
1:	movs	\tmp, \tmp, lsr #1		@ count remaining size bits
	addne	\rd, \rd, #1
	bne	1b
	.endm
344 | ||
/*
 * Macro to generate a protection region register value
 * given a pre-masked address, size, and enable bit.
 * Corrupts size.
 */
	.macro	pr_val, dest, addr, size, enable
	pr_sz	\dest, \size, \size		@ calculate log2(size) - 1
	orr	\dest, \addr, \dest, lsl #1	@ mask in the region size
	orr	\dest, \dest, \enable		@ set the enable bit
	.endm