Commit | Line | Data |
---|---|---|
1a59d1b8 | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
1da177e4 LT |
2 | /* |
3 | * linux/arch/arm/mm/proc-arm922.S: MMU functions for ARM922 | |
4 | * | |
5 | * Copyright (C) 1999,2000 ARM Limited | |
6 | * Copyright (C) 2000 Deep Blue Solutions Ltd. | |
7 | * Copyright (C) 2001 Altera Corporation | |
d090ddda | 8 | * hacked for non-paged-MM by Hyok S. Choi, 2003. |
1da177e4 | 9 | * |
1da177e4 LT |
10 | * These are the low level assembler for performing cache and TLB |
11 | * functions on the arm922. | |
12 | * | |
13 | * CONFIG_CPU_ARM922_CPU_IDLE -> nohlt | |
14 | */ | |
15 | #include <linux/linkage.h> | |
1da177e4 | 16 | #include <linux/init.h> |
1036b895 | 17 | #include <linux/cfi_types.h> |
65fddcfc | 18 | #include <linux/pgtable.h> |
1da177e4 | 19 | #include <asm/assembler.h> |
5ec9407d | 20 | #include <asm/hwcap.h> |
74945c86 | 21 | #include <asm/pgtable-hwdef.h> |
1da177e4 LT |
22 | #include <asm/page.h> |
23 | #include <asm/ptrace.h> | |
24 | #include "proc-macros.S" | |
25 | ||
26 | /* | |
27 | * The size of one data cache line. | |
28 | */ | |
29 | #define CACHE_DLINESIZE 32 | |
30 | ||
31 | /* | |
32 | * The number of data cache segments. | |
33 | */ | |
34 | #define CACHE_DSEGMENTS 4 | |
35 | ||
36 | /* | |
37 | * The number of lines in a cache segment. | |
38 | */ | |
39 | #define CACHE_DENTRIES 64 | |
40 | ||
41 | /* | |
42 | * This is the size at which it becomes more efficient to | |
43 | * clean the whole cache, rather than using the individual | |
25985edc | 44 | * cache line maintenance instructions. (I think this should |
1da177e4 LT |
45 | * be 32768). |
46 | */ | |
47 | #define CACHE_DLIMIT 8192 | |
48 | ||
49 | ||
50 | .text | |
51 | /* | |
52 | * cpu_arm922_proc_init() | |
53 | */ | |
51db13aa | 54 | SYM_TYPED_FUNC_START(cpu_arm922_proc_init) |
6ebbf2ce | 55 | ret lr |
51db13aa | 56 | SYM_FUNC_END(cpu_arm922_proc_init) |
1da177e4 LT |
57 | |
/*
 * cpu_arm922_proc_fin()
 *
 * Prepare for the processor to be shut down: turn the
 * caches off in the control register.
 */
SYM_TYPED_FUNC_START(cpu_arm922_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ read control register
	bic	r0, r0, #0x1000			@ clear I-cache enable	...i............
	bic	r0, r0, #0x000e			@ clear W, C, A bits	............wca.
	mcr	p15, 0, r0, c1, c0, 0		@ write back: caches disabled
	ret	lr
SYM_FUNC_END(cpu_arm922_proc_fin)
1da177e4 LT |
68 | |
/*
 * cpu_arm922_reset(loc)
 *
 * Perform a soft reset of the system.  Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 *
 * loc: location to jump to for soft reset
 *
 * Must live in .idmap.text: it runs while the MMU is being
 * turned off, so it needs an identity mapping.
 */
	.align	5
	.pushsection	.idmap.text, "ax"
SYM_TYPED_FUNC_START(cpu_arm922_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches
	mcr	p15, 0, ip, c7, c10, 4		@ drain write buffer
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
#endif
	mrc	p15, 0, ip, c1, c0, 0		@ read control register
	bic	ip, ip, #0x000f			@ clear WCAM bits	............wcam
	bic	ip, ip, #0x1100			@ clear I, S bits	...i...s........
	mcr	p15, 0, ip, c1, c0, 0		@ MMU and caches off
	ret	r0				@ jump to reset location
SYM_FUNC_END(cpu_arm922_reset)
	.popsection
1da177e4 LT |
94 | |
/*
 * cpu_arm922_do_idle()
 *
 * Enter the low-power wait-for-interrupt state.
 */
	.align	5
SYM_TYPED_FUNC_START(cpu_arm922_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ wait for interrupt
	ret	lr
SYM_FUNC_END(cpu_arm922_do_idle)
1da177e4 LT |
103 | |
104 | #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH | |
105 | ||
c8c90860 MW |
/*
 * flush_icache_all()
 *
 * Unconditionally clean and invalidate the entire icache.
 */
SYM_TYPED_FUNC_START(arm922_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	ret	lr
SYM_FUNC_END(arm922_flush_icache_all)

1da177e4 LT |
/*
 * flush_user_cache_all()
 *
 * Clean and invalidate all cache entries in a particular
 * address space.  The ARM922 cache is virtually indexed, so
 * this is simply the whole-cache flush.
 */
SYM_FUNC_ALIAS(arm922_flush_user_cache_all, arm922_flush_kern_cache_all)
1da177e4 LT |
124 | |
/*
 * flush_kern_cache_all()
 *
 * Clean and invalidate the entire cache, walking the D-cache
 * by set/way index (CACHE_DSEGMENTS segments of CACHE_DENTRIES
 * lines each), then invalidating the I-cache.
 */
SYM_TYPED_FUNC_START(arm922_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean+invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 3 to 0
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm922_flush_kern_cache_all)
1da177e4 LT |
146 | |
/*
 * flush_user_cache_range(start, end, flags)
 *
 * Clean and invalidate a range of cache entries in the
 * specified address range.  Falls back to a whole-cache
 * flush when the range exceeds CACHE_DLIMIT.
 *
 * - start - start address (inclusive)
 * - end	- end address (exclusive)
 * - flags	- vm_flags describing address space
 */
SYM_TYPED_FUNC_START(arm922_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ r3 = total size of range
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache

1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	tst	r2, #VM_EXEC
	mcrne	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm922_flush_user_cache_range)
1da177e4 LT |
173 | |
/*
 * coherent_kern_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  If you have non-snooping
 * Harvard caches, you need to implement this function.
 *
 * - start - virtual start address
 * - end	- virtual end address
 */
SYM_TYPED_FUNC_START(arm922_coherent_kern_range)
#ifdef CONFIG_CFI_CLANG /* Fallthrough if !CFI */
	b	arm922_coherent_user_range
#endif
SYM_FUNC_END(arm922_coherent_kern_range)
1da177e4 LT |
189 | |
/*
 * coherent_user_range(start, end)
 *
 * Ensure coherency between the Icache and the Dcache in the
 * region described by start, end.  Cleans each D-cache line
 * and invalidates the corresponding I-cache line.
 *
 * - start - virtual start address
 * - end	- virtual end address
 *
 * Returns 0 in r0.
 */
SYM_TYPED_FUNC_START(arm922_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align down to a cache line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	mov	r0, #0				@ success
	ret	lr
SYM_FUNC_END(arm922_coherent_user_range)
1da177e4 LT |
211 | |
/*
 * flush_kern_dcache_area(void *addr, size_t size)
 *
 * Ensure no D cache aliasing occurs, either with itself or
 * the I cache.
 *
 * - addr	- kernel address
 * - size	- region size
 */
SYM_TYPED_FUNC_START(arm922_flush_kern_dcache_area)
	add	r1, r0, r1			@ r1 = end address
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm922_flush_kern_dcache_area)
1da177e4 LT |
232 | |
/*
 * dma_inv_range(start, end)
 *
 * Invalidate (discard) the specified virtual address range.
 * May not write back any entries.  If 'start' or 'end'
 * are not cache line aligned, those lines must be written
 * back first so no adjacent data is lost.
 *
 * - start - virtual start address
 * - end	- virtual end address
 *
 * (same as v4wb)
 */
arm922_dma_inv_range:
	tst	r0, #CACHE_DLINESIZE - 1	@ start unaligned?
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean partial first line
	tst	r1, #CACHE_DLINESIZE - 1	@ end unaligned?
	mcrne	p15, 0, r1, c7, c10, 1		@ clean partial last line
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
258 | |
259 | /* | |
260 | * dma_clean_range(start, end) | |
261 | * | |
262 | * Clean the specified virtual address range. | |
263 | * | |
264 | * - start - virtual start address | |
265 | * - end - virtual end address | |
266 | * | |
267 | * (same as v4wb) | |
268 | */ | |
702b94bf | 269 | arm922_dma_clean_range: |
1da177e4 LT |
270 | bic r0, r0, #CACHE_DLINESIZE - 1 |
271 | 1: mcr p15, 0, r0, c7, c10, 1 @ clean D entry | |
272 | add r0, r0, #CACHE_DLINESIZE | |
273 | cmp r0, r1 | |
274 | blo 1b | |
275 | mcr p15, 0, r0, c7, c10, 4 @ drain WB | |
6ebbf2ce | 276 | ret lr |
1da177e4 LT |
277 | |
/*
 * dma_flush_range(start, end)
 *
 * Clean and invalidate the specified virtual address range.
 *
 * - start - virtual start address
 * - end	- virtual end address
 */
SYM_TYPED_FUNC_START(arm922_dma_flush_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align down to a cache line
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
SYM_FUNC_END(arm922_dma_flush_range)
1da177e4 | 295 | |
a9c9147e RK |
/*
 * dma_map_area(start, size, dir)
 * - start - kernel virtual start address
 * - size	- size of region
 * - dir	- DMA direction
 *
 * Dispatches on dir: DMA_TO_DEVICE -> clean,
 * DMA_FROM_DEVICE -> invalidate, otherwise -> clean+invalidate.
 */
SYM_TYPED_FUNC_START(arm922_dma_map_area)
	add	r1, r1, r0			@ convert size to end address
	cmp	r2, #DMA_TO_DEVICE
	beq	arm922_dma_clean_range		@ dir == DMA_TO_DEVICE
	bcs	arm922_dma_inv_range		@ dir == DMA_FROM_DEVICE
	b	arm922_dma_flush_range		@ dir == DMA_BIDIRECTIONAL
SYM_FUNC_END(arm922_dma_map_area)
a9c9147e RK |
309 | |
/*
 * dma_unmap_area(start, size, dir)
 * - start - kernel virtual start address
 * - size	- size of region
 * - dir	- DMA direction
 *
 * Nothing to do on unmap for this cache type.
 */
SYM_TYPED_FUNC_START(arm922_dma_unmap_area)
	ret	lr
SYM_FUNC_END(arm922_dma_unmap_area)
a9c9147e | 319 | |
1036b895 | 320 | #endif /* !CONFIG_CPU_DCACHE_WRITETHROUGH */ |
1da177e4 | 321 | |
/*
 * cpu_arm922_dcache_clean_area(addr, size)
 *
 * Clean the D-cache lines covering the given region.
 * A no-op when the D-cache is write-through.
 */
SYM_TYPED_FUNC_START(cpu_arm922_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_WRITETHROUGH
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	ret	lr
SYM_FUNC_END(cpu_arm922_dcache_clean_area)
1da177e4 LT |
331 | |
/* =============================== PageTable ============================== */

/*
 * cpu_arm922_switch_mm(pgd)
 *
 * Set the translation base pointer to be as described by pgd.
 * The caches are virtually indexed, so the whole D-cache must be
 * cleaned and invalidated (and I-cache/TLBs invalidated) before
 * the new page table base is loaded.
 *
 * pgd: new page tables
 */
	.align	5
SYM_TYPED_FUNC_START(cpu_arm922_switch_mm)
#ifdef CONFIG_MMU
	mov	ip, #0
#ifdef CONFIG_CPU_DCACHE_WRITETHROUGH
	mcr	p15, 0, ip, c7, c6, 0		@ invalidate D cache
#else
	@ && 'Clean & Invalidate whole DCache'
	@ && Re-written to use Index Ops.
	@ && Uses registers r1, r3 and ip

	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 4 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean & invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 3 to 0
#endif
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mcr	p15, 0, r0, c2, c0, 0		@ load page table pointer
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
#endif
	ret	lr
SYM_FUNC_END(cpu_arm922_switch_mm)
1da177e4 LT |
367 | |
/*
 * cpu_arm922_set_pte_ext(ptep, pte, ext)
 *
 * Set a PTE and flush it out to memory.
 */
	.align	5
SYM_TYPED_FUNC_START(cpu_arm922_set_pte_ext)
#ifdef CONFIG_MMU
	armv3_set_pte_ext
	mov	r0, r0				@ nop: pipeline settle
	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
#endif /* CONFIG_MMU */
	ret	lr
SYM_FUNC_END(cpu_arm922_set_pte_ext)
1da177e4 | 383 | |
1da177e4 LT |
/*
 * __arm922_setup
 *
 * Early CPU setup: invalidate caches (and TLBs under MMU) and
 * compute the control-register value from arm922_crval.
 * Returns the value to write to the control register in r0.
 */
	.type	__arm922_setup, #function
__arm922_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7		@ invalidate I,D caches on v4
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer on v4
#ifdef CONFIG_MMU
	mcr	p15, 0, r0, c8, c7		@ invalidate I,D TLBs on v4
#endif
	adr	r5, arm922_crval
	ldmia	r5, {r5, r6}			@ r5 = clear mask, r6 = set mask
	mrc	p15, 0, r0, c1, c0		@ get control register v4
	bic	r0, r0, r5
	orr	r0, r0, r6
	ret	lr
	.size	__arm922_setup, . - __arm922_setup
399 | ||
400 | /* | |
401 | * R | |
402 | * .RVI ZFRS BLDP WCAM | |
403 | * ..11 0001 ..11 0101 | |
404 | * | |
405 | */ | |
22b19086 RK |
406 | .type arm922_crval, #object |
407 | arm922_crval: | |
408 | crval clear=0x00003f3f, mmuset=0x00003135, ucset=0x00001130 | |
1da177e4 LT |
409 | |
410 | __INITDATA | |
f3e7383f DM |
411 | @ define struct processor (see <asm/proc-fns.h> and proc-macros.S) |
412 | define_processor_functions arm922, dabort=v4t_early_abort, pabort=legacy_pabort | |
1da177e4 LT |
413 | |
414 | .section ".rodata" | |
415 | ||
f3e7383f DM |
416 | string cpu_arch_name, "armv4t" |
417 | string cpu_elf_name, "v4" | |
418 | string cpu_arm922_name, "ARM922T" | |
1da177e4 LT |
419 | |
420 | .align | |
421 | ||
790756c7 | 422 | .section ".proc.info.init", "a" |
1da177e4 LT |
423 | |
424 | .type __arm922_proc_info,#object | |
425 | __arm922_proc_info: | |
426 | .long 0x41009220 | |
427 | .long 0xff00fff0 | |
428 | .long PMD_TYPE_SECT | \ | |
429 | PMD_SECT_BUFFERABLE | \ | |
430 | PMD_SECT_CACHEABLE | \ | |
431 | PMD_BIT4 | \ | |
432 | PMD_SECT_AP_WRITE | \ | |
433 | PMD_SECT_AP_READ | |
8799ee9f RK |
434 | .long PMD_TYPE_SECT | \ |
435 | PMD_BIT4 | \ | |
436 | PMD_SECT_AP_WRITE | \ | |
437 | PMD_SECT_AP_READ | |
bf35706f | 438 | initfn __arm922_setup, __arm922_proc_info |
1da177e4 LT |
439 | .long cpu_arch_name |
440 | .long cpu_elf_name | |
441 | .long HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | |
442 | .long cpu_arm922_name | |
443 | .long arm922_processor_functions | |
444 | .long v4wbi_tlb_fns | |
445 | .long v4wb_user_fns | |
446 | #ifndef CONFIG_CPU_DCACHE_WRITETHROUGH | |
447 | .long arm922_cache_fns | |
448 | #else | |
449 | .long v4wt_cache_fns | |
450 | #endif | |
451 | .size __arm922_proc_info, . - __arm922_proc_info |