/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains ARM architecture-specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define pull		lsr
#define push		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define pull		lsl
#define push		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif
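
/*
 * Illustrative use (a sketch, not part of the original header): on either
 * endianness, extract byte 1 of the word in r2 into r0 with:
 *
 *	mov	r0, r2, get_byte_1
 *	and	r0, r0, #0xff
 */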

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
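
/*
 * Example (sketch): copy loops wrap prefetches in PLD() so that the pld
 * instruction simply disappears on pre-v5 builds:
 *
 *	PLD(	pld	[r1, #32]	)
 */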

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be worthwhile when the cache is not set to
 * write-allocate (this would need further testing on XScale when WA is
 * used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
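
/*
 * Example (hypothetical sketch, with a made-up local label): alignment
 * code is wrapped in CALGN() so it only assembles on Feroceon:
 *
 *	CALGN(	ands	ip, r0, #31	)
 *	CALGN(	bne	15f		)
 */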

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb	sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * Ideally the registers would be pushed and popped conditionally
	 * too, but the flags are clobbered by the bl in any case.
	 */
	stmdb	sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm

/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
	msr	cpsr_c, \oldcpsr
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm
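
/*
 * Typical pairing (illustrative sketch):
 *
 *	save_and_disable_irqs r9	@ r9 := CPSR, IRQs masked
 *	@ ... critical section ...
 *	restore_irqs r9			@ re-enable IRQs iff they were enabled
 */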

#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection
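
/*
 * Example (sketch): every USER() site must provide a local 9001 label
 * for the exception table entry to land on when the access faults:
 *
 * USER(	ldrt	r0, [r1]	)
 *	...
 * 9001:	mov	r0, #-EFAULT
 */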

#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)						\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
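
/*
 * Example (sketch): every ALT_SMP() must be immediately followed by a
 * matching ALT_UP() of the same width, so the UP fixup code can patch
 * over the SMP-only instruction at boot (see smp_dmb below):
 *
 *	ALT_SMP(dmb)
 *	ALT_UP(nop)
 */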

/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb)
	.else
	ALT_SMP(W(dmb))
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
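
/*
 * Usage note (illustrative): callers in ARM code pass the mode
 * explicitly, e.g. "smp_dmb arm"; callers that omit it (as Thumb-2
 * code does) fall through to the wide-encoding W() variants.
 */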

#ifdef CONFIG_THUMB2_KERNEL
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif
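
/*
 * Example (sketch): setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9
 * (the pre-v6 path of safe_svcmode_maskall below uses exactly this).
 */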

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts.  reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time;
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE		@ mode bits become zero iff in HYP
	tst	\reg, #MODE_MASK		@ Z set when entered in HYP mode
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f				@ not HYP: a CPSR write suffices
	orr	\reg, \reg, #PSR_A_BIT		@ HYP: also mask asynchronous aborts
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)			@ leave HYP via exception return
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
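
/*
 * Illustrative call site (a sketch): boot code runs, once per CPU and
 * before any mode-dependent state is touched,
 *
 *	safe_svcmode_maskall r0
 *
 * dropping a CPU that entered in HYP mode down to SVC.
 */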

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
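
/*
 * Example (sketch): load one word from user space at r0, post-
 * incrementing the pointer, with faults diverted to the default
 * local 9001 fixup label:
 *
 *	ldrusr	r3, r0, 4
 *	...
 * 9001:	@ fault fixup
 */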

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
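
/*
 * Example (sketch, with a made-up name): "string tag_str, \"hello\""
 * expands to a typed, sized object equivalent to:
 *
 * tag_str:	.asciz	"hello"
 */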

	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1	@ tmp = address of the last byte
	sbcccs	\tmp, \tmp, \limit	@ compare with limit unless the add overflowed
	bcs	\bad			@ fail on overflow or out-of-range
#endif
	.endm
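
/*
 * Usage sketch (illustrative): check a 4-byte user access at r0 against
 * the limit in r1, clobbering ip and branching to a local fixup label
 * on failure:
 *
 *	check_uaccess r0, 4, r1, ip, 9001f
 */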

#endif /* __ASM_ASSEMBLER_H__ */