Commit | Line | Data |
---|---|---|
1da177e4 | 1 | /* |
4baa9922 | 2 | * arch/arm/include/asm/assembler.h |
1da177e4 LT |
3 | * |
4 | * Copyright (C) 1996-2000 Russell King | |
5 | * | |
6 | * This program is free software; you can redistribute it and/or modify | |
7 | * it under the terms of the GNU General Public License version 2 as | |
8 | * published by the Free Software Foundation. | |
9 | * | |
10 | * This file contains arm architecture specific defines | |
11 | * for the different processors. | |
12 | * | |
13 | * Do not include any C declarations in this file - it is included by | |
14 | * assembler source. | |
15 | */ | |
2bc58a6f MD |
16 | #ifndef __ASM_ASSEMBLER_H__ |
17 | #define __ASM_ASSEMBLER_H__ | |
18 | ||
1da177e4 LT |
19 | #ifndef __ASSEMBLY__ |
20 | #error "Only include this from assembly code" | |
21 | #endif | |
22 | ||
23 | #include <asm/ptrace.h> | |
247055aa | 24 | #include <asm/domain.h> |
1da177e4 LT |
25 | |
26 | /* | |
27 | * Endian independent macros for shifting bytes within registers. | |
28 | */ | |
29 | #ifndef __ARMEB__ | |
30 | #define pull lsr | |
31 | #define push lsl | |
32 | #define get_byte_0 lsl #0 | |
33 | #define get_byte_1 lsr #8 | |
34 | #define get_byte_2 lsr #16 | |
35 | #define get_byte_3 lsr #24 | |
36 | #define put_byte_0 lsl #0 | |
37 | #define put_byte_1 lsl #8 | |
38 | #define put_byte_2 lsl #16 | |
39 | #define put_byte_3 lsl #24 | |
40 | #else | |
41 | #define pull lsl | |
42 | #define push lsr | |
43 | #define get_byte_0 lsr #24 | |
44 | #define get_byte_1 lsr #16 | |
45 | #define get_byte_2 lsr #8 | |
46 | #define get_byte_3 lsl #0 | |
47 | #define put_byte_0 lsl #24 | |
48 | #define put_byte_1 lsl #16 | |
49 | #define put_byte_2 lsl #8 | |
50 | #define put_byte_3 lsl #0 | |
51 | #endif | |
52 | ||
53 | /* | |
54 | * Data preload for architectures that support it | |
55 | */ | |
56 | #if __LINUX_ARM_ARCH__ >= 5 | |
57 | #define PLD(code...) code | |
58 | #else | |
59 | #define PLD(code...) | |
60 | #endif | |
61 | ||
2239aff6 NP |
62 | /* |
63 | * This can be used to enable code to cacheline align the destination | |
64 | * pointer when bulk writing to memory. Experiments on StrongARM and | |
65 | * XScale didn't show this to be a worthwhile thing to do when the cache is not |
66 | * set to write-allocate (this would need further testing on XScale when WA | |
67 | * is used). | |
68 | * | |
69 | * On Feroceon there is much to gain however, regardless of cache mode. | |
70 | */ | |
71 | #ifdef CONFIG_CPU_FEROCEON | |
72 | #define CALGN(code...) code | |
73 | #else | |
74 | #define CALGN(code...) | |
75 | #endif | |
76 | ||
1da177e4 | 77 | /* |
9c42954d | 78 | * Enable and disable interrupts |
1da177e4 | 79 | */ |
59d1ff3b | 80 | #if __LINUX_ARM_ARCH__ >= 6 |
0d928b0b | 81 | .macro disable_irq_notrace |
59d1ff3b | 82 | cpsid i |
9c42954d RK |
83 | .endm |
84 | ||
0d928b0b | 85 | .macro enable_irq_notrace |
9c42954d RK |
86 | cpsie i |
87 | .endm | |
59d1ff3b | 88 | #else |
0d928b0b | 89 | .macro disable_irq_notrace |
9c42954d RK |
90 | msr cpsr_c, #PSR_I_BIT | SVC_MODE |
91 | .endm | |
92 | ||
0d928b0b | 93 | .macro enable_irq_notrace |
9c42954d RK |
94 | msr cpsr_c, #SVC_MODE |
95 | .endm | |
59d1ff3b | 96 | #endif |
9c42954d | 97 | |
0d928b0b UKK |
98 | .macro asm_trace_hardirqs_off |
99 | #if defined(CONFIG_TRACE_IRQFLAGS) | |
100 | stmdb sp!, {r0-r3, ip, lr} | |
101 | bl trace_hardirqs_off | |
102 | ldmia sp!, {r0-r3, ip, lr} | |
103 | #endif | |
104 | .endm | |
105 | ||
106 | .macro asm_trace_hardirqs_on_cond, cond | |
107 | #if defined(CONFIG_TRACE_IRQFLAGS) | |
108 | /* | |
109 | * ideally the registers would be pushed and popped conditionally, but |
110 | * after the bl the flags are certainly clobbered anyway | |
111 | */ | |
112 | stmdb sp!, {r0-r3, ip, lr} | |
113 | bl\cond trace_hardirqs_on | |
114 | ldmia sp!, {r0-r3, ip, lr} | |
115 | #endif | |
116 | .endm | |
117 | ||
118 | .macro asm_trace_hardirqs_on | |
119 | asm_trace_hardirqs_on_cond al | |
120 | .endm | |
121 | ||
122 | .macro disable_irq | |
123 | disable_irq_notrace | |
124 | asm_trace_hardirqs_off | |
125 | .endm | |
126 | ||
127 | .macro enable_irq | |
128 | asm_trace_hardirqs_on | |
129 | enable_irq_notrace | |
130 | .endm | |
9c42954d RK |
131 | /* |
132 | * Save the current IRQ state and disable IRQs. Note that this macro | |
133 | * assumes FIQs are enabled, and that the processor is in SVC mode. | |
134 | */ | |
135 | .macro save_and_disable_irqs, oldcpsr | |
136 | mrs \oldcpsr, cpsr | |
137 | disable_irq | |
1da177e4 LT |
138 | .endm |
139 | ||
140 | /* | |
141 | * Restore interrupt state previously stored in a register. We don't | |
142 | * guarantee that this will preserve the flags. | |
143 | */ | |
0d928b0b | 144 | .macro restore_irqs_notrace, oldcpsr |
1da177e4 LT |
145 | msr cpsr_c, \oldcpsr |
146 | .endm | |
147 | ||
0d928b0b UKK |
148 | .macro restore_irqs, oldcpsr |
149 | tst \oldcpsr, #PSR_I_BIT | |
150 | asm_trace_hardirqs_on_cond eq | |
151 | restore_irqs_notrace \oldcpsr | |
152 | .endm | |
153 | ||
1da177e4 LT |
154 | #define USER(x...) \ |
155 | 9999: x; \ | |
4260415f | 156 | .pushsection __ex_table,"a"; \ |
1da177e4 LT |
157 | .align 3; \ |
158 | .long 9999b,9001f; \ | |
4260415f | 159 | .popsection |
bac4e960 | 160 | |
f00ec48f RK |
161 | #ifdef CONFIG_SMP |
162 | #define ALT_SMP(instr...) \ | |
163 | 9998: instr | |
ed3768a8 DM |
164 | /* |
165 | * Note: if you get assembler errors from ALT_UP() when building with | |
166 | * CONFIG_THUMB2_KERNEL, you almost certainly need to use | |
167 | * ALT_SMP( W(instr) ... ) | |
168 | */ | |
f00ec48f RK |
169 | #define ALT_UP(instr...) \ |
170 | .pushsection ".alt.smp.init", "a" ;\ | |
171 | .long 9998b ;\ | |
ed3768a8 DM |
172 | 9997: instr ;\ |
173 | .if . - 9997b != 4 ;\ | |
174 | .error "ALT_UP() content must assemble to exactly 4 bytes";\ | |
175 | .endif ;\ | |
f00ec48f RK |
176 | .popsection |
177 | #define ALT_UP_B(label) \ | |
178 | .equ up_b_offset, label - 9998b ;\ | |
179 | .pushsection ".alt.smp.init", "a" ;\ | |
180 | .long 9998b ;\ | |
ed3768a8 | 181 | W(b) . + up_b_offset ;\ |
f00ec48f RK |
182 | .popsection |
183 | #else | |
184 | #define ALT_SMP(instr...) | |
185 | #define ALT_UP(instr...) instr | |
186 | #define ALT_UP_B(label) b label | |
187 | #endif | |
188 | ||
bac4e960 RK |
189 | /* |
190 | * SMP data memory barrier | |
191 | */ | |
ed3768a8 | 192 | .macro smp_dmb mode |
bac4e960 RK |
193 | #ifdef CONFIG_SMP |
194 | #if __LINUX_ARM_ARCH__ >= 7 | |
ed3768a8 | 195 | .ifeqs "\mode","arm" |
f00ec48f | 196 | ALT_SMP(dmb) |
ed3768a8 DM |
197 | .else |
198 | ALT_SMP(W(dmb)) | |
199 | .endif | |
bac4e960 | 200 | #elif __LINUX_ARM_ARCH__ == 6 |
f00ec48f RK |
201 | ALT_SMP(mcr p15, 0, r0, c7, c10, 5) @ dmb |
202 | #else | |
203 | #error Incompatible SMP platform | |
bac4e960 | 204 | #endif |
ed3768a8 | 205 | .ifeqs "\mode","arm" |
f00ec48f | 206 | ALT_UP(nop) |
ed3768a8 DM |
207 | .else |
208 | ALT_UP(W(nop)) | |
209 | .endif | |
bac4e960 RK |
210 | #endif |
211 | .endm | |
b86040a5 CM |
212 | |
213 | #ifdef CONFIG_THUMB2_KERNEL | |
214 | .macro setmode, mode, reg | |
215 | mov \reg, #\mode | |
216 | msr cpsr_c, \reg | |
217 | .endm | |
218 | #else | |
219 | .macro setmode, mode, reg | |
220 | msr cpsr_c, #\mode | |
221 | .endm | |
222 | #endif | |
8b592783 CM |
223 | |
224 | /* | |
225 | * STRT/LDRT access macros with ARM and Thumb-2 variants | |
226 | */ | |
227 | #ifdef CONFIG_THUMB2_KERNEL | |
228 | ||
247055aa | 229 | .macro usraccoff, instr, reg, ptr, inc, off, cond, abort, t=T() |
8b592783 CM |
230 | 9999: |
231 | .if \inc == 1 | |
247055aa | 232 | \instr\cond\()b\()\t\().w \reg, [\ptr, #\off] |
8b592783 | 233 | .elseif \inc == 4 |
247055aa | 234 | \instr\cond\()\t\().w \reg, [\ptr, #\off] |
8b592783 CM |
235 | .else |
236 | .error "Unsupported inc macro argument" | |
237 | .endif | |
238 | ||
4260415f | 239 | .pushsection __ex_table,"a" |
8b592783 CM |
240 | .align 3 |
241 | .long 9999b, \abort | |
4260415f | 242 | .popsection |
8b592783 CM |
243 | .endm |
244 | ||
245 | .macro usracc, instr, reg, ptr, inc, cond, rept, abort | |
246 | @ explicit IT instruction needed because of the label | |
247 | @ introduced by the USER macro | |
248 | .ifnc \cond,al | |
249 | .if \rept == 1 | |
250 | itt \cond | |
251 | .elseif \rept == 2 | |
252 | ittt \cond | |
253 | .else | |
254 | .error "Unsupported rept macro argument" | |
255 | .endif | |
256 | .endif | |
257 | ||
258 | @ Slightly optimised to avoid incrementing the pointer twice | |
259 | usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort | |
260 | .if \rept == 2 | |
1142b71d | 261 | usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort |
8b592783 CM |
262 | .endif |
263 | ||
264 | add\cond \ptr, #\rept * \inc | |
265 | .endm | |
266 | ||
267 | #else /* !CONFIG_THUMB2_KERNEL */ | |
268 | ||
247055aa | 269 | .macro usracc, instr, reg, ptr, inc, cond, rept, abort, t=T() |
8b592783 CM |
270 | .rept \rept |
271 | 9999: | |
272 | .if \inc == 1 | |
247055aa | 273 | \instr\cond\()b\()\t \reg, [\ptr], #\inc |
8b592783 | 274 | .elseif \inc == 4 |
247055aa | 275 | \instr\cond\()\t \reg, [\ptr], #\inc |
8b592783 CM |
276 | .else |
277 | .error "Unsupported inc macro argument" | |
278 | .endif | |
279 | ||
4260415f | 280 | .pushsection __ex_table,"a" |
8b592783 CM |
281 | .align 3 |
282 | .long 9999b, \abort | |
4260415f | 283 | .popsection |
8b592783 CM |
284 | .endr |
285 | .endm | |
286 | ||
287 | #endif /* CONFIG_THUMB2_KERNEL */ | |
288 | ||
289 | .macro strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f | |
290 | usracc str, \reg, \ptr, \inc, \cond, \rept, \abort | |
291 | .endm | |
292 | ||
293 | .macro ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f | |
294 | usracc ldr, \reg, \ptr, \inc, \cond, \rept, \abort | |
295 | .endm | |
2bc58a6f | 296 | #endif /* __ASM_ASSEMBLER_H__ */ |