Commit | Line | Data |
---|---|---|
0be7320a | 1 | /* |
7b7293ae | 2 | * Based on arch/arm/include/asm/assembler.h, arch/arm/mm/proc-macros.S |
0be7320a CM |
3 | * |
4 | * Copyright (C) 1996-2000 Russell King | |
5 | * Copyright (C) 2012 ARM Ltd. | |
6 | * | |
7 | * This program is free software; you can redistribute it and/or modify | |
8 | * it under the terms of the GNU General Public License version 2 as | |
9 | * published by the Free Software Foundation. | |
10 | * | |
11 | * This program is distributed in the hope that it will be useful, | |
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
14 | * GNU General Public License for more details. | |
15 | * | |
16 | * You should have received a copy of the GNU General Public License | |
17 | * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
18 | */ | |
19 | #ifndef __ASSEMBLY__ | |
20 | #error "Only include this from assembly code" | |
21 | #endif | |
22 | ||
f3e39273 MZ |
23 | #ifndef __ASM_ASSEMBLER_H |
24 | #define __ASM_ASSEMBLER_H | |
25 | ||
7b7293ae GL |
26 | #include <asm/asm-offsets.h> |
27 | #include <asm/pgtable-hwdef.h> | |
0be7320a | 28 | #include <asm/ptrace.h> |
2a283070 | 29 | #include <asm/thread_info.h> |
0be7320a | 30 | |
/*
 * Enable and disable interrupts: set/clear the I bit (bit 1 of the
 * DAIFSet/DAIFClr views of PSTATE.DAIF).
 */
	.macro	disable_irq
	msr	daifset, #2
	.endm

	.macro	enable_irq
	msr	daifclr, #2
	.endm
41 | ||
/*
 * Enable and disable debug exceptions: set/clear the D bit (bit 3 of the
 * DAIFSet/DAIFClr views of PSTATE.DAIF).
 */
	.macro	disable_dbg
	msr	daifset, #8
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm
52 | ||
/*
 * disable_step_tsk - clear MDSCR_EL1.SS (hardware single-step), but only
 * if TIF_SINGLESTEP is set in the task flags held in \flgs.
 *
 *	flgs:	register holding the task's thread_info flags
 *	tmp:	scratch register (corrupted)
 */
	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1			// clear MDSCR_EL1.SS (bit 0)
	msr	mdscr_el1, \tmp
	isb					// Synchronise with enable_dbg
9990:
	.endm
61 | ||
/*
 * enable_step_tsk - set MDSCR_EL1.SS (hardware single-step), but only if
 * TIF_SINGLESTEP is set in the task flags held in \flgs.  Debug
 * exceptions are masked first so the step cannot fire here; they are
 * left masked (NOTE(review): presumably re-enabled by the caller or on
 * exception return — confirm against call sites).
 *
 *	flgs:	register holding the task's thread_info flags
 *	tmp:	scratch register (corrupted)
 */
	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	disable_dbg
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1			// set MDSCR_EL1.SS (bit 0)
	msr	mdscr_el1, \tmp
9990:
	.endm
70 | ||
/*
 * Enable both debug exceptions and interrupts with a single write.
 * This is likely to be faster than two daifclr operations, since
 * writes to this register are self-synchronising.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)		// clear D and I bits together
	.endm
79 | ||
/*
 * SMP data memory barrier: thin wrapper around dmb taking the barrier
 * domain/type as an argument (e.g. ish, sy).
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm
86 | ||
/*
 * Emit an exception table entry: a pair of 32-bit offsets, each
 * relative to its own location, pointing at the faulting instruction
 * and its fixup handler.
 */
	.macro	_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm
96 | ||
/*
 * USER(l, x...) - emit instruction(s) x that access user memory, with an
 * exception table entry branching to fixup label l on fault.
 */
#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l
/*
 * Register aliases.
 */
lr	.req	x30		// link register
/*
 * Vector entry: each vector slot is 128 bytes (2^7), hence the
 * alignment before the branch to the real handler.
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm
/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif
131 | ||
/*
 * regs_to_64 - construct a 64-bit value from two 32-bit registers:
 * \rd = (\hbits << 32) | \lbits.  On big endian configurations the
 * argument order is swapped, since the halves arrive in the opposite
 * registers.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm
f3e39273 | 144 | |
/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
/*
 * adr_l - load the address of \sym via adrp + add :lo12:.
 *
 *	dst:	destination register (64 bit wide)
 *	sym:	name of the symbol
 *	tmp:	optional scratch register to be used if <dst> == sp, which
 *		is not allowed in an adrp instruction
 */
	.macro	adr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.else
	adrp	\tmp, \sym
	add	\dst, \tmp, :lo12:\sym
	.endif
	.endm
164 | ||
/*
 * ldr_l - PC-relative load of \sym into \dst via adrp + ldr :lo12:.
 *
 *	dst:	destination register (32 or 64 bit wide)
 *	sym:	name of the symbol
 *	tmp:	optional 64-bit scratch register to be used if <dst> is a
 *		32-bit wide register, in which case it cannot be used to hold
 *		the address
 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm
181 | ||
/*
 * str_l - PC-relative store of \src to \sym via adrp + str :lo12:.
 *
 *	src:	source register (32 or 64 bit wide)
 *	sym:	name of the symbol
 *	tmp:	mandatory 64-bit scratch register to calculate the address
 *		while <src> needs to be preserved
 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm
192 | ||
/*
 * this_cpu_ptr - compute this CPU's address of a per-cpu variable by
 * adding the per-cpu offset (TPIDR_EL1) to the variable's link address.
 *
 *	sym:	the name of the per-cpu variable
 *	reg:	result of per_cpu(sym, smp_processor_id())
 *	tmp:	scratch register (corrupted)
 */
	.macro	this_cpu_ptr, sym, reg, tmp
	adr_l	\reg, \sym
	mrs	\tmp, tpidr_el1
	add	\reg, \reg, \tmp
	.endm
203 | ||
/*
 * vma_vm_mm - get mm pointer from vma pointer (vma->vm_mm)
 */
	.macro	vma_vm_mm, rd, rn
	ldr	\rd, [\rn, #VMA_VM_MM]
	.endm
210 | ||
/*
 * mmid - get context id from mm pointer (mm->context.id)
 */
	.macro	mmid, rd, rn
	ldr	\rd, [\rn, #MM_CONTEXT_ID]
	.endm
217 | ||
/*
 * dcache_line_size - get the minimum D-cache line size from the CTR
 * register: line size in bytes = 4 << CTR_EL0.DminLine (bits [19:16]).
 */
	.macro	dcache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	ubfm	\tmp, \tmp, #16, #19		// extract DminLine
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm
227 | ||
/*
 * icache_line_size - get the minimum I-cache line size from the CTR
 * register: line size in bytes = 4 << CTR_EL0.IminLine (bits [3:0]).
 */
	.macro	icache_line_size, reg, tmp
	mrs	\tmp, ctr_el0			// read CTR
	and	\tmp, \tmp, #0xf		// extract IminLine
	mov	\reg, #4			// bytes per word
	lsl	\reg, \reg, \tmp		// actual cache line size
	.endm
237 | ||
/*
 * tcr_set_idmap_t0sz - update TCR.T0SZ so that we can load the ID map.
 * A no-op with 48-bit VA, where the default T0SZ already covers the
 * ID map.
 */
	.macro	tcr_set_idmap_t0sz, valreg, tmpreg
#ifndef CONFIG_ARM64_VA_BITS_48
	ldr_l	\tmpreg, idmap_t0sz
	bfi	\valreg, \tmpreg, #TCR_T0SZ_OFFSET, #TCR_TxSZ_WIDTH
#endif
	.endm
247 | ||
/*
 * Macro to perform a data cache maintenance operation for the interval
 * [kaddr, kaddr + size), one cache line at a time.
 *
 *	op:		operation passed to the dc instruction
 *	domain:		domain used in the dsb instruction
 *	kaddr:		starting virtual address of the region
 *	size:		size of the region
 *	Corrupts:	kaddr, size, tmp1, tmp2
 */
	.macro	dcache_by_line_op op, domain, kaddr, size, tmp1, tmp2
	dcache_line_size \tmp1, \tmp2
	add	\size, \kaddr, \size		// size := end address
	sub	\tmp2, \tmp1, #1		// line-size mask
	bic	\kaddr, \kaddr, \tmp2		// align start down to a line
9998:	dc	\op, \kaddr
	add	\kaddr, \kaddr, \tmp1
	cmp	\kaddr, \size
	b.lo	9998b
	dsb	\domain
	.endm
269 | ||
/*
 * reset_pmuserenr_el0 - reset PMUSERENR_EL0 if PMUv3 present, so that
 * EL0 cannot access the PMU.  Corrupts \tmpreg.
 */
	.macro	reset_pmuserenr_el0, tmpreg
	mrs	\tmpreg, id_aa64dfr0_el1	// Check ID_AA64DFR0_EL1 PMUVer
	sbfx	\tmpreg, \tmpreg, #8, #4	// sign-extend: 0xf -> -1
	cmp	\tmpreg, #1			// Skip if no PMU present
	b.lt	9000f
	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
9000:
	.endm
281 | ||
/*
 * Annotate a function as position independent, i.e., safe to be called
 * before the kernel virtual mapping is activated.  Emits a __pi_-prefixed
 * alias with its own symbol type and size alongside the normal ENDPROC.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type 	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)
292 | ||
/*
 * Emit a 64-bit absolute little endian symbol reference in a way that
 * ensures that it will be resolved at build time, even when building a
 * PIE binary. This requires cooperation from the linker script, which
 * must emit the lo32/hi32 halves individually.
 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm
303 | ||
/*
 * mov_q - move an immediate constant into a 64-bit register using
 *	   between 2 and 4 movz/movk instructions (depending on the
 *	   magnitude and sign of the operand).  Values whose top bits are
 *	   all-zero or all-one use the sign-extending :abs_gN_s: forms to
 *	   save instructions.
 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val		// fits in a signed 32-bit value
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val		// fits in a signed 48-bit value
	.else
	movz	\reg, :abs_g3:\val		// full 64-bit value
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm
323 | ||
f3e39273 | 324 | #endif /* __ASM_ASSEMBLER_H */ |