arch/arm64/include/asm/assembler.h
/*
 * Based on arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#ifndef __ASM_ASSEMBLER_H
#define __ASM_ASSEMBLER_H

#include <asm/ptrace.h>
#include <asm/thread_info.h>

/*
 * Enable and disable interrupts.
 */
	.macro	disable_irq
	msr	daifset, #2
	.endm

	.macro	enable_irq
	msr	daifclr, #2
	.endm

/*
 * Enable and disable debug exceptions.
 */
	.macro	disable_dbg
	msr	daifset, #8
	.endm

	.macro	enable_dbg
	msr	daifclr, #8
	.endm

	.macro	disable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	mrs	\tmp, mdscr_el1
	bic	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
	isb	// Synchronise with enable_dbg
9990:
	.endm

	.macro	enable_step_tsk, flgs, tmp
	tbz	\flgs, #TIF_SINGLESTEP, 9990f
	disable_dbg
	mrs	\tmp, mdscr_el1
	orr	\tmp, \tmp, #1
	msr	mdscr_el1, \tmp
9990:
	.endm

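/*
 * Illustrative usage sketch (not part of this header): callers pass the
 * task's thread_info flags plus a scratch register, e.g. from the kernel
 * entry/exit path. The "tsk" alias and TI_FLAGS offset below are assumed
 * to be provided by the caller's context; register choices are arbitrary.
 *
 *	ldr	x1, [tsk, #TI_FLAGS]	// assumed thread_info flags load
 *	disable_step_tsk x1, x2		// clear MDSCR_EL1.SS if single-stepping
 *	...
 *	enable_step_tsk x1, x2		// re-arm MDSCR_EL1.SS before return
 */
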
/*
 * Enable both debug exceptions and interrupts. This is likely to be
 * faster than two daifclr operations, since writes to this register
 * are self-synchronising.
 */
	.macro	enable_dbg_and_irq
	msr	daifclr, #(8 | 2)
	.endm

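/*
 * For reference, a sketch of the two-write sequence this macro replaces.
 * In the daifclr immediate, bit 3 (#8) is D (debug) and bit 1 (#2) is I
 * (IRQ), so a single write clears both:
 *
 *	enable_dbg			// msr daifclr, #8
 *	enable_irq			// msr daifclr, #2
 */
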
/*
 * SMP data memory barrier
 */
	.macro	smp_dmb, opt
	dmb	\opt
	.endm

/*
 * Emit an entry into the exception table
 */
	.macro		_asm_extable, from, to
	.pushsection	__ex_table, "a"
	.align		3
	.long		(\from - .), (\to - .)
	.popsection
	.endm

#define USER(l, x...)				\
9999:	x;					\
	_asm_extable	9999b, l

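/*
 * Illustrative sketch (labels and registers are hypothetical): USER() tags
 * a single user-space access so that a fault on it is fixed up via the
 * exception table instead of oopsing. A fault on the tagged load below
 * branches to the local fixup label.
 *
 * USER(9f, ldr w1, [x0])		// x0: user pointer (assumed checked)
 *	...
 * 9:	mov	w1, #0			// fixup path: substitute a default
 */
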
/*
 * Register aliases.
 */
lr	.req	x30		// link register

/*
 * Vector entry
 */
	.macro	ventry	label
	.align	7
	b	\label
	.endm

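/*
 * Illustrative sketch (handler names are hypothetical): each ventry is
 * aligned to 128 bytes (.align 7), matching the spacing of the AArch64
 * exception vector table, whose base must itself be 2 KB aligned.
 *
 *	.align	11
 * my_vectors:
 *	ventry	my_sync_handler		// synchronous exceptions
 *	ventry	my_irq_handler		// IRQs
 *	...
 */
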
/*
 * Select code when configured for BE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_BE(code...) code
#else
#define CPU_BE(code...)
#endif

/*
 * Select code when configured for LE.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN
#define CPU_LE(code...)
#else
#define CPU_LE(code...) code
#endif

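/*
 * Illustrative sketch (register choice is arbitrary): emit an instruction
 * only for the endianness that needs it, e.g. byte-swap a value read from
 * a little-endian data structure only when the kernel is big-endian:
 *
 * CPU_BE(	rev	w0, w0	)	// assembled only when CONFIG_CPU_BIG_ENDIAN
 */
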
/*
 * Define a macro that constructs a 64-bit value by concatenating two
 * 32-bit registers. Note that on big endian systems the order of the
 * registers is swapped.
 */
#ifndef CONFIG_CPU_BIG_ENDIAN
	.macro	regs_to_64, rd, lbits, hbits
#else
	.macro	regs_to_64, rd, hbits, lbits
#endif
	orr	\rd, \lbits, \hbits, lsl #32
	.endm

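/*
 * Illustrative sketch (registers are hypothetical): combine a 64-bit value
 * that arrived as two zero-extended 32-bit halves in consecutive registers.
 * The swapped parameter lists above mean the same call site is correct for
 * both endiannesses.
 *
 *	regs_to_64	x2, x2, x3	// x2/x3 halves -> one 64-bit value in x2
 */
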
/*
 * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
 * <symbol> is within the range +/- 4 GB of the PC.
 */
	/*
	 * @dst: destination register (64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional scratch register to be used if <dst> == sp, which
	 *       is not allowed in an adrp instruction
	 */
	.macro	adr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	add	\dst, \dst, :lo12:\sym
	.else
	adrp	\tmp, \sym
	add	\dst, \tmp, :lo12:\sym
	.endif
	.endm

	/*
	 * @dst: destination register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
	 *       32-bit wide register, in which case it cannot be used to hold
	 *       the address
	 */
	.macro	ldr_l, dst, sym, tmp=
	.ifb	\tmp
	adrp	\dst, \sym
	ldr	\dst, [\dst, :lo12:\sym]
	.else
	adrp	\tmp, \sym
	ldr	\dst, [\tmp, :lo12:\sym]
	.endif
	.endm

	/*
	 * @src: source register (32 or 64 bit wide)
	 * @sym: name of the symbol
	 * @tmp: mandatory 64-bit scratch register to calculate the address
	 *       while <src> needs to be preserved.
	 */
	.macro	str_l, src, sym, tmp
	adrp	\tmp, \sym
	str	\src, [\tmp, :lo12:\sym]
	.endm

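	/*
	 * Illustrative sketch (symbol and register names are hypothetical):
	 * each pseudo-op expands to an adrp plus one :lo12:-relocated
	 * instruction, giving +/- 4 GB of reach without a literal pool.
	 *
	 *	adr_l	x0, my_table		// x0 = &my_table
	 *	ldr_l	w1, my_word, x2		// w1 = my_word (x2 holds the address)
	 *	str_l	x3, my_var, x4		// my_var = x3 (x4 is scratch)
	 */
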
	/*
	 * @sym: The name of the per-cpu variable
	 * @reg: Result of per_cpu(sym, smp_processor_id())
	 * @tmp: scratch register
	 */
	.macro	this_cpu_ptr, sym, reg, tmp
	adr_l	\reg, \sym
	mrs	\tmp, tpidr_el1
	add	\reg, \reg, \tmp
	.endm

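	/*
	 * Illustrative sketch (variable and register names are hypothetical):
	 * the macro adds this CPU's per-cpu offset, kept in TPIDR_EL1, to the
	 * link-time address of the variable.
	 *
	 *	this_cpu_ptr my_percpu_buf, x25, x26	// x25 = this CPU's copy
	 */
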
/*
 * Annotate a function as position independent, i.e., safe to be called before
 * the kernel virtual mapping is activated.
 */
#define ENDPIPROC(x)			\
	.globl	__pi_##x;		\
	.type	__pi_##x, %function;	\
	.set	__pi_##x, x;		\
	.size	__pi_##x, . - x;	\
	ENDPROC(x)

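/*
 * Illustrative sketch (function name is hypothetical): ending a routine with
 * ENDPIPROC() instead of ENDPROC() additionally emits a __pi_-prefixed alias
 * that early, position-independent code can call.
 *
 * ENTRY(my_helper)
 *	...
 *	ret
 * ENDPIPROC(my_helper)		// also defines __pi_my_helper
 */
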
	/*
	 * Emit a 64-bit absolute little endian symbol reference in a way that
	 * ensures that it will be resolved at build time, even when building a
	 * PIE binary. This requires cooperation from the linker script, which
	 * must emit the lo32/hi32 halves individually.
	 */
	.macro	le64sym, sym
	.long	\sym\()_lo32
	.long	\sym\()_hi32
	.endm

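	/*
	 * Illustrative sketch (symbol name is hypothetical): the two 32-bit
	 * halves \sym\()_lo32 and \sym\()_hi32 must be defined elsewhere, as
	 * described above, for the reference to resolve at build time.
	 *
	 *	le64sym	_my_size_le	// emits _my_size_le_lo32, _my_size_le_hi32
	 */
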
	/*
	 * mov_q - move an immediate constant into a 64-bit register using
	 *         between 2 and 4 movz/movk instructions (depending on the
	 *         magnitude and sign of the operand)
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm

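	/*
	 * Worked examples (constants chosen for illustration only):
	 *
	 *	mov_q	x0, 0x12345678		// fits in 32 bits: 2 instructions
	 *					// (movz g1_s + movk g0_nc)
	 *	mov_q	x1, 0xffff000012345678	// needs all four 16-bit chunks:
	 *					// movz g3 + 3 x movk
	 */
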
#endif	/* __ASM_ASSEMBLER_H */