x86: relocate_kernel - use predefined macros for page attributes
[linux-2.6-block.git] arch/x86/kernel/relocate_kernel_64.S
/*
 * relocate_kernel.S - put the kernel image in place to boot
 * Copyright (C) 2002-2005 Eric Biederman <ebiederm@xmission.com>
 *
 * This source code is licensed under the GNU General Public License,
 * Version 2. See the file COPYING for more details.
 */

#include <linux/linkage.h>
#include <asm/page.h>
#include <asm/kexec.h>
#include <asm/processor-flags.h>
#include <asm/pgtable.h>

/*
 * Must be relocatable PIC code callable as a C function
 */

#define PTR(x) (x << 3)
#define PAGE_ATTR (_PAGE_PRESENT | _PAGE_RW | _PAGE_ACCESSED | _PAGE_DIRTY)
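/*
 * PTR(x) turns a page_list slot index (the PA_ and VA_ constants used
 * below, presumably provided by <asm/kexec.h>, which is included above)
 * into a byte offset: each slot holds one 8-byte pointer, hence x << 3.
 * PAGE_ATTR carries the attribute bits OR'ed into every page table
 * entry built below: present, writable, accessed and dirty.
 */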

	.text
	.align PAGE_SIZE
	.code64
	.globl relocate_kernel
relocate_kernel:
	/* %rdi indirection_page
	 * %rsi page_list
	 * %rdx start address
	 */

	/* map the control page at its virtual address */

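	/*
	 * The stores below walk the page tables by hand.  %r10 starts as
	 * a mask for virtual address bits 47..39 (the PGD index) and %cl
	 * as the shift 39 - 3; the "- 3" turns the index into a byte
	 * offset, since each entry is 8 bytes.  Every level then shifts
	 * both down by 9 bits.  Roughly, each store is (illustration only,
	 * not part of the build):
	 *
	 *	table[(vaddr & mask) >> level_shift] = next_table_pa | PAGE_ATTR;
	 *
	 * with level_shift = 39, 30, 21, 12 for the PGD, PUD, PMD and PTE
	 * levels.
	 */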
	movq	$0x0000ff8000000000, %r10	/* mask */
	mov	$(39 - 3), %cl			/* bits to shift */
	movq	PTR(VA_CONTROL_PAGE)(%rsi), %r11 /* address to map */

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PGD)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PUD_0)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PUD_0)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PMD_0)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PMD_0)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PTE_0)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PTE_0)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_CONTROL_PAGE)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	/* identity map the control page at its physical address */

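	/*
	 * Same four-level walk as above, but this time the address being
	 * mapped is the control page's physical address and the _1 set of
	 * tables is used, so the control page is also reachable through an
	 * identity mapping once these tables are live.
	 */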
	movq	$0x0000ff8000000000, %r10	/* mask */
	mov	$(39 - 3), %cl			/* bits to shift */
	movq	PTR(PA_CONTROL_PAGE)(%rsi), %r11 /* address to map */

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PGD)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PUD_1)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PUD_1)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PMD_1)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PMD_1)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_PTE_1)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

	shrq	$9, %r10
	sub	$9, %cl

	movq	%r11, %r9
	andq	%r10, %r9
	shrq	%cl, %r9

	movq	PTR(VA_PTE_1)(%rsi), %r8
	addq	%r8, %r9
	movq	PTR(PA_CONTROL_PAGE)(%rsi), %r8
	orq	$PAGE_ATTR, %r8
	movq	%r8, (%r9)

relocate_new_kernel:
	/* %rdi indirection_page
	 * %rsi page_list
	 * %rdx start address
	 */

	/* zero out flags, and disable interrupts */
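	/* Clearing RFLAGS also clears DF, so the rep movsq copies below
	 * run in the forward direction.
	 */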
	pushq $0
	popfq

	/* get physical address of control page now */
	/* this is impossible after page table switch */
	movq	PTR(PA_CONTROL_PAGE)(%rsi), %r8

	/* get physical address of page table now too */
	movq	PTR(PA_TABLE_PAGE)(%rsi), %rcx
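	/*
	 * %rcx now holds the physical address of the identity-mapped
	 * table set prepared by the kexec setup code; it is loaded into
	 * %cr3 at "Switch to the identity mapped page tables" below.
	 */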

	/* switch to new set of page tables */
	movq	PTR(PA_PGD)(%rsi), %r9
	movq	%r9, %cr3

	/* setup a new stack at the end of the physical control page */
	lea	PAGE_SIZE(%r8), %rsp

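	/*
	 * (identity_mapped - relocate_kernel) below is the offset of that
	 * label within this code, which the kexec setup code has copied
	 * into the control page, so the addq leaves the physical address
	 * of identity_mapped in %r8; pushq/ret then acts as an absolute
	 * jump to it.
	 */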
	/* jump to identity mapped page */
	addq	$(identity_mapped - relocate_kernel), %r8
	pushq	%r8
	ret

identity_mapped:
	/* store the start address on the stack */
	pushq	%rdx

	/* Set cr0 to a known state:
	 * - Paging enabled
	 * - Alignment check disabled
	 * - Write protect disabled
	 * - No task switch
	 * - Don't do FP software emulation.
	 * - Protected mode enabled
	 */
	movq	%cr0, %rax
	andq	$~(X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %rax
	orl	$(X86_CR0_PG | X86_CR0_PE), %eax
	movq	%rax, %cr0

	/* Set cr4 to a known state:
	 * - physical address extension enabled
	 */
	movq	$X86_CR4_PAE, %rax
	movq	%rax, %cr4

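	/*
	 * The jump to the next instruction below is presumably here to
	 * discard prefetched instructions after the %cr0/%cr4 changes;
	 * it has no other effect.
	 */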
	jmp 1f
1:

	/* Switch to the identity mapped page tables,
	 * and flush the TLB.
	 */
	movq	%rcx, %cr3

	/* Do the copies */
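	/*
	 * The indirection page is a list of 8-byte entries; each entry is
	 * a page-aligned physical address with flag bits in the low bits
	 * (matching the IND_ flags in the kexec headers):
	 *   0x1  destination page - copy following source pages here
	 *   0x2  indirection page - continue reading entries from it
	 *   0x4  done             - stop
	 *   0x8  source page      - copy one page to the destination,
	 *                           which advances by PAGE_SIZE per copy
	 */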
	movq	%rdi, %rcx	/* Put the indirection_page in %rcx */
	xorq	%rdi, %rdi
	xorq	%rsi, %rsi
	jmp	1f

0:	/* top, read another word from the indirection page */

	movq	(%rbx), %rcx
	addq	$8, %rbx
1:
	testq	$0x1, %rcx	/* is it a destination page? */
	jz	2f
	movq	%rcx, %rdi
	andq	$0xfffffffffffff000, %rdi
	jmp	0b
2:
	testq	$0x2, %rcx	/* is it an indirection page? */
	jz	2f
	movq	%rcx, %rbx
	andq	$0xfffffffffffff000, %rbx
	jmp	0b
2:
	testq	$0x4, %rcx	/* is it the done indicator? */
	jz	2f
	jmp	3f
2:
	testq	$0x8, %rcx	/* is it the source indicator? */
	jz	0b		/* Ignore it otherwise */
	movq	%rcx, %rsi	/* For every source page do a copy */
	andq	$0xfffffffffffff000, %rsi

	movq	$512, %rcx
	rep ; movsq
	jmp	0b
3:

	/* To be certain of avoiding problems with self-modifying code
	 * I need to execute a serializing instruction here.
	 * So I flush the TLB by reloading %cr3 here, it's handy,
	 * and not processor dependent.
	 */
	movq	%cr3, %rax
	movq	%rax, %cr3

	/* set all of the registers to known values */
	/* leave %rsp alone */

	xorq	%rax, %rax
	xorq	%rbx, %rbx
	xorq	%rcx, %rcx
	xorq	%rdx, %rdx
	xorq	%rsi, %rsi
	xorq	%rdi, %rdi
	xorq	%rbp, %rbp
	xorq	%r8, %r8
	xorq	%r9, %r9
	xorq	%r10, %r10
	xorq	%r11, %r11
	xorq	%r12, %r12
	xorq	%r13, %r13
	xorq	%r14, %r14
	xorq	%r15, %r15

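	/*
	 * The start address pushed at identity_mapped is at the top of
	 * the stack, so this ret transfers control to the new kernel
	 * image's entry point.
	 */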
	ret