/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low-power support for the Freescale Lite5200 (MPC5200) board.
 * arch/powerpc/platforms/52xx/lite5200_sleep.S
 */
#include <asm/reg.h>
#include <asm/ppc_asm.h>
#include <asm/processor.h>
#include <asm/cache.h>


/* MPC5200 SDRAM controller: register offset from MBAR and its control bits */
#define SDRAM_CTRL	0x104
#define SC_MODE_EN	(1<<31)		/* mode-register access enable */
#define SC_CKE		(1<<30)		/* SDRAM clock enable (CKE pin) */
#define SC_REF_EN	(1<<28)		/* auto-refresh enable */
#define SC_SOFT_PRE	(1<<1)		/* software-triggered precharge */

/* GPIO "wakeup" port registers (offsets from MBAR), used to talk to the QT chip */
#define GPIOW_GPIOE	0xc00		/* pin enable */
#define GPIOW_DDR	0xc08		/* data direction (1 = output) */
#define GPIOW_DVO	0xc0c		/* data value out */

/* Clock Distribution Module: clock-enable register and its SDRAM clock bit */
#define CDM_CE		0x214
#define CDM_SDRAM	(1<<3)
21
/*
 * helpers... beware: r10 and r4 are overwritten
 *
 * Both macros address the save area through r4, treating `addr' as a
 * word (4-byte) index into it.
 */
#define SAVE_SPRN(reg, addr) \
	mfspr	r10, SPRN_##reg; \
	stw	r10, ((addr)*4)(r4);

#define LOAD_SPRN(reg, addr) \
	lwz	r10, ((addr)*4)(r4); \
	mtspr	SPRN_##reg, r10; \
	sync; \
	isync;
33
34 .data
35registers:
36 .space 0x5c*4
37 .text
38
/* ---------------------------------------------------------------------- */
/* low-power mode with help of M68HLC908QT1 */

	.globl lite5200_low_power
lite5200_low_power:
	/*
	 * In:  r3 = SRAM virtual address, r4 = MBAR virtual address.
	 * Register roles held live below: r7 = SRAM va, r8 = MBAR va,
	 * r11 = tb_ticks_per_usec (consumed by the sram-local udelay).
	 * Does not return; resume re-enters at lite5200_wakeup via u-boot.
	 */
	mr	r7, r3		/* save SRAM va */
	mr	r8, r4		/* save MBAR va */

	/* setup wakeup address for u-boot at physical location 0x0 */
	lis	r3, CONFIG_KERNEL_START@h
	lis	r4, lite5200_wakeup@h
	ori	r4, r4, lite5200_wakeup@l
	sub	r4, r4, r3	/* virtual -> physical */
	stw	r4, 0(r3)


	/*
	 * save stuff BDI overwrites
	 * 0xf0 (0xe0->0x100 gets overwritten when BDI connected;
	 * even when CONFIG_BDI* is disabled and MMU XLAT commented; heisenbug?))
	 * WARNING: self-refresh doesn't seem to work when BDI2000 is connected,
	 * possibly because BDI sets SDRAM registers before wakeup code does
	 */
	lis	r4, registers@h
	ori	r4, r4, registers@l
	lwz	r10, 0xf0(r3)		/* r3 still = CONFIG_KERNEL_START */
	stw	r10, (0x1d*4)(r4)	/* slot 0x1d reserved for this word */

	/* save registers to r4 [destroys r10] */
	SAVE_SPRN(LR, 0x1c)		/* must be saved before the bl below clobbers LR */
	bl	save_regs

	/* flush caches [destroys r3, r4] */
	bl	flush_data_cache


	/* copy code to sram */
	mr	r4, r7
	li	r3, (sram_code_end - sram_code)/4
	mtctr	r3
	lis	r3, sram_code@h
	ori	r3, r3, sram_code@l
1:
	lwz	r5, 0(r3)
	stw	r5, 0(r4)
	addi	r3, r3, 4
	addi	r4, r4, 4
	bdnz	1b

	/* get tb_ticks_per_usec */
	lis	r3, tb_ticks_per_usec@h
	lwz	r11, tb_ticks_per_usec@l(r3)

	/* disable I and D caches (ori+xori clears ICE|DCE without touching
	 * the other HID0 bits) */
	mfspr	r3, SPRN_HID0
	ori	r3, r3, HID0_ICE | HID0_DCE
	xori	r3, r3, HID0_ICE | HID0_DCE
	sync; isync;
	mtspr	SPRN_HID0, r3
	sync; isync;

	/* jump to sram */
	mtlr	r7
	blrl
	/* doesn't return */
106
sram_code:
	/* Runs from SRAM: put SDRAM into self-refresh, gate its clock,
	 * then ask the QT chip (via gpio_wkup1) to power us off.
	 * r8 = MBAR va, r11 = tb_ticks_per_usec (for local udelay). */

	/* self refresh */
	lwz	r4, SDRAM_CTRL(r8)

	/* send NOP (precharge) */
	oris	r4, r4, SC_MODE_EN@h	/* mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	ori	r4, r4, SC_SOFT_PRE	/* soft_pre */
	stw	r4, SDRAM_CTRL(r8)
	sync
	xori	r4, r4, SC_SOFT_PRE

	xoris	r4, r4, SC_MODE_EN@h	/* !mode_en */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (for NOP to finish) */
	li	r12, 1
	bl	udelay

	/*
	 * mode_en must not be set when enabling self-refresh
	 * send AR with CKE low (self-refresh)
	 */
	oris	r4, r4, (SC_REF_EN | SC_CKE)@h
	xoris	r4, r4, (SC_CKE)@h	/* ref_en !cke */
	stw	r4, SDRAM_CTRL(r8)
	sync

	/* delay (after !CKE there should be two cycles) */
	li	r12, 1
	bl	udelay

	/* disable clock */
	lwz	r4, CDM_CE(r8)
	ori	r4, r4, CDM_SDRAM
	xori	r4, r4, CDM_SDRAM	/* ori+xori: clear just the SDRAM bit */
	stw	r4, CDM_CE(r8)
	sync

	/* delay a bit */
	li	r12, 1
	bl	udelay


	/* turn off with QT chip */
	li	r4, 0x02
	stb	r4, GPIOW_GPIOE(r8)	/* enable gpio_wkup1 */
	sync

	stb	r4, GPIOW_DVO(r8)	/* "output" high */
	sync
	stb	r4, GPIOW_DDR(r8)	/* output */
	sync
	stb	r4, GPIOW_DVO(r8)	/* output high */
	sync

	/* 10uS delay */
	li	r12, 10
	bl	udelay

	/* turn off */
	li	r4, 0
	stb	r4, GPIOW_DVO(r8)	/* output low */
	sync

	/* wait until we're offline */
1:
	b	1b


	/* local udelay in sram is needed */
udelay:	/* r11 - tb_ticks_per_usec, r12 - usecs, overwrites r13 */
	mullw	r12, r12, r11
	mftb	r13	/* start */
	/* BUGFIX: was `addi r12, r13, r12' — addi takes an immediate, so
	 * r12 was silently treated as the constant 12; `add' is correct. */
	add	r12, r13, r12	/* end */
1:
	mftb	r13	/* current */
	cmp	cr0, r13, r12
	blt	1b
	blr

sram_code_end:
193
194
/* uboot jumps here on resume */
lite5200_wakeup:
	/* Entered with the MMU off; restore_regs rebuilds BATs/SRs/SPRs
	 * and leaves r4 = physical address of the save area. */
	bl	restore_regs


	/* HIDs, MSR */
	LOAD_SPRN(HID1, 0x19)
	LOAD_SPRN(HID2, 0x1a)


	/* address translation is tricky (see turn_on_mmu):
	 * set SRR1 = MSR with IR|DR, SRR0 = target, then rfi to switch
	 * the MMU on while jumping to the translated address. */
	mfmsr	r10
	ori	r10, r10, MSR_DR | MSR_IR


	mtspr	SPRN_SRR1, r10
	lis	r10, mmu_on@h
	ori	r10, r10, mmu_on@l
	mtspr	SPRN_SRR0, r10
	sync
	rfi
mmu_on:
	/* kernel offset (r4 is still set from restore_registers):
	 * convert the save-area pointer back from physical to virtual */
	addis	r4, r4, CONFIG_KERNEL_START@h


	/* restore MSR */
	lwz	r10, (4*0x1b)(r4)
	mtmsr	r10
	sync; isync;

	/* invalidate caches */
	mfspr	r10, SPRN_HID0
	ori	r5, r10, HID0_ICFI | HID0_DCI
	mtspr	SPRN_HID0, r5	/* invalidate caches */
	sync; isync;
	mtspr	SPRN_HID0, r10
	sync; isync;

	/* enable caches */
	lwz	r10, (4*0x18)(r4)
	mtspr	SPRN_HID0, r10	/* restore (enable caches, DPM) */
	/* ^ this has to be after address translation set in MSR */
	sync
	isync


	/* restore 0xf0 (BDI2000) */
	lis	r3, CONFIG_KERNEL_START@h
	lwz	r10, (0x1d*4)(r4)
	stw	r10, 0xf0(r3)

	LOAD_SPRN(LR, 0x1c)


	blr
251
252
/* ---------------------------------------------------------------------- */
/* boring code: helpers */

/* save registers */
#define SAVE_BAT(n, addr) \
	SAVE_SPRN(DBAT##n##L, addr); \
	SAVE_SPRN(DBAT##n##U, addr+1); \
	SAVE_SPRN(IBAT##n##L, addr+2); \
	SAVE_SPRN(IBAT##n##U, addr+3);

#define SAVE_SR(n, addr) \
	mfsr	r10, n; \
	stw	r10, ((addr)*4)(r4);

#define SAVE_4SR(n, addr) \
	SAVE_SR(n, addr); \
	SAVE_SR(n+1, addr+1); \
	SAVE_SR(n+2, addr+2); \
	SAVE_SR(n+3, addr+3);

/* Dump GPRs, HIDs, MSR, MMU state and misc SPRs into the area at r4.
 * Word-slot layout matches the map in restore_regs. Destroys r10. */
save_regs:
	stw	r0, 0(r4)
	stw	r1, 0x4(r4)
	stw	r2, 0x8(r4)
	stmw	r11, 0xc(r4)	/* 0xc -> 0x5f, (0x18*4-1) */

	SAVE_SPRN(HID0, 0x18)
	SAVE_SPRN(HID1, 0x19)
	SAVE_SPRN(HID2, 0x1a)
	mfmsr	r10
	stw	r10, (4*0x1b)(r4)
	/*SAVE_SPRN(LR, 0x1c) have to save it before the call */
	/* 0x1d reserved by 0xf0 */
	SAVE_SPRN(RPA, 0x1e)
	SAVE_SPRN(SDR1, 0x1f)

	/* save MMU regs */
	SAVE_BAT(0, 0x20)
	SAVE_BAT(1, 0x24)
	SAVE_BAT(2, 0x28)
	SAVE_BAT(3, 0x2c)
	SAVE_BAT(4, 0x30)
	SAVE_BAT(5, 0x34)
	SAVE_BAT(6, 0x38)
	SAVE_BAT(7, 0x3c)

	SAVE_4SR(0, 0x40)
	SAVE_4SR(4, 0x44)
	SAVE_4SR(8, 0x48)
	SAVE_4SR(12, 0x4c)

	SAVE_SPRN(SPRG0, 0x50)
	SAVE_SPRN(SPRG1, 0x51)
	SAVE_SPRN(SPRG2, 0x52)
	SAVE_SPRN(SPRG3, 0x53)
	SAVE_SPRN(SPRG4, 0x54)
	SAVE_SPRN(SPRG5, 0x55)
	SAVE_SPRN(SPRG6, 0x56)
	SAVE_SPRN(SPRG7, 0x57)

	SAVE_SPRN(IABR, 0x58)
	SAVE_SPRN(DABR, 0x59)
	SAVE_SPRN(TBRL, 0x5a)
	SAVE_SPRN(TBRU, 0x5b)

	blr
320
/* restore registers */
#define LOAD_BAT(n, addr) \
	LOAD_SPRN(DBAT##n##L, addr); \
	LOAD_SPRN(DBAT##n##U, addr+1); \
	LOAD_SPRN(IBAT##n##L, addr+2); \
	LOAD_SPRN(IBAT##n##U, addr+3);

#define LOAD_SR(n, addr) \
	lwz	r10, ((addr)*4)(r4); \
	mtsr	n, r10;

#define LOAD_4SR(n, addr) \
	LOAD_SR(n, addr); \
	LOAD_SR(n+1, addr+1); \
	LOAD_SR(n+2, addr+2); \
	LOAD_SR(n+3, addr+3);

/* Reload everything save_regs stored. Runs with the MMU off, so the
 * save-area address is converted to physical first. On return r4 still
 * holds the physical save-area pointer (the caller relies on this). */
restore_regs:
	lis	r4, registers@h
	ori	r4, r4, registers@l

	/* MMU is not up yet */
	subis	r4, r4, CONFIG_KERNEL_START@h

	lwz	r0, 0(r4)
	lwz	r1, 0x4(r4)
	lwz	r2, 0x8(r4)
	lmw	r11, 0xc(r4)

	/*
	 * these are a bit tricky
	 *
	 * 0x18 - HID0
	 * 0x19 - HID1
	 * 0x1a - HID2
	 * 0x1b - MSR
	 * 0x1c - LR
	 * 0x1d - reserved by 0xf0 (BDI2000)
	 */
	LOAD_SPRN(RPA, 0x1e);
	LOAD_SPRN(SDR1, 0x1f);

	/* restore MMU regs */
	LOAD_BAT(0, 0x20)
	LOAD_BAT(1, 0x24)
	LOAD_BAT(2, 0x28)
	LOAD_BAT(3, 0x2c)
	LOAD_BAT(4, 0x30)
	LOAD_BAT(5, 0x34)
	LOAD_BAT(6, 0x38)
	LOAD_BAT(7, 0x3c)

	LOAD_4SR(0, 0x40)
	LOAD_4SR(4, 0x44)
	LOAD_4SR(8, 0x48)
	LOAD_4SR(12, 0x4c)

	/* rest of regs */
	LOAD_SPRN(SPRG0, 0x50);
	LOAD_SPRN(SPRG1, 0x51);
	LOAD_SPRN(SPRG2, 0x52);
	LOAD_SPRN(SPRG3, 0x53);
	LOAD_SPRN(SPRG4, 0x54);
	LOAD_SPRN(SPRG5, 0x55);
	LOAD_SPRN(SPRG6, 0x56);
	LOAD_SPRN(SPRG7, 0x57);

	LOAD_SPRN(IABR, 0x58);
	LOAD_SPRN(DABR, 0x59);
	LOAD_SPRN(TBWL, 0x5a);	/* these two have separate R/W regs */
	LOAD_SPRN(TBWU, 0x5b);

	blr
395
396
/* cache flushing code. copied from arch/ppc/boot/util.S */
#define NUM_CACHE_LINES (128*8)

/*
 * Flush data cache
 * Do this by just reading lots of stuff into the cache.
 * (Sequential loads from kernel text displace every dirty line,
 * forcing write-back.)  Destroys r3, r4 and CTR.
 */
flush_data_cache:
	lis	r3,CONFIG_KERNEL_START@h
	ori	r3,r3,CONFIG_KERNEL_START@l
	li	r4,NUM_CACHE_LINES
	mtctr	r4
1:
	lwz	r4,0(r3)
	addi	r3,r3,L1_CACHE_BYTES	/* Next line, please */
	bdnz	1b
	blr